Snap for 10102166 from e9117c7122741bc6d4422545c42584a321649c1a to mainline-tzdata4-release

Change-Id: I6744514ea60d7f82c7ede2c2eeca33b7dcffc76b
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index b6f8552..6c135fa 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -9211,24 +9211,25 @@
      * camera's crop region is set to maximum size, the FOV of the physical streams for the
      * ultrawide lens will be the same as the logical stream, by making the crop region
      * smaller than its active array size to compensate for the smaller focal length.</p>
-     * <p>There are two ways for the application to capture RAW images from a logical camera
-     * with RAW capability:</p>
+     * <p>For a logical camera, typically the underlying physical cameras have different RAW
+     * capabilities (such as resolution or CFA pattern). There are two ways for the
+     * application to capture RAW images from the logical camera:</p>
      * <ul>
-     * <li>Because the underlying physical cameras may have different RAW capabilities (such
-     * as resolution or CFA pattern), to maintain backward compatibility, when a RAW stream
-     * is configured, the camera device makes sure the default active physical camera remains
-     * active and does not switch to other physical cameras. (One exception is that, if the
-     * logical camera consists of identical image sensors and advertises multiple focalLength
-     * due to different lenses, the camera device may generate RAW images from different
-     * physical cameras based on the focalLength being set by the application.) This
-     * backward-compatible approach usually results in loss of optical zoom, to telephoto
-     * lens or to ultrawide lens.</li>
-     * <li>Alternatively, to take advantage of the full zoomRatio range of the logical camera,
-     * the application should use <a href="https://developer.android.com/reference/android/hardware/camera2/MultiResolutionImageReader.html">MultiResolutionImageReader</a>
-     * to capture RAW images from the currently active physical camera. Because different
-     * physical camera may have different RAW characteristics, the application needs to use
-     * the characteristics and result metadata of the active physical camera for the
-     * relevant RAW metadata.</li>
+     * <li>If the logical camera has RAW capability, the application can create and use RAW
+     * streams in the same way as before. In case a RAW stream is configured, to maintain
+     * backward compatibility, the camera device makes sure the default active physical
+     * camera remains active and does not switch to other physical cameras. (One exception
+     * is that, if the logical camera consists of identical image sensors and advertises
+     * multiple focalLength due to different lenses, the camera device may generate RAW
+     * images from different physical cameras based on the focalLength being set by the
+     * application.) This backward-compatible approach usually results in loss of optical
+     * zoom, to telephoto lens or to ultrawide lens.</li>
+     * <li>Alternatively, if supported by the device,
+     * <a href="https://developer.android.com/reference/android/hardware/camera2/MultiResolutionImageReader.html">MultiResolutionImageReader</a>
+     * can be used to capture RAW images from one of the underlying physical cameras
+     * (depending on the current zoom level). Because different physical cameras may have
+     * different RAW characteristics, the application needs to use the characteristics
+     * and result metadata of the active physical camera for the relevant RAW metadata.</li>
      * </ul>
      * <p>The capture request and result metadata tags required for backward compatible camera
      * functionalities will be solely based on the logical camera capability. On the other
diff --git a/media/codec2/components/aom/C2SoftAomDec.cpp b/media/codec2/components/aom/C2SoftAomDec.cpp
index 96b81d7..0eb47f4 100644
--- a/media/codec2/components/aom/C2SoftAomDec.cpp
+++ b/media/codec2/components/aom/C2SoftAomDec.cpp
@@ -578,7 +578,8 @@
     size_t srcVStride = img->stride[AOM_PLANE_V];
     C2PlanarLayout layout = wView.layout();
     size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
-    size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
     if (img->fmt == AOM_IMG_FMT_I42016) {
         const uint16_t *srcY = (const uint16_t *)img->planes[AOM_PLANE_Y];
@@ -592,7 +593,7 @@
                     std::static_pointer_cast<const C2ColorAspectsStruct>(defaultColorAspects));
         } else {
             convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
-                                        srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride,
+                                        srcUStride / 2, srcVStride / 2, dstYStride, dstUStride,
                                         mWidth, mHeight);
         }
     } else {
@@ -600,7 +601,7 @@
         const uint8_t *srcU = (const uint8_t *)img->planes[AOM_PLANE_U];
         const uint8_t *srcV = (const uint8_t *)img->planes[AOM_PLANE_V];
         convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
-                                   srcVStride, dstYStride, dstUVStride, mWidth, mHeight);
+                                   srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight);
     }
     finishWork(*(int64_t*)img->user_priv, work, std::move(block));
     block = nullptr;
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 953afc5..96a4c4a 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -671,6 +671,9 @@
 void C2SoftAvcDec::resetPlugin() {
     mSignalledOutputEos = false;
     mTimeStart = mTimeEnd = systemTime();
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
 }
 
 status_t C2SoftAvcDec::deleteDecoder() {
diff --git a/media/codec2/components/base/SimpleC2Component.cpp b/media/codec2/components/base/SimpleC2Component.cpp
index 199875d..c252613 100644
--- a/media/codec2/components/base/SimpleC2Component.cpp
+++ b/media/codec2/components/base/SimpleC2Component.cpp
@@ -38,8 +38,8 @@
 void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
                                 const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
-                                size_t dstUVStride, uint32_t width, uint32_t height,
-                                bool isMonochrome) {
+                                size_t dstUStride, size_t dstVStride, uint32_t width,
+                                uint32_t height, bool isMonochrome) {
     for (size_t i = 0; i < height; ++i) {
         memcpy(dstY, srcY, width);
         srcY += srcYStride;
@@ -51,8 +51,8 @@
         for (size_t i = 0; i < (height + 1) / 2; ++i) {
             memset(dstV, kNeutralUVBitDepth8, (width + 1) / 2);
             memset(dstU, kNeutralUVBitDepth8, (width + 1) / 2);
-            dstV += dstUVStride;
-            dstU += dstUVStride;
+            dstV += dstVStride;
+            dstU += dstUStride;
         }
         return;
     }
@@ -60,13 +60,13 @@
     for (size_t i = 0; i < (height + 1) / 2; ++i) {
         memcpy(dstV, srcV, (width + 1) / 2);
         srcV += srcVStride;
-        dstV += dstUVStride;
+        dstV += dstVStride;
     }
 
     for (size_t i = 0; i < (height + 1) / 2; ++i) {
         memcpy(dstU, srcU, (width + 1) / 2);
         srcU += srcUStride;
-        dstU += dstUVStride;
+        dstU += dstUStride;
     }
 }
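
For context on the stride change above: each destination plane now advances by its own rowInc instead of a shared dstUVStride. A minimal standalone sketch of the per-plane row copy this enables (illustrative only; names are placeholders, not the patched function):

    #include <cstdint>
    #include <cstring>

    // Copy one image plane row by row. Source and destination may use
    // different strides, which is why each plane carries its own value.
    static void copyPlane(uint8_t *dst, size_t dstStride,
                          const uint8_t *src, size_t srcStride,
                          uint32_t width, uint32_t height) {
        for (uint32_t row = 0; row < height; ++row) {
            std::memcpy(dst, src, width);  // one row of samples
            dst += dstStride;              // destination advances by its own stride
            src += srcStride;              // source stride can differ
        }
    }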
 
diff --git a/media/codec2/components/base/include/SimpleC2Component.h b/media/codec2/components/base/include/SimpleC2Component.h
index 7600c5b..83c4991 100644
--- a/media/codec2/components/base/include/SimpleC2Component.h
+++ b/media/codec2/components/base/include/SimpleC2Component.h
@@ -33,8 +33,8 @@
 void convertYUV420Planar8ToYV12(uint8_t *dstY, uint8_t *dstU, uint8_t *dstV, const uint8_t *srcY,
                                 const uint8_t *srcU, const uint8_t *srcV, size_t srcYStride,
                                 size_t srcUStride, size_t srcVStride, size_t dstYStride,
-                                size_t dstUVStride, uint32_t width, uint32_t height,
-                                bool isMonochrome = false);
+                                size_t dstUStride, size_t dstVStride, uint32_t width,
+                                uint32_t height, bool isMonochrome = false);
 
 void convertYUV420Planar16ToY410OrRGBA1010102(
         uint32_t *dst, const uint16_t *srcY,
diff --git a/media/codec2/components/gav1/C2SoftGav1Dec.cpp b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
index d234f21..9c5ce9f 100644
--- a/media/codec2/components/gav1/C2SoftGav1Dec.cpp
+++ b/media/codec2/components/gav1/C2SoftGav1Dec.cpp
@@ -857,7 +857,8 @@
 
   C2PlanarLayout layout = wView.layout();
   size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
-  size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+  size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+  size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
   if (buffer->bitdepth == 10) {
     const uint16_t *srcY = (const uint16_t *)buffer->plane[0];
@@ -873,10 +874,10 @@
     } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
         convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                     srcYStride / 2, srcUStride / 2, srcVStride / 2, dstYStride / 2,
-                                    dstUVStride / 2, mWidth, mHeight, isMonochrome);
+                                    dstUStride / 2, mWidth, mHeight, isMonochrome);
     } else {
         convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
-                                    srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride, mWidth,
+                                    srcUStride / 2, srcVStride / 2, dstYStride, dstUStride, mWidth,
                                     mHeight, isMonochrome);
     }
   } else {
@@ -884,7 +885,8 @@
     const uint8_t *srcU = (const uint8_t *)buffer->plane[1];
     const uint8_t *srcV = (const uint8_t *)buffer->plane[2];
     convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
-                               srcVStride, dstYStride, dstUVStride, mWidth, mHeight, isMonochrome);
+                               srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight,
+                               isMonochrome);
   }
   finishWork(buffer->user_private_data, work, std::move(block));
   block = nullptr;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index a27c218..15d6dcd 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -664,6 +664,9 @@
 void C2SoftHevcDec::resetPlugin() {
     mSignalledOutputEos = false;
     mTimeStart = mTimeEnd = systemTime();
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
 }
 
 status_t C2SoftHevcDec::deleteDecoder() {
diff --git a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
index 9a41910..439323c 100644
--- a/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
+++ b/media/codec2/components/mpeg2/C2SoftMpeg2Dec.cpp
@@ -732,6 +732,9 @@
 void C2SoftMpeg2Dec::resetPlugin() {
     mSignalledOutputEos = false;
     mTimeStart = mTimeEnd = systemTime();
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
 }
 
 status_t C2SoftMpeg2Dec::deleteDecoder() {
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 54a1d0e..2137964 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -256,7 +256,9 @@
     mFramesConfigured = false;
     mSignalledOutputEos = false;
     mSignalledError = false;
-
+    if (mOutBlock) {
+        mOutBlock.reset();
+    }
     return C2_OK;
 }
 
@@ -601,7 +603,8 @@
 
         C2PlanarLayout layout = wView.layout();
         size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
-        size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+        size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+        size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
         size_t srcYStride = align(mWidth, 16);
         size_t srcUStride = srcYStride / 2;
         size_t srcVStride = srcYStride / 2;
@@ -611,8 +614,8 @@
         const uint8_t *srcV = (const uint8_t *)srcY + vStride * srcYStride * 5 / 4;
 
         convertYUV420Planar8ToYV12(outputBufferY, outputBufferU, outputBufferV, srcY, srcU, srcV,
-                                   srcYStride, srcUStride, srcVStride, dstYStride, dstUVStride,
-                                   mWidth, mHeight);
+                                   srcYStride, srcUStride, srcVStride, dstYStride, dstUStride,
+                                   dstVStride, mWidth, mHeight);
 
         inPos += inSize - (size_t)tmpInSize;
         finishWork(workIndex, work);
diff --git a/media/codec2/components/vpx/C2SoftVpxDec.cpp b/media/codec2/components/vpx/C2SoftVpxDec.cpp
index 18cd1bf..dab7b89 100644
--- a/media/codec2/components/vpx/C2SoftVpxDec.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxDec.cpp
@@ -766,7 +766,8 @@
     size_t srcVStride = img->stride[VPX_PLANE_V];
     C2PlanarLayout layout = wView.layout();
     size_t dstYStride = layout.planes[C2PlanarLayout::PLANE_Y].rowInc;
-    size_t dstUVStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstUStride = layout.planes[C2PlanarLayout::PLANE_U].rowInc;
+    size_t dstVStride = layout.planes[C2PlanarLayout::PLANE_V].rowInc;
 
     if (img->fmt == VPX_IMG_FMT_I42016) {
         const uint16_t *srcY = (const uint16_t *)img->planes[VPX_PLANE_Y];
@@ -804,10 +805,10 @@
         } else if (format == HAL_PIXEL_FORMAT_YCBCR_P010) {
             convertYUV420Planar16ToP010((uint16_t *)dstY, (uint16_t *)dstU, srcY, srcU, srcV,
                                         srcYStride / 2, srcUStride / 2, srcVStride / 2,
-                                        dstYStride / 2, dstUVStride / 2, mWidth, mHeight);
+                                        dstYStride / 2, dstUStride / 2, mWidth, mHeight);
         } else {
             convertYUV420Planar16ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride / 2,
-                                        srcUStride / 2, srcVStride / 2, dstYStride, dstUVStride,
+                                        srcUStride / 2, srcVStride / 2, dstYStride, dstUStride,
                                         mWidth, mHeight);
         }
     } else {
@@ -816,7 +817,7 @@
         const uint8_t *srcV = (const uint8_t *)img->planes[VPX_PLANE_V];
 
         convertYUV420Planar8ToYV12(dstY, dstU, dstV, srcY, srcU, srcV, srcYStride, srcUStride,
-                                   srcVStride, dstYStride, dstUVStride, mWidth, mHeight);
+                                   srcVStride, dstYStride, dstUStride, dstVStride, mWidth, mHeight);
     }
     finishWork(((c2_cntr64_t *)img->user_priv)->peekull(), work, std::move(block));
     return OK;
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.cpp b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
index f99ee24..daf7a61 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.cpp
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.cpp
@@ -59,7 +59,7 @@
 
     addParameter(
         DefineParam(mSize, C2_PARAMKEY_PICTURE_SIZE)
-            .withDefault(new C2StreamPictureSizeInfo::input(0u, 320, 240))
+            .withDefault(new C2StreamPictureSizeInfo::input(0u, 64, 64))
             .withFields({
                 C2F(mSize, width).inRange(2, 2048, 2),
                 C2F(mSize, height).inRange(2, 2048, 2),
@@ -81,7 +81,7 @@
 
     addParameter(
         DefineParam(mFrameRate, C2_PARAMKEY_FRAME_RATE)
-            .withDefault(new C2StreamFrameRateInfo::output(0u, 30.))
+            .withDefault(new C2StreamFrameRateInfo::output(0u, 1.))
             // TODO: More restriction?
             .withFields({C2F(mFrameRate, value).greaterThan(0.)})
             .withSetter(
@@ -127,10 +127,18 @@
                 C2F(mProfileLevel, profile).equalTo(
                     PROFILE_VP9_0
                 ),
-                C2F(mProfileLevel, level).equalTo(
-                    LEVEL_VP9_4_1),
+                C2F(mProfileLevel, level).oneOf({
+                        C2Config::LEVEL_VP9_1,
+                        C2Config::LEVEL_VP9_1_1,
+                        C2Config::LEVEL_VP9_2,
+                        C2Config::LEVEL_VP9_2_1,
+                        C2Config::LEVEL_VP9_3,
+                        C2Config::LEVEL_VP9_3_1,
+                        C2Config::LEVEL_VP9_4,
+                        C2Config::LEVEL_VP9_4_1,
+                }),
             })
-            .withSetter(ProfileLevelSetter)
+            .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
             .build());
 #else
     addParameter(
@@ -144,7 +152,7 @@
                 C2F(mProfileLevel, level).equalTo(
                     LEVEL_UNUSED),
             })
-            .withSetter(ProfileLevelSetter)
+            .withSetter(ProfileLevelSetter, mSize, mFrameRate, mBitrate)
             .build());
 #endif
     addParameter(
@@ -217,14 +225,81 @@
 }
 
 C2R C2SoftVpxEnc::IntfImpl::ProfileLevelSetter(bool mayBlock,
-                                               C2P<C2StreamProfileLevelInfo::output>& me) {
+                                               C2P<C2StreamProfileLevelInfo::output>& me,
+                                               const C2P<C2StreamPictureSizeInfo::input>& size,
+                                               const C2P<C2StreamFrameRateInfo::output>& frameRate,
+                                               const C2P<C2StreamBitrateInfo::output>& bitrate) {
     (void)mayBlock;
+#ifdef VP9
     if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
         me.set().profile = PROFILE_VP9_0;
     }
-    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+    struct LevelLimits {
+        C2Config::level_t level;
+        float samplesPerSec;
+        uint64_t samples;
+        uint32_t bitrate;
+        size_t dimension;
+    };
+    constexpr LevelLimits kLimits[] = {
+            {LEVEL_VP9_1, 829440, 36864, 200000, 512},
+            {LEVEL_VP9_1_1, 2764800, 73728, 800000, 768},
+            {LEVEL_VP9_2, 4608000, 122880, 1800000, 960},
+            {LEVEL_VP9_2_1, 9216000, 245760, 3600000, 1344},
+            {LEVEL_VP9_3, 20736000, 552960, 7200000, 2048},
+            {LEVEL_VP9_3_1, 36864000, 983040, 12000000, 2752},
+            {LEVEL_VP9_4, 83558400, 2228224, 18000000, 4160},
+            {LEVEL_VP9_4_1, 160432128, 2228224, 30000000, 4160},
+    };
+
+    uint64_t samples = size.v.width * size.v.height;
+    float samplesPerSec = float(samples) * frameRate.v.value;
+    size_t dimension = std::max(size.v.width, size.v.height);
+
+    // Check if the supplied level meets the samples / bitrate requirements.
+    // If not, update the level with the lowest level meeting the requirements.
+    bool found = false;
+
+    // By default needsUpdate = false in case the supplied level does meet
+    // the requirements.
+    bool needsUpdate = false;
+    for (const LevelLimits& limit : kLimits) {
+        if (samples <= limit.samples && samplesPerSec <= limit.samplesPerSec &&
+            bitrate.v.value <= limit.bitrate && dimension <= limit.dimension) {
+            // This is the lowest level that meets the requirements, and if
+            // we haven't seen the supplied level yet, that means we don't
+            // need the update.
+            if (needsUpdate) {
+                ALOGD("Given level %x does not cover current configuration: "
+                        "adjusting to %x",
+                        me.v.level, limit.level);
+                me.set().level = limit.level;
+            }
+            found = true;
+            break;
+        }
+        if (me.v.level == limit.level) {
+            // We break out of the loop when the lowest feasible level is
+            // found. The fact that we're here means that our level doesn't
+            // meet the requirement and needs to be updated.
+            needsUpdate = true;
+        }
+    }
+    if (!found) {
+        // We set to the highest supported level.
         me.set().level = LEVEL_VP9_4_1;
     }
+#else
+    (void)size;
+    (void)frameRate;
+    (void)bitrate;
+    if (!me.F(me.v.profile).supportsAtAll(me.v.profile)) {
+        me.set().profile = PROFILE_VP8_0;
+    }
+    if (!me.F(me.v.level).supportsAtAll(me.v.level)) {
+        me.set().level = LEVEL_UNUSED;
+    }
+#endif
     return C2R::Ok();
 }
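
The setter above selects the lowest VP9 level whose limits cover the configured picture size, frame rate, and bitrate, and falls back to the highest supported level when nothing fits. A simplified sketch of that selection (table fields and values are placeholders, not the VP9 limits from this patch):

    #include <cstddef>
    #include <cstdint>

    struct LevelLimit {
        int level;                // placeholder level identifier
        uint64_t maxSamples;      // max luma picture size
        double maxSamplesPerSec;  // max luma sample rate
        uint32_t maxBitrate;
        size_t maxDimension;
    };

    // Walk the table from lowest to highest level and return the first entry
    // whose limits cover the configuration; otherwise return the fallback.
    static int pickLowestLevel(const LevelLimit *table, size_t count,
                               uint64_t samples, double samplesPerSec,
                               uint32_t bitrate, size_t dimension, int fallback) {
        for (size_t i = 0; i < count; ++i) {
            const LevelLimit &l = table[i];
            if (samples <= l.maxSamples && samplesPerSec <= l.maxSamplesPerSec &&
                bitrate <= l.maxBitrate && dimension <= l.maxDimension) {
                return l.level;
            }
        }
        return fallback;
    }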
 
diff --git a/media/codec2/components/vpx/C2SoftVpxEnc.h b/media/codec2/components/vpx/C2SoftVpxEnc.h
index 714fadb..bfb4444 100644
--- a/media/codec2/components/vpx/C2SoftVpxEnc.h
+++ b/media/codec2/components/vpx/C2SoftVpxEnc.h
@@ -243,9 +243,10 @@
     static C2R SizeSetter(bool mayBlock, const C2P<C2StreamPictureSizeInfo::input> &oldMe,
                           C2P<C2StreamPictureSizeInfo::input> &me);
 
-    static C2R ProfileLevelSetter(
-            bool mayBlock,
-            C2P<C2StreamProfileLevelInfo::output> &me);
+    static C2R ProfileLevelSetter(bool mayBlock, C2P<C2StreamProfileLevelInfo::output>& me,
+                                  const C2P<C2StreamPictureSizeInfo::input>& size,
+                                  const C2P<C2StreamFrameRateInfo::output>& frameRate,
+                                  const C2P<C2StreamBitrateInfo::output>& bitrate);
 
     static C2R LayeringSetter(bool mayBlock, C2P<C2StreamTemporalLayeringTuning::output>& me);
 
diff --git a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
index ef5800d..332d3ac 100644
--- a/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
+++ b/media/codec2/sfplugin/utils/Codec2CommonUtils.cpp
@@ -38,7 +38,7 @@
            !strcmp(deviceCodeName, "Tiramisu");
 }
 
-bool isVendorApiOrFirstApiAtLeastT() {
+static bool isP010Allowed() {
     // The first SDK the device shipped with.
     static const int32_t kProductFirstApiLevel =
         base::GetIntProperty<int32_t>("ro.product.first_api_level", 0);
@@ -47,6 +47,17 @@
     // to signal which VSR requirements they conform to even if the first device SDK was higher.
     static const int32_t kBoardFirstApiLevel =
         base::GetIntProperty<int32_t>("ro.board.first_api_level", 0);
+
+    // Some devices that launched prior to Android S may not support P010 correctly, even
+    // though they may advertise it as supported.
+    if (kProductFirstApiLevel != 0 && kProductFirstApiLevel < __ANDROID_API_S__) {
+        return false;
+    }
+
+    if (kBoardFirstApiLevel != 0 && kBoardFirstApiLevel < __ANDROID_API_S__) {
+        return false;
+    }
+
     static const int32_t kBoardApiLevel =
         base::GetIntProperty<int32_t>("ro.board.api_level", 0);
 
@@ -67,7 +78,7 @@
     // API alone. For now limit P010 to devices that launched with Android T or known to conform
     // to Android T VSR (as opposed to simply limiting to a T vendor image).
     if (format == (AHardwareBuffer_Format)HAL_PIXEL_FORMAT_YCBCR_P010 &&
-            !isVendorApiOrFirstApiAtLeastT()) {
+            !isP010Allowed()) {
         return false;
     }
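
A reduced sketch of the launch-API gating introduced above, assuming the same ro.product.first_api_level / ro.board.first_api_level properties and the android-base property helpers (illustrative; not the full isP010Allowed() logic, which also consults ro.board.api_level):

    #include <android-base/properties.h>
    #include <android/api-level.h>

    // A device is treated as having launched before Android S if either
    // first_api_level property is set and below S; such devices are
    // excluded from P010 regardless of what they advertise.
    static bool launchedBeforeS() {
        const int32_t product =
                android::base::GetIntProperty<int32_t>("ro.product.first_api_level", 0);
        const int32_t board =
                android::base::GetIntProperty<int32_t>("ro.board.first_api_level", 0);
        return (product != 0 && product < __ANDROID_API_S__) ||
               (board != 0 && board < __ANDROID_API_S__);
    }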
 
diff --git a/media/codec2/vndk/include/C2SurfaceSyncObj.h b/media/codec2/vndk/include/C2SurfaceSyncObj.h
index ac87fe4..d858f27 100644
--- a/media/codec2/vndk/include/C2SurfaceSyncObj.h
+++ b/media/codec2/vndk/include/C2SurfaceSyncObj.h
@@ -72,12 +72,13 @@
     /**
      * Notify a buffer is queued. Return whether the upcoming dequeue operation
      * is not blocked. if it's blocked and waitId is non-null, waitId is returned
-     * to be used for waiting.
+     * to be used for waiting. Waiters are notified (woken up) only when
+     * 'notify' is true.
      *
      * \retval false    dequeue operation is blocked now.
      * \retval true     dequeue operation is possible.
      */
-    bool notifyQueuedLocked(uint32_t *waitId = nullptr);
+    bool notifyQueuedLocked(uint32_t *waitId = nullptr, bool notify = true);
 
     /**
      * Notify a buffer is dequeued.
diff --git a/media/codec2/vndk/platform/C2BqBuffer.cpp b/media/codec2/vndk/platform/C2BqBuffer.cpp
index e67e42f..f2cd585 100644
--- a/media/codec2/vndk/platform/C2BqBuffer.cpp
+++ b/media/codec2/vndk/platform/C2BqBuffer.cpp
@@ -432,12 +432,16 @@
         if (fence) {
             static constexpr int kFenceWaitTimeMs = 10;
 
+            if (bufferNeedsReallocation) {
+                mBuffers[slot].clear();
+            }
+
             status_t status = fence->wait(kFenceWaitTimeMs);
             if (status == -ETIME) {
                 // fence is not signalled yet.
                 if (syncVar) {
-                    syncVar->lock();
                     (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->lock();
                     dequeueable = syncVar->notifyQueuedLocked(&waitId);
                     syncVar->unlock();
                     if (c2Fence) {
@@ -452,8 +456,8 @@
             if (status != android::NO_ERROR) {
                 ALOGD("buffer fence wait error %d", status);
                 if (syncVar) {
-                    syncVar->lock();
                     (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->lock();
                     syncVar->notifyQueuedLocked();
                     syncVar->unlock();
                     if (c2Fence) {
@@ -502,8 +506,8 @@
             } else if (status != android::NO_ERROR) {
                 slotBuffer.clear();
                 if (syncVar) {
-                    syncVar->lock();
                     (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                    syncVar->lock();
                     syncVar->notifyQueuedLocked();
                     syncVar->unlock();
                     if (c2Fence) {
@@ -550,8 +554,8 @@
             // Block was not created. call requestBuffer# again next time.
             slotBuffer.clear();
             if (syncVar) {
-                syncVar->lock();
                 (void)mProducer->cancelBuffer(slot, hFenceWrapper.getHandle()).isOk();
+                syncVar->lock();
                 syncVar->notifyQueuedLocked();
                 syncVar->unlock();
                 if (c2Fence) {
@@ -813,11 +817,10 @@
         if (mGeneration == mCurrentGeneration && mBqId == mCurrentBqId && !mOwner.expired()) {
             C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
             if (syncVar) {
+                mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
                 syncVar->lock();
-                if (syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_ACTIVE) {
-                    mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
-                    syncVar->notifyQueuedLocked();
-                }
+                syncVar->notifyQueuedLocked(nullptr,
+                        syncVar->getSyncStatusLocked() == C2SyncVariables::STATUS_ACTIVE);
                 syncVar->unlock();
             } else {
                 mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
@@ -826,11 +829,10 @@
     } else if (!mOwner.expired()) {
         C2SyncVariables *syncVar = mSyncMem ? mSyncMem->mem() : nullptr;
         if (syncVar) {
+            mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
             syncVar->lock();
-            if (syncVar->getSyncStatusLocked() != C2SyncVariables::STATUS_SWITCHING) {
-                mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
-                syncVar->notifyQueuedLocked();
-            }
+            syncVar->notifyQueuedLocked(nullptr,
+                    syncVar->getSyncStatusLocked() != C2SyncVariables::STATUS_SWITCHING);
             syncVar->unlock();
         } else {
             mIgbp->cancelBuffer(mBqSlot, hidl_handle{}).isOk();
diff --git a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
index 2115cc3..bf4ca32 100644
--- a/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
+++ b/media/codec2/vndk/platform/C2SurfaceSyncObj.cpp
@@ -64,6 +64,11 @@
     }
 
     HandleSyncMem *o = static_cast<HandleSyncMem*>(handle);
+    if (o->size() < sizeof(C2SyncVariables)) {
+        android_errorWriteLog(0x534e4554, "240140929");
+        return nullptr;
+    }
+
     void *ptr = mmap(NULL, o->size(), PROT_READ | PROT_WRITE, MAP_SHARED, o->memFd(), 0);
 
     if (ptr == MAP_FAILED) {
@@ -177,12 +182,14 @@
     return true;
 }
 
-bool C2SyncVariables::notifyQueuedLocked(uint32_t *waitId) {
+bool C2SyncVariables::notifyQueuedLocked(uint32_t *waitId, bool notify) {
     // Note. thundering herds may occur. Edge trigged signalling.
     // But one waiter will guarantee to dequeue. others may wait again.
     // Minimize futex syscall(trap) for the main use case(one waiter case).
     if (mMaxDequeueCount == mCurDequeueCount--) {
-        broadcast();
+        if (notify) {
+            broadcast();
+        }
         return true;
     }
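
The size check added above rejects shared-memory handles too small to hold C2SyncVariables before they are mapped. A generic sketch of that guard pattern (struct and names are placeholders):

    #include <sys/mman.h>
    #include <cstddef>

    struct SharedState { int status; int waiters; };  // placeholder layout

    // Map a shared-memory fd only if the reported size can actually hold the
    // structure both processes expect; otherwise treat the handle as invalid.
    static void *mapSharedState(int fd, size_t reportedSize) {
        if (reportedSize < sizeof(SharedState)) {
            return nullptr;  // undersized (possibly hostile) handle
        }
        void *ptr = mmap(nullptr, reportedSize, PROT_READ | PROT_WRITE, MAP_SHARED, fd, 0);
        return ptr == MAP_FAILED ? nullptr : ptr;
    }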
 
diff --git a/media/extractors/mp4/MPEG4Extractor.cpp b/media/extractors/mp4/MPEG4Extractor.cpp
index e278984..a026153 100644
--- a/media/extractors/mp4/MPEG4Extractor.cpp
+++ b/media/extractors/mp4/MPEG4Extractor.cpp
@@ -6723,7 +6723,7 @@
         const Sample *smpl = &mCurrentSamples[mCurrentSampleIndex];
         offset = smpl->offset;
         size = smpl->size;
-        cts = mCurrentTime + smpl->compositionOffset;
+        cts = (int64_t)mCurrentTime + (int64_t)smpl->compositionOffset;
 
         if (mElstInitialEmptyEditTicks > 0) {
             cts += mElstInitialEmptyEditTicks;
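
The cast above widens both 32-bit operands before the addition, so cts is computed in 64-bit arithmetic rather than wrapping in 32 bits first. A tiny self-contained illustration with made-up values (not the extractor's real fields):

    #include <cstdint>
    #include <cstdio>

    int main() {
        uint32_t currentTime = 0xFFFFFFF0u;  // made-up values near the 32-bit limit
        uint32_t compositionOffset = 0x40u;

        int64_t wrapped = (int64_t)(currentTime + compositionOffset);         // 32-bit add wraps first
        int64_t widened = (int64_t)currentTime + (int64_t)compositionOffset;  // widen, then add

        std::printf("wrapped=%lld widened=%lld\n", (long long)wrapped, (long long)widened);
        return 0;
    }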
diff --git a/media/extractors/ogg/OggExtractor.cpp b/media/extractors/ogg/OggExtractor.cpp
index eb2246d..11f93b5 100644
--- a/media/extractors/ogg/OggExtractor.cpp
+++ b/media/extractors/ogg/OggExtractor.cpp
@@ -34,6 +34,9 @@
 #include <system/audio.h>
 #include <utils/String8.h>
 
+#include <inttypes.h>
+#include <stdint.h>
+
 extern "C" {
     #include <Tremolo/codec_internal.h>
 
@@ -346,66 +349,118 @@
         off64_t startOffset, off64_t *pageOffset) {
     *pageOffset = startOffset;
 
-    for (;;) {
-        char signature[4];
-        ssize_t n = mSource->readAt(*pageOffset, &signature, 4);
+    // balance between larger reads and reducing how much we over-read.
+    const int FIND_BUF_SIZE = 2048;
+    const int lenOggS = strlen("OggS");
+    while(1) {
 
-        if (n < 4) {
+        // work with big buffers to amortize readAt() costs
+        char signatureBuffer[FIND_BUF_SIZE];
+        ssize_t n = mSource->readAt(*pageOffset, &signatureBuffer, sizeof(signatureBuffer));
+
+        if (n < lenOggS) {
             *pageOffset = 0;
-
             return (n < 0) ? n : (status_t)ERROR_END_OF_STREAM;
         }
 
-        if (!memcmp(signature, "OggS", 4)) {
-            if (*pageOffset > startOffset) {
-                ALOGV("skipped %lld bytes of junk to reach next frame",
-                     (long long)(*pageOffset - startOffset));
-            }
-
-            return OK;
-        }
-
-        // see how far ahead to skip; avoid some fruitless comparisons
-        unsigned int i;
-        for (i = 1; i < 4 ; i++) {
-            if (signature[i] == 'O')
+        for(int i = 0; i < n - (lenOggS - 1) ; i++) {
+            // fast scan for 1st character in a signature
+            char *p = (char *)memchr(&signatureBuffer[i], 'O', n - (lenOggS - 1) - i);
+            if (p == NULL) {
+                // no signature start in the rest of this buffer.
                 break;
+            }
+            int jump = (p-&signatureBuffer[i]);
+            i += jump;
+            if (memcmp("OggS", &signatureBuffer[i], lenOggS) == 0) {
+                *pageOffset += i;
+                if (*pageOffset > startOffset) {
+                    ALOGD("skipped %" PRIu64 " bytes of junk to reach next frame",
+                         (*pageOffset - startOffset));
+                }
+                return OK;
+            }
         }
-        *pageOffset += i;
+
+        // on to next block. buffer didn't end with "OggS", but could end with "Ogg".
+        // overlap enough to detect this. n >= lenOggS, so this always advances.
+        *pageOffset += n - (lenOggS - 1);
     }
+    return (status_t)ERROR_END_OF_STREAM;
 }
 
 // Given the offset of the "current" page, find the page immediately preceding
 // it (if any) and return its granule position.
 // To do this we back up from the "current" page's offset until we find any
 // page preceding it and then scan forward to just before the current page.
+//
 status_t MyOggExtractor::findPrevGranulePosition(
         off64_t pageOffset, uint64_t *granulePos) {
     *granulePos = 0;
 
-    off64_t prevPageOffset = 0;
-    off64_t prevGuess = pageOffset;
-    for (;;) {
-        if (prevGuess >= 5000) {
-            prevGuess -= 5000;
+    const int FIND_BUF_SIZE = 2048;
+    const int lenOggS = strlen("OggS");
+
+    if (pageOffset == 0) {
+        ALOGV("no page before the first page");
+        return UNKNOWN_ERROR;
+    }
+
+    off64_t prevPageOffset = pageOffset;
+
+    // we start our search on the byte immediately in front of pageOffset
+    // which could mean "O" immediately before and "ggS" starting at pageOffset
+    //
+    // if there was an "OggS" at pageOffset, we'll have scanned a few extra bytes
+    // but if pageOffset was chosen by a seek operation, we don't know that it
+    // reflects the beginning of a page. By choosing to scan 3 possibly unneeded
+    // bytes at the start we cover both cases.
+    //
+    off64_t firstAfter = pageOffset + lenOggS - 1;    // NOT within our buffer
+    off64_t nextOffset = pageOffset;
+
+    while(prevPageOffset == pageOffset) {
+        // work with big buffers to amortize readAt() costs
+        char signatureBuffer[FIND_BUF_SIZE];
+
+        ssize_t desired = sizeof(signatureBuffer);
+        if (firstAfter >= desired) {
+            nextOffset = firstAfter - desired;
         } else {
-            prevGuess = 0;
+            nextOffset = 0;
+            desired = firstAfter;
         }
+        ssize_t n = mSource->readAt(nextOffset, &signatureBuffer, desired);
 
-        ALOGV("backing up %lld bytes", (long long)(pageOffset - prevGuess));
-
-        status_t err = findNextPage(prevGuess, &prevPageOffset);
-        if (err == ERROR_END_OF_STREAM) {
-            // We are at the last page and didn't back off enough;
-            // back off 5000 bytes more and try again.
-            continue;
-        } else if (err != OK) {
-            return err;
-        }
-
-        if (prevPageOffset < pageOffset || prevGuess == 0) {
+        if (n < lenOggS) {
+            ALOGD("short read, get out");
             break;
         }
+
+        // work backwards
+        // loop control ok for n >= 0
+        for(int i = n - lenOggS; i >= 0 ; i--) {
+            // fast scan for 1st character in the signature
+            char *p = (char *)memrchr(&signatureBuffer[0], 'O', i);
+            if (p == NULL) {
+                // no signature start in the rest of this buffer.
+                break;
+            }
+            i = (p-&signatureBuffer[0]);
+            // loop start chosen to ensure we will always have lenOggS bytes
+            if (memcmp("OggS", &signatureBuffer[i], lenOggS) == 0) {
+                prevPageOffset = nextOffset + i;
+                break;
+            }
+        }
+
+        // back up for next read; make sure we catch overlaps
+        if (nextOffset == 0) {
+            // can't back up any further
+            break;
+        }
+        // current buffer might start with "ggS", include those bytes in the next iteration
+        firstAfter = nextOffset + lenOggS - 1;
     }
 
     if (prevPageOffset == pageOffset) {
@@ -413,8 +468,8 @@
         return UNKNOWN_ERROR;
     }
 
-    ALOGV("prevPageOffset at %lld, pageOffset at %lld",
-            (long long)prevPageOffset, (long long)pageOffset);
+    ALOGV("prevPageOffset at %" PRIu64 ", pageOffset at %" PRIu64,
+          prevPageOffset, pageOffset);
     uint8_t flag = 0;
     for (;;) {
         Page prevPage;
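
The rewritten findNextPage() reads large blocks and scans them with memchr/memcmp, then overlaps successive reads by strlen("OggS") - 1 bytes so a signature split across two reads is still found. A standalone sketch of the in-buffer scan (file I/O and buffer refilling omitted; names are illustrative):

    #include <sys/types.h>
    #include <cstring>

    // Return the offset of the first "OggS" signature that starts in buf[0..n),
    // or -1 if none does. The caller re-reads the next block with a 3-byte
    // overlap so a signature straddling two reads is not missed.
    static ssize_t findOggSignature(const char *buf, size_t n) {
        static const char kSig[] = "OggS";
        const size_t kLen = 4;
        for (size_t i = 0; i + kLen <= n; ++i) {
            const char *p = (const char *)memchr(buf + i, 'O', n - kLen + 1 - i);
            if (p == nullptr) {
                break;               // no candidate start left in this buffer
            }
            i = (size_t)(p - buf);
            if (memcmp(p, kSig, kLen) == 0) {
                return (ssize_t)i;   // signature found at offset i
            }
        }
        return -1;
    }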
diff --git a/media/janitors/media_solutions_OWNERS b/media/janitors/media_solutions_OWNERS
index 8dc1c7b..e0c87f7 100644
--- a/media/janitors/media_solutions_OWNERS
+++ b/media/janitors/media_solutions_OWNERS
@@ -1,10 +1,21 @@
 # Bug component: 1344
 # go/android-fwk-media-solutions for info on areas of ownership.
 
-# Main owners:
+# MediaRouter and native mirroring only:
+adadukin@google.com
 aquilescanta@google.com
-krocard@google.com
+bishoygendy@google.com
+ivanbuper@google.com
 
-# In case of emergency:
-andrewlewis@google.com #{LAST_RESORT_SUGGESTION}
-olly@google.com #{LAST_RESORT_SUGGESTION}
+# MediaMuxer, MediaRecorder, and seamless transcoding only:
+andrewlewis@google.com
+claincly@google.com
+
+# Everything in go/android-fwk-media-solutions not covered above:
+bachinger@google.com
+christosts@google.com
+ibaker@google.com
+michaelkatz@google.com
+rohks@google.com
+tianyifeng@google.com
+tonihei@google.com
diff --git a/media/libeffects/lvm/tests/Android.bp b/media/libeffects/lvm/tests/Android.bp
index 7d7f8b9..8870bb0 100644
--- a/media/libeffects/lvm/tests/Android.bp
+++ b/media/libeffects/lvm/tests/Android.bp
@@ -14,6 +14,10 @@
     vendor: true,
     gtest: true,
     host_supported: true,
+    // TODO(b/269868814)
+    test_options: {
+        unit_test: false,
+    },
     srcs: [
         "EffectReverbTest.cpp",
         "EffectTestHelper.cpp",
diff --git a/media/libmedia/tests/codeclist/Android.bp b/media/libmedia/tests/codeclist/Android.bp
index 2ed3126..6f3010c 100644
--- a/media/libmedia/tests/codeclist/Android.bp
+++ b/media/libmedia/tests/codeclist/Android.bp
@@ -25,7 +25,7 @@
 
 cc_test {
     name: "CodecListTest",
-    test_suites: ["device-tests", "mts"],
+    test_suites: ["device-tests"],
     gtest: true,
 
     // Support multilib variants (using different suffix per sub-architecture), which is needed on
diff --git a/media/libmedia/xsd/api/current.txt b/media/libmedia/xsd/api/current.txt
index 73b5f8d..35aa213 100644
--- a/media/libmedia/xsd/api/current.txt
+++ b/media/libmedia/xsd/api/current.txt
@@ -47,7 +47,9 @@
     method public java.util.List<media.profiles.EncoderProfile> getEncoderProfile_optional();
     method public java.util.List<media.profiles.CamcorderProfiles.ImageDecodingOptional> getImageDecoding_optional();
     method public java.util.List<media.profiles.CamcorderProfiles.ImageEncodingOptional> getImageEncoding_optional();
+    method public int getStartOffsetMs();
     method public void setCameraId(int);
+    method public void setStartOffsetMs(int);
   }
 
   public static class CamcorderProfiles.ImageDecodingOptional {
diff --git a/media/libmedia/xsd/media_profiles.xsd b/media/libmedia/xsd/media_profiles.xsd
index 9664456..dcc3028 100644
--- a/media/libmedia/xsd/media_profiles.xsd
+++ b/media/libmedia/xsd/media_profiles.xsd
@@ -49,6 +49,7 @@
             </xs:element>
         </xs:choice>
         <xs:attribute name="cameraId" type="xs:int"/>
+        <xs:attribute name="startOffsetMs" type="xs:int"/>
     </xs:complexType>
     <xs:complexType name="EncoderProfile">
         <xs:sequence>
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index e50880a..e63e574 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -864,6 +864,9 @@
             return NAME_NOT_FOUND;
         };
     }
+
+    // we want an empty metrics record for any early getMetrics() call
+    // this should be the *only* initMediametrics() call that's not on the Looper thread
     initMediametrics();
 }
 
@@ -872,8 +875,17 @@
     mResourceManagerProxy->removeClient();
 
     flushMediametrics();
+
+    // clean any saved metrics info we stored as part of configure()
+    if (mConfigureMsg != nullptr) {
+        mediametrics_handle_t metricsHandle;
+        if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
+            mediametrics_delete(metricsHandle);
+        }
+    }
 }
 
+// except for in constructor, called from the looper thread (and therefore mutexed)
 void MediaCodec::initMediametrics() {
     if (mMetricsHandle == 0) {
         mMetricsHandle = mediametrics_create(kCodecKeyName);
@@ -903,11 +915,12 @@
 }
 
 void MediaCodec::updateMediametrics() {
-    ALOGV("MediaCodec::updateMediametrics");
     if (mMetricsHandle == 0) {
         return;
     }
 
+    Mutex::Autolock _lock(mMetricsLock);
+
     if (mLatencyHist.getCount() != 0 ) {
         mediametrics_setInt64(mMetricsHandle, kCodecLatencyMax, mLatencyHist.getMax());
         mediametrics_setInt64(mMetricsHandle, kCodecLatencyMin, mLatencyHist.getMin());
@@ -1020,6 +1033,8 @@
 }
 
 
+// called to update info being passed back via getMetrics(), which is a
+// unique copy for that call, no concurrent access worries.
 void MediaCodec::updateEphemeralMediametrics(mediametrics_handle_t item) {
     ALOGD("MediaCodec::updateEphemeralMediametrics()");
 
@@ -1059,7 +1074,13 @@
 }
 
 void MediaCodec::flushMediametrics() {
+    ALOGD("flushMediametrics");
+
+    // update does its own mutex locking
     updateMediametrics();
+
+    // ensure mutex while we do our own work
+    Mutex::Autolock _lock(mMetricsLock);
     if (mMetricsHandle != 0) {
         if (mediametrics_count(mMetricsHandle) > 0) {
             mediametrics_selfRecord(mMetricsHandle);
@@ -1575,6 +1596,8 @@
     }
     msg->setString("name", name);
 
+    // initial naming setup covers the period before the first call to ::configure().
+    // after that, we manage this through ::configure() and the setup message.
     if (mMetricsHandle != 0) {
         mediametrics_setCString(mMetricsHandle, kCodecCodec, name.c_str());
         mediametrics_setCString(mMetricsHandle, kCodecMode, toCodecMode(mDomain));
@@ -1647,23 +1670,28 @@
         const sp<IDescrambler> &descrambler,
         uint32_t flags) {
     sp<AMessage> msg = new AMessage(kWhatConfigure, this);
+    mediametrics_handle_t nextMetricsHandle = mediametrics_create(kCodecKeyName);
 
     // TODO: validity check log-session-id: it should be a 32-hex-digit.
     format->findString("log-session-id", &mLogSessionId);
 
-    if (mMetricsHandle != 0) {
+    if (nextMetricsHandle != 0) {
         int32_t profile = 0;
         if (format->findInt32("profile", &profile)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecProfile, profile);
+            mediametrics_setInt32(nextMetricsHandle, kCodecProfile, profile);
         }
         int32_t level = 0;
         if (format->findInt32("level", &level)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecLevel, level);
+            mediametrics_setInt32(nextMetricsHandle, kCodecLevel, level);
         }
-        mediametrics_setInt32(mMetricsHandle, kCodecEncoder,
+        mediametrics_setInt32(nextMetricsHandle, kCodecEncoder,
                               (flags & CONFIGURE_FLAG_ENCODE) ? 1 : 0);
 
-        mediametrics_setCString(mMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
+        mediametrics_setCString(nextMetricsHandle, kCodecLogSessionId, mLogSessionId.c_str());
+
+        // moved here from ::init()
+        mediametrics_setCString(nextMetricsHandle, kCodecCodec, mInitName.c_str());
+        mediametrics_setCString(nextMetricsHandle, kCodecMode, toCodecMode(mDomain));
     }
 
     if (mDomain == DOMAIN_VIDEO || mDomain == DOMAIN_IMAGE) {
@@ -1673,38 +1701,38 @@
             mRotationDegrees = 0;
         }
 
-        if (mMetricsHandle != 0) {
-            mediametrics_setInt32(mMetricsHandle, kCodecWidth, mWidth);
-            mediametrics_setInt32(mMetricsHandle, kCodecHeight, mHeight);
-            mediametrics_setInt32(mMetricsHandle, kCodecRotation, mRotationDegrees);
+        if (nextMetricsHandle != 0) {
+            mediametrics_setInt32(nextMetricsHandle, kCodecWidth, mWidth);
+            mediametrics_setInt32(nextMetricsHandle, kCodecHeight, mHeight);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRotation, mRotationDegrees);
             int32_t maxWidth = 0;
             if (format->findInt32("max-width", &maxWidth)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecMaxWidth, maxWidth);
+                mediametrics_setInt32(nextMetricsHandle, kCodecMaxWidth, maxWidth);
             }
             int32_t maxHeight = 0;
             if (format->findInt32("max-height", &maxHeight)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecMaxHeight, maxHeight);
+                mediametrics_setInt32(nextMetricsHandle, kCodecMaxHeight, maxHeight);
             }
             int32_t colorFormat = -1;
             if (format->findInt32("color-format", &colorFormat)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecColorFormat, colorFormat);
+                mediametrics_setInt32(nextMetricsHandle, kCodecColorFormat, colorFormat);
             }
             if (mDomain == DOMAIN_VIDEO) {
                 float frameRate = -1.0;
                 if (format->findFloat("frame-rate", &frameRate)) {
-                    mediametrics_setDouble(mMetricsHandle, kCodecFrameRate, frameRate);
+                    mediametrics_setDouble(nextMetricsHandle, kCodecFrameRate, frameRate);
                 }
                 float captureRate = -1.0;
                 if (format->findFloat("capture-rate", &captureRate)) {
-                    mediametrics_setDouble(mMetricsHandle, kCodecCaptureRate, captureRate);
+                    mediametrics_setDouble(nextMetricsHandle, kCodecCaptureRate, captureRate);
                 }
                 float operatingRate = -1.0;
                 if (format->findFloat("operating-rate", &operatingRate)) {
-                    mediametrics_setDouble(mMetricsHandle, kCodecOperatingRate, operatingRate);
+                    mediametrics_setDouble(nextMetricsHandle, kCodecOperatingRate, operatingRate);
                 }
                 int32_t priority = -1;
                 if (format->findInt32("priority", &priority)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecPriority, priority);
+                    mediametrics_setInt32(nextMetricsHandle, kCodecPriority, priority);
                 }
             }
             int32_t colorStandard = -1;
@@ -1735,14 +1763,14 @@
         }
 
     } else {
-        if (mMetricsHandle != 0) {
+        if (nextMetricsHandle != 0) {
             int32_t channelCount;
             if (format->findInt32(KEY_CHANNEL_COUNT, &channelCount)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecChannelCount, channelCount);
+                mediametrics_setInt32(nextMetricsHandle, kCodecChannelCount, channelCount);
             }
             int32_t sampleRate;
             if (format->findInt32(KEY_SAMPLE_RATE, &sampleRate)) {
-                mediametrics_setInt32(mMetricsHandle, kCodecSampleRate, sampleRate);
+                mediametrics_setInt32(nextMetricsHandle, kCodecSampleRate, sampleRate);
             }
         }
     }
@@ -1752,41 +1780,41 @@
                                                  enableMediaFormatShapingDefault);
         if (!enableShaping) {
             ALOGI("format shaping disabled, property '%s'", enableMediaFormatShapingProperty);
-            if (mMetricsHandle != 0) {
-                mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, -1);
+            if (nextMetricsHandle != 0) {
+                mediametrics_setInt32(nextMetricsHandle, kCodecShapingEnhanced, -1);
             }
         } else {
-            (void) shapeMediaFormat(format, flags);
+            (void) shapeMediaFormat(format, flags, nextMetricsHandle);
             // XXX: do we want to do this regardless of shaping enablement?
             mapFormat(mComponentName, format, nullptr, false);
         }
     }
 
     // push min/max QP to MediaMetrics after shaping
-    if (mDomain == DOMAIN_VIDEO && mMetricsHandle != 0) {
+    if (mDomain == DOMAIN_VIDEO && nextMetricsHandle != 0) {
         int32_t qpIMin = -1;
         if (format->findInt32("video-qp-i-min", &qpIMin)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMin, qpIMin);
         }
         int32_t qpIMax = -1;
         if (format->findInt32("video-qp-i-max", &qpIMax)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPIMax, qpIMax);
         }
         int32_t qpPMin = -1;
         if (format->findInt32("video-qp-p-min", &qpPMin)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMin, qpPMin);
         }
         int32_t qpPMax = -1;
         if (format->findInt32("video-qp-p-max", &qpPMax)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPPMax, qpPMax);
         }
         int32_t qpBMin = -1;
         if (format->findInt32("video-qp-b-min", &qpBMin)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMin, qpBMin);
         }
         int32_t qpBMax = -1;
         if (format->findInt32("video-qp-b-max", &qpBMax)) {
-            mediametrics_setInt32(mMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
+            mediametrics_setInt32(nextMetricsHandle, kCodecRequestedVideoQPBMax, qpBMax);
         }
     }
 
@@ -1802,13 +1830,23 @@
         } else {
             msg->setPointer("descrambler", descrambler.get());
         }
-        if (mMetricsHandle != 0) {
-            mediametrics_setInt32(mMetricsHandle, kCodecCrypto, 1);
+        if (nextMetricsHandle != 0) {
+            mediametrics_setInt32(nextMetricsHandle, kCodecCrypto, 1);
         }
     } else if (mFlags & kFlagIsSecure) {
         ALOGW("Crypto or descrambler should be given for secure codec");
     }
 
+    if (mConfigureMsg != nullptr) {
+        // if re-configuring, we have one of these from before.
+        // Recover the space before we discard the old mConfigureMsg
+        mediametrics_handle_t metricsHandle;
+        if (mConfigureMsg->findInt64("metrics", &metricsHandle)) {
+            mediametrics_delete(metricsHandle);
+        }
+    }
+    msg->setInt64("metrics", nextMetricsHandle);
+
     // save msg for reset
     mConfigureMsg = msg;
 
@@ -2135,7 +2173,8 @@
 
 status_t MediaCodec::shapeMediaFormat(
             const sp<AMessage> &format,
-            uint32_t flags) {
+            uint32_t flags,
+            mediametrics_handle_t metricsHandle) {
     ALOGV("shapeMediaFormat entry");
 
     if (!(flags & CONFIGURE_FLAG_ENCODE)) {
@@ -2191,39 +2230,39 @@
         sp<AMessage> deltas = updatedFormat->changesFrom(format, false /* deep */);
         size_t changeCount = deltas->countEntries();
         ALOGD("shapeMediaFormat: deltas(%zu): %s", changeCount, deltas->debugString(2).c_str());
-        if (mMetricsHandle != 0) {
-            mediametrics_setInt32(mMetricsHandle, kCodecShapingEnhanced, changeCount);
+        if (metricsHandle != 0) {
+            mediametrics_setInt32(metricsHandle, kCodecShapingEnhanced, changeCount);
         }
         if (changeCount > 0) {
-            if (mMetricsHandle != 0) {
+            if (metricsHandle != 0) {
                 // save some old properties before we fold in the new ones
                 int32_t bitrate;
                 if (format->findInt32(KEY_BIT_RATE, &bitrate)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalBitrate, bitrate);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalBitrate, bitrate);
                 }
                 int32_t qpIMin = -1;
                 if (format->findInt32("original-video-qp-i-min", &qpIMin)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMin, qpIMin);
                 }
                 int32_t qpIMax = -1;
                 if (format->findInt32("original-video-qp-i-max", &qpIMax)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPIMax, qpIMax);
                 }
                 int32_t qpPMin = -1;
                 if (format->findInt32("original-video-qp-p-min", &qpPMin)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMin, qpPMin);
                 }
                 int32_t qpPMax = -1;
                 if (format->findInt32("original-video-qp-p-max", &qpPMax)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPPMax, qpPMax);
                 }
                  int32_t qpBMin = -1;
                 if (format->findInt32("original-video-qp-b-min", &qpBMin)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMin, qpBMin);
                 }
                 int32_t qpBMax = -1;
                 if (format->findInt32("original-video-qp-b-max", &qpBMax)) {
-                    mediametrics_setInt32(mMetricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
+                    mediametrics_setInt32(metricsHandle, kCodecOriginalVideoQPBMax, qpBMax);
                 }
             }
             // NB: for any field in both format and deltas, the deltas copy wins
@@ -2809,26 +2848,44 @@
     return OK;
 }
 
+// this is the user-callable entry point
 status_t MediaCodec::getMetrics(mediametrics_handle_t &reply) {
 
     reply = 0;
 
-    // shouldn't happen, but be safe
-    if (mMetricsHandle == 0) {
-        return UNKNOWN_ERROR;
+    sp<AMessage> msg = new AMessage(kWhatGetMetrics, this);
+    sp<AMessage> response;
+    status_t err;
+    if ((err = PostAndAwaitResponse(msg, &response)) != OK) {
+        return err;
     }
 
-    // update any in-flight data that's not carried within the record
-    updateMediametrics();
-
-    // send it back to the caller.
-    reply = mediametrics_dup(mMetricsHandle);
-
-    updateEphemeralMediametrics(reply);
+    CHECK(response->findInt64("metrics", &reply));
 
     return OK;
 }
 
+// runs on the looper thread (for mutex purposes)
+void MediaCodec::onGetMetrics(const sp<AMessage>& msg) {
+
+    mediametrics_handle_t results = 0;
+
+    sp<AReplyToken> replyID;
+    CHECK(msg->senderAwaitsResponse(&replyID));
+
+    if (mMetricsHandle != 0) {
+        updateMediametrics();
+        results = mediametrics_dup(mMetricsHandle);
+        updateEphemeralMediametrics(results);
+    } else {
+        results = mediametrics_dup(mMetricsHandle);
+    }
+
+    sp<AMessage> response = new AMessage;
+    response->setInt64("metrics", results);
+    response->postReply(replyID);
+}
+
 status_t MediaCodec::getInputBuffers(Vector<sp<MediaCodecBuffer> > *buffers) const {
     sp<AMessage> msg = new AMessage(kWhatGetBuffers, this);
     msg->setInt32("portIndex", kPortIndexInput);
@@ -3879,6 +3936,13 @@
             break;
         }
 
+        case kWhatGetMetrics:
+        {
+            onGetMetrics(msg);
+            break;
+        }
+
+
         case kWhatConfigure:
         {
             if (mState != INITIALIZED) {
@@ -3899,6 +3963,18 @@
             sp<AMessage> format;
             CHECK(msg->findMessage("format", &format));
 
+            // start with a copy of the passed metrics info for use in this run
+            mediametrics_handle_t handle;
+            CHECK(msg->findInt64("metrics", &handle));
+            if (handle != 0) {
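+                // a previous configure left a metrics record behind; flush it
+                // before adopting the new one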
+                if (mMetricsHandle != 0) {
+                    flushMediametrics();
+                }
+                mMetricsHandle = mediametrics_dup(handle);
+                // and set some additional metrics values
+                initMediametrics();
+            }
+
             int32_t push;
             if (msg->findInt32("push-blank-buffers-on-shutdown", &push) && push != 0) {
                 mFlags |= kFlagPushBlankBuffersOnShutdown;
@@ -5508,6 +5584,9 @@
 }
 
 status_t MediaCodec::onSetParameters(const sp<AMessage> &params) {
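+    // reject parameter updates until the codec has been initialized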
+    if (mState == UNINITIALIZED || mState == INITIALIZING) {
+        return NO_INIT;
+    }
     updateLowLatency(params);
     mapFormat(mComponentName, params, nullptr, false);
     updateTunnelPeek(params);
diff --git a/media/libstagefright/NuMediaExtractor.cpp b/media/libstagefright/NuMediaExtractor.cpp
index 2b45f2d..5b39618 100644
--- a/media/libstagefright/NuMediaExtractor.cpp
+++ b/media/libstagefright/NuMediaExtractor.cpp
@@ -639,9 +639,11 @@
         numPageSamples = -1;
     }
 
+    // append numPageSamples after the sample data and grow the range to account for it.
     memcpy((uint8_t *)buffer->data() + mbuf->range_length(),
            &numPageSamples,
            sizeof(numPageSamples));
+    buffer->setRange(buffer->offset(), buffer->size() + sizeof(numPageSamples));
 
     uint32_t type;
     const void *data;
@@ -690,6 +692,8 @@
 
     ssize_t minIndex = fetchAllTrackSamples();
 
+    buffer->setRange(0, 0);     // start with an empty buffer
+
     if (minIndex < 0) {
         return ERROR_END_OF_STREAM;
     }
@@ -705,25 +709,25 @@
         sampleSize += sizeof(int32_t);
     }
 
+    // capacity() is ok since we cleared out the buffer
     if (buffer->capacity() < sampleSize) {
         return -ENOMEM;
     }
 
+    const size_t srclen = it->mBuffer->range_length();
     const uint8_t *src =
         (const uint8_t *)it->mBuffer->data()
             + it->mBuffer->range_offset();
 
-    memcpy((uint8_t *)buffer->data(), src, it->mBuffer->range_length());
+    memcpy((uint8_t *)buffer->data(), src, srclen);
+    buffer->setRange(0, srclen);
 
     status_t err = OK;
     if (info->mTrackFlags & kIsVorbis) {
+        // adjusts the buffer range when it appends the extra numPageSamples field
         err = appendVorbisNumPageSamples(it->mBuffer, buffer);
     }
 
-    if (err == OK) {
-        buffer->setRange(0, sampleSize);
-    }
-
     return err;
 }
 
diff --git a/media/libstagefright/include/media/stagefright/MediaCodec.h b/media/libstagefright/include/media/stagefright/MediaCodec.h
index 1d2d711..bc6765c 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodec.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodec.h
@@ -356,6 +356,7 @@
         kWhatSetNotification                = 'setN',
         kWhatDrmReleaseCrypto               = 'rDrm',
         kWhatCheckBatteryStats              = 'chkB',
+        kWhatGetMetrics                     = 'getM',
     };
 
     enum {
@@ -426,6 +427,7 @@
     sp<Surface> mSurface;
     SoftwareRenderer *mSoftRenderer;
 
+    Mutex mMetricsLock;   // guards metrics state (mMetricsHandle et al.)
     mediametrics_handle_t mMetricsHandle = 0;
     nsecs_t mLifetimeStartNs = 0;
     void initMediametrics();
@@ -433,6 +435,7 @@
     void flushMediametrics();
     void updateEphemeralMediametrics(mediametrics_handle_t item);
     void updateLowLatency(const sp<AMessage> &msg);
+    void onGetMetrics(const sp<AMessage>& msg);
     constexpr const char *asString(TunnelPeekState state, const char *default_string="?");
     void updateTunnelPeek(const sp<AMessage> &msg);
     void updatePlaybackDuration(const sp<AMessage> &msg);
@@ -471,7 +474,8 @@
     // the (possibly) updated format is returned in place.
     status_t shapeMediaFormat(
             const sp<AMessage> &format,
-            uint32_t flags);
+            uint32_t flags,
+            mediametrics_handle_t handle);
 
     // populate the format shaper library with information for this codec encoding
     // for the indicated media type
diff --git a/media/libstagefright/rtsp/AAVCAssembler.cpp b/media/libstagefright/rtsp/AAVCAssembler.cpp
index ddf797c..88f7be7 100644
--- a/media/libstagefright/rtsp/AAVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AAVCAssembler.cpp
@@ -332,6 +332,11 @@
 }
 
 bool AAVCAssembler::dropFramesUntilIframe(const sp<ABuffer> &buffer) {
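+    // guard against an empty buffer before reading the NAL header below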
+    if (buffer->size() == 0) {
+        ALOGE("b/230630526 buffer->size() == 0");
+        android_errorWriteLog(0x534e4554, "230630526");
+        return false;
+    }
     const uint8_t *data = buffer->data();
     unsigned nalType = data[0] & 0x1f;
     if (!mFirstIFrameProvided && nalType < 0x5) {
@@ -624,8 +629,7 @@
     int32_t firstSeqNo = buffer->int32Data();
 
     // This only works for FU-A type & non-start sequence
-    int32_t nalType = buffer->size() >= 1 ? buffer->data()[0] & 0x1f : -1;
-    if (nalType != 28 || (buffer->size() >= 2 && buffer->data()[1] & 0x80)) {
+    if (buffer->size() < 2 || (buffer->data()[0] & 0x1f) != 28 || buffer->data()[1] & 0x80) {
         return firstSeqNo;
     }
 
diff --git a/media/libstagefright/rtsp/AHEVCAssembler.cpp b/media/libstagefright/rtsp/AHEVCAssembler.cpp
index bb42d1f..72dd981 100644
--- a/media/libstagefright/rtsp/AHEVCAssembler.cpp
+++ b/media/libstagefright/rtsp/AHEVCAssembler.cpp
@@ -629,13 +629,13 @@
 
 int32_t AHEVCAssembler::pickStartSeq(const Queue *queue,
         uint32_t first, int64_t play, int64_t jit) {
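+    // callers must pass a non-empty queue; fail fast rather than dereference begin() below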
+    CHECK(!queue->empty());
     // pick the first sequence number that has the start bit.
     sp<ABuffer> buffer = *(queue->begin());
     int32_t firstSeqNo = buffer->int32Data();
 
     // This only works for FU-A type & non-start sequence
-    unsigned nalType = buffer->data()[0] & 0x1f;
-    if (nalType != 28 || buffer->data()[2] & 0x80) {
+    if (buffer->size() < 3 || (buffer->data()[0] & 0x1f) != 28 || buffer->data()[2] & 0x80) {
         return firstSeqNo;
     }
 
@@ -645,7 +645,7 @@
         if (rtpTime + jit >= play) {
             break;
         }
-        if ((data[2] & 0x80)) {
+        if (it->size() >= 3 && (data[2] & 0x80)) {
             const int32_t seqNo = it->int32Data();
             ALOGE("finding [HEAD] pkt. \t Seq# (%d ~ )[%d", firstSeqNo, seqNo);
             firstSeqNo = seqNo;
diff --git a/media/utils/ServiceUtilities.cpp b/media/utils/ServiceUtilities.cpp
index 07f4529..83b84e3 100644
--- a/media/utils/ServiceUtilities.cpp
+++ b/media/utils/ServiceUtilities.cpp
@@ -103,11 +103,10 @@
     AttributionSourceState myAttributionSource;
     myAttributionSource.uid = VALUE_OR_FATAL(android::legacy2aidl_uid_t_int32_t(getuid()));
     myAttributionSource.pid = VALUE_OR_FATAL(android::legacy2aidl_pid_t_int32_t(getpid()));
-    if (callerAttributionSource.token != nullptr) {
-        myAttributionSource.token = callerAttributionSource.token;
-    } else {
-        myAttributionSource.token = sp<BBinder>::make();
-    }
+    // Create a static token for audioserver requests, which identifies the
+    // audioserver to the app ops system
+    static sp<BBinder> appOpsToken = sp<BBinder>::make();
+    myAttributionSource.token = appOpsToken;
     myAttributionSource.next.push_back(nextAttributionSource);
 
     return std::optional<AttributionSourceState>{myAttributionSource};
diff --git a/services/audioflinger/AudioFlinger.cpp b/services/audioflinger/AudioFlinger.cpp
index f7576f6..23a3a36 100644
--- a/services/audioflinger/AudioFlinger.cpp
+++ b/services/audioflinger/AudioFlinger.cpp
@@ -280,7 +280,6 @@
                 return opPackageLegacy == package; }) == packages.end()) {
             ALOGW("The package name(%s) provided does not correspond to the uid %d",
                     attributionSource.packageName.value_or("").c_str(), attributionSource.uid);
-            checkedAttributionSource.packageName = std::optional<std::string>();
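+            // keep the provided package name; a mismatch is logged above but no longer stripped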
         }
     }
     return checkedAttributionSource;
@@ -579,6 +578,33 @@
     audio_io_handle_t io = AUDIO_IO_HANDLE_NONE;
     audio_port_handle_t portId = AUDIO_PORT_HANDLE_NONE;
     audio_attributes_t localAttr = *attr;
+
+    // TODO b/182392553: refactor or make clearer
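+    // Only trusted system callers may supply their own uid/pid; otherwise substitute
+    // the binder calling identity before validating the attribution package below.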
+    pid_t clientPid =
+        VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(client.attributionSource.pid));
+    bool updatePid = (clientPid == (pid_t)-1);
+    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
+
+    AttributionSourceState adjAttributionSource = client.attributionSource;
+    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
+        uid_t clientUid =
+            VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_uid_t(client.attributionSource.uid));
+        ALOGW_IF(clientUid != callingUid,
+                "%s uid %d tried to pass itself off as %d",
+                __FUNCTION__, callingUid, clientUid);
+        adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
+        updatePid = true;
+    }
+    if (updatePid) {
+        const pid_t callingPid = IPCThreadState::self()->getCallingPid();
+        ALOGW_IF(clientPid != (pid_t)-1 && clientPid != callingPid,
+                 "%s uid %d pid %d tried to pass itself off as pid %d",
+                 __func__, callingUid, callingPid, clientPid);
+        adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
+    }
+    adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+            adjAttributionSource);
+
     if (direction == MmapStreamInterface::DIRECTION_OUTPUT) {
         audio_config_t fullConfig = AUDIO_CONFIG_INITIALIZER;
         fullConfig.sample_rate = config->sample_rate;
@@ -588,7 +614,7 @@
         bool isSpatialized;
         ret = AudioSystem::getOutputForAttr(&localAttr, &io,
                                             actualSessionId,
-                                            &streamType, client.attributionSource,
+                                            &streamType, adjAttributionSource,
                                             &fullConfig,
                                             (audio_output_flags_t)(AUDIO_OUTPUT_FLAG_MMAP_NOIRQ |
                                                     AUDIO_OUTPUT_FLAG_DIRECT),
@@ -599,7 +625,7 @@
         ret = AudioSystem::getInputForAttr(&localAttr, &io,
                                               RECORD_RIID_INVALID,
                                               actualSessionId,
-                                              client.attributionSource,
+                                              adjAttributionSource,
                                               config,
                                               AUDIO_INPUT_FLAG_MMAP_NOIRQ, deviceId, &portId);
     }
@@ -1048,7 +1074,7 @@
     audio_attributes_t localAttr = input.attr;
 
     AttributionSourceState adjAttributionSource = input.clientInfo.attributionSource;
-    if (!isAudioServerOrMediaServerUid(callingUid)) {
+    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
         ALOGW_IF(clientUid != callingUid,
                 "%s uid %d tried to pass itself off as %d",
                 __FUNCTION__, callingUid, clientUid);
@@ -1064,6 +1090,8 @@
         clientPid = callingPid;
         adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
     }
+    adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+            adjAttributionSource);
 
     audio_session_t sessionId = input.sessionId;
     if (sessionId == AUDIO_SESSION_ALLOCATE) {
@@ -2229,7 +2257,7 @@
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
     const uid_t currentUid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(
            adjAttributionSource.uid));
-    if (!isAudioServerOrMediaServerUid(callingUid)) {
+    if (!isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
         ALOGW_IF(currentUid != callingUid,
                 "%s uid %d tried to pass itself off as %d",
                 __FUNCTION__, callingUid, currentUid);
@@ -2245,7 +2273,8 @@
                  __func__, callingUid, callingPid, currentPid);
         adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
     }
-
+    adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(
+            adjAttributionSource);
     // we don't yet support anything other than linear PCM
     if (!audio_is_valid_format(input.config.format) || !audio_is_linear_pcm(input.config.format)) {
         ALOGE("createRecord() invalid format %#x", input.config.format);
@@ -3862,7 +3891,7 @@
     const uid_t callingUid = IPCThreadState::self()->getCallingUid();
     adjAttributionSource.uid = VALUE_OR_RETURN_STATUS(legacy2aidl_uid_t_int32_t(callingUid));
     pid_t currentPid = VALUE_OR_RETURN_STATUS(aidl2legacy_int32_t_pid_t(adjAttributionSource.pid));
-    if (currentPid == -1 || !isAudioServerOrMediaServerUid(callingUid)) {
+    if (currentPid == -1 || !isAudioServerOrMediaServerOrSystemServerOrRootUid(callingUid)) {
         const pid_t callingPid = IPCThreadState::self()->getCallingPid();
         ALOGW_IF(currentPid != -1 && currentPid != callingPid,
                  "%s uid %d pid %d tried to pass itself off as pid %d",
@@ -3870,6 +3899,7 @@
         adjAttributionSource.pid = VALUE_OR_RETURN_STATUS(legacy2aidl_pid_t_int32_t(callingPid));
         currentPid = callingPid;
     }
+    adjAttributionSource = AudioFlinger::checkAttributionSourcePackage(adjAttributionSource);
 
     ALOGV("createEffect pid %d, effectClient %p, priority %d, sessionId %d, io %d, factory %p",
           adjAttributionSource.pid, effectClient.get(), priority, sessionId, io,
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 07e82a8..683e320 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -8165,8 +8165,6 @@
     audio_input_flags_t inputFlags = mInput->flags;
     audio_input_flags_t requestedFlags = *flags;
     uint32_t sampleRate;
-    AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
-            attributionSource);
 
     lStatus = initCheck();
     if (lStatus != NO_ERROR) {
@@ -8181,7 +8179,7 @@
     }
 
     if (maxSharedAudioHistoryMs != 0) {
-        if (!captureHotwordAllowed(checkedAttributionSource)) {
+        if (!captureHotwordAllowed(attributionSource)) {
             lStatus = PERMISSION_DENIED;
             goto Exit;
         }
@@ -8302,16 +8300,16 @@
         Mutex::Autolock _l(mLock);
         int32_t startFrames = -1;
         if (!mSharedAudioPackageName.empty()
-                && mSharedAudioPackageName == checkedAttributionSource.packageName
+                && mSharedAudioPackageName == attributionSource.packageName
                 && mSharedAudioSessionId == sessionId
-                && captureHotwordAllowed(checkedAttributionSource)) {
+                && captureHotwordAllowed(attributionSource)) {
             startFrames = mSharedAudioStartFrames;
         }
 
         track = new RecordTrack(this, client, attr, sampleRate,
                       format, channelMask, frameCount,
                       nullptr /* buffer */, (size_t)0 /* bufferSize */, sessionId, creatorPid,
-                      checkedAttributionSource, *flags, TrackBase::TYPE_DEFAULT, portId,
+                      attributionSource, *flags, TrackBase::TYPE_DEFAULT, portId,
                       startFrames);
 
         lStatus = track->initCheck();
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index 6135020..83a8bb0 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -529,10 +529,7 @@
             id, attr.flags);
         return nullptr;
     }
-
-    AttributionSourceState checkedAttributionSource = AudioFlinger::checkAttributionSourcePackage(
-            attributionSource);
-    return new OpPlayAudioMonitor(checkedAttributionSource, attr.usage, id);
+    return new OpPlayAudioMonitor(attributionSource, attr.usage, id);
 }
 
 AudioFlinger::PlaybackThread::OpPlayAudioMonitor::OpPlayAudioMonitor(
diff --git a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
index df49bba..49224c5 100644
--- a/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
+++ b/services/audiopolicy/service/AudioPolicyInterfaceImpl.cpp
@@ -352,31 +352,20 @@
     ALOGV("%s()", __func__);
     Mutex::Autolock _l(mLock);
 
-    // TODO b/182392553: refactor or remove
-    AttributionSourceState adjAttributionSource = attributionSource;
-    const uid_t callingUid = IPCThreadState::self()->getCallingUid();
-    if (!isAudioServerOrMediaServerUid(callingUid) || attributionSource.uid == -1) {
-        int32_t callingUidAidl = VALUE_OR_RETURN_BINDER_STATUS(
-            legacy2aidl_uid_t_int32_t(callingUid));
-        ALOGW_IF(attributionSource.uid != -1 && attributionSource.uid != callingUidAidl,
-                "%s uid %d tried to pass itself off as %d", __func__,
-                callingUidAidl, attributionSource.uid);
-        adjAttributionSource.uid = callingUidAidl;
-    }
     if (!mPackageManager.allowPlaybackCapture(VALUE_OR_RETURN_BINDER_STATUS(
-        aidl2legacy_int32_t_uid_t(adjAttributionSource.uid)))) {
+        aidl2legacy_int32_t_uid_t(attributionSource.uid)))) {
         attr.flags = static_cast<audio_flags_mask_t>(attr.flags | AUDIO_FLAG_NO_MEDIA_PROJECTION);
     }
     if (((attr.flags & (AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE)) != 0)
-            && !bypassInterruptionPolicyAllowed(adjAttributionSource)) {
+            && !bypassInterruptionPolicyAllowed(attributionSource)) {
         attr.flags = static_cast<audio_flags_mask_t>(
                 attr.flags & ~(AUDIO_FLAG_BYPASS_INTERRUPTION_POLICY|AUDIO_FLAG_BYPASS_MUTE));
     }
 
     if (attr.content_type == AUDIO_CONTENT_TYPE_ULTRASOUND) {
-        if (!accessUltrasoundAllowed(adjAttributionSource)) {
+        if (!accessUltrasoundAllowed(attributionSource)) {
             ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
-                    __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+                    __func__, attributionSource.uid, attributionSource.pid);
             return binderStatusFromStatusT(PERMISSION_DENIED);
         }
     }
@@ -386,7 +375,7 @@
     bool isSpatialized = false;
     status_t result = mAudioPolicyManager->getOutputForAttr(&attr, &output, session,
                                                             &stream,
-                                                            adjAttributionSource,
+                                                            attributionSource,
                                                             &config,
                                                             &flags, &selectedDeviceId, &portId,
                                                             &secondaryOutputs,
@@ -401,20 +390,20 @@
             break;
         case AudioPolicyInterface::API_OUTPUT_TELEPHONY_TX:
             if (((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0)
-                && !callAudioInterceptionAllowed(adjAttributionSource)) {
+                && !callAudioInterceptionAllowed(attributionSource)) {
                 ALOGE("%s() permission denied: call redirection not allowed for uid %d",
-                    __func__, adjAttributionSource.uid);
+                    __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
-            } else if (!modifyPhoneStateAllowed(adjAttributionSource)) {
+            } else if (!modifyPhoneStateAllowed(attributionSource)) {
                 ALOGE("%s() permission denied: modify phone state not allowed for uid %d",
-                    __func__, adjAttributionSource.uid);
+                    __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
             }
             break;
         case AudioPolicyInterface::API_OUT_MIX_PLAYBACK:
-            if (!modifyAudioRoutingAllowed(adjAttributionSource)) {
+            if (!modifyAudioRoutingAllowed(attributionSource)) {
                 ALOGE("%s() permission denied: modify audio routing not allowed for uid %d",
-                    __func__, adjAttributionSource.uid);
+                    __func__, attributionSource.uid);
                 result = PERMISSION_DENIED;
             }
             break;
@@ -427,7 +416,7 @@
 
     if (result == NO_ERROR) {
         sp<AudioPlaybackClient> client =
-                new AudioPlaybackClient(attr, output, adjAttributionSource, session,
+                new AudioPlaybackClient(attr, output, attributionSource, session,
                     portId, selectedDeviceId, stream, isSpatialized);
         mAudioPlaybackClients.add(portId, client);
 
@@ -613,33 +602,8 @@
         return binderStatusFromStatusT(BAD_VALUE);
     }
 
-    // Make sure attribution source represents the current caller
-    AttributionSourceState adjAttributionSource = attributionSource;
-    // TODO b/182392553: refactor or remove
-    bool updatePid = (attributionSource.pid == -1);
-    const uid_t callingUid =IPCThreadState::self()->getCallingUid();
-    const uid_t currentUid = VALUE_OR_RETURN_BINDER_STATUS(aidl2legacy_int32_t_uid_t(
-            attributionSource.uid));
-    if (!isAudioServerOrMediaServerUid(callingUid)) {
-        ALOGW_IF(currentUid != (uid_t)-1 && currentUid != callingUid,
-                "%s uid %d tried to pass itself off as %d", __FUNCTION__, callingUid,
-                currentUid);
-        adjAttributionSource.uid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_uid_t_int32_t(
-                callingUid));
-        updatePid = true;
-    }
-
-    if (updatePid) {
-        const int32_t callingPid = VALUE_OR_RETURN_BINDER_STATUS(legacy2aidl_pid_t_int32_t(
-            IPCThreadState::self()->getCallingPid()));
-        ALOGW_IF(attributionSource.pid != -1 && attributionSource.pid != callingPid,
-                 "%s uid %d pid %d tried to pass itself off as pid %d",
-                 __func__, adjAttributionSource.uid, callingPid, attributionSource.pid);
-        adjAttributionSource.pid = callingPid;
-    }
-
     RETURN_IF_BINDER_ERROR(binderStatusFromStatusT(validateUsage(attr,
-            adjAttributionSource)));
+            attributionSource)));
 
     // check calling permissions.
     // Capturing from the following sources does not require permission RECORD_AUDIO
@@ -650,17 +614,17 @@
     // type is API_INPUT_MIX_EXT_POLICY_REROUTE and by AudioService if a media projection
     // is used and input type is API_INPUT_MIX_PUBLIC_CAPTURE_PLAYBACK
     // - ECHO_REFERENCE source is controlled by captureAudioOutputAllowed()
-    if (!(recordingAllowed(adjAttributionSource, inputSource)
+    if (!(recordingAllowed(attributionSource, inputSource)
             || inputSource == AUDIO_SOURCE_FM_TUNER
             || inputSource == AUDIO_SOURCE_REMOTE_SUBMIX
             || inputSource == AUDIO_SOURCE_ECHO_REFERENCE)) {
         ALOGE("%s permission denied: recording not allowed for %s",
-                __func__, adjAttributionSource.toString().c_str());
+                __func__, attributionSource.toString().c_str());
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureOutput = captureAudioOutputAllowed(adjAttributionSource);
-    bool canInterceptCallAudio = callAudioInterceptionAllowed(adjAttributionSource);
+    bool canCaptureOutput = captureAudioOutputAllowed(attributionSource);
+    bool canInterceptCallAudio = callAudioInterceptionAllowed(attributionSource);
     bool isCallAudioSource = inputSource == AUDIO_SOURCE_VOICE_UPLINK
              || inputSource == AUDIO_SOURCE_VOICE_DOWNLINK
              || inputSource == AUDIO_SOURCE_VOICE_CALL;
@@ -674,11 +638,11 @@
     }
     if (inputSource == AUDIO_SOURCE_FM_TUNER
         && !canCaptureOutput
-        && !captureTunerAudioInputAllowed(adjAttributionSource)) {
+        && !captureTunerAudioInputAllowed(attributionSource)) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
-    bool canCaptureHotword = captureHotwordAllowed(adjAttributionSource);
+    bool canCaptureHotword = captureHotwordAllowed(attributionSource);
     if ((inputSource == AUDIO_SOURCE_HOTWORD) && !canCaptureHotword) {
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
@@ -686,14 +650,14 @@
     if (((flags & AUDIO_INPUT_FLAG_HW_HOTWORD) != 0)
             && !canCaptureHotword) {
         ALOGE("%s: permission denied: hotword mode not allowed"
-              " for uid %d pid %d", __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+              " for uid %d pid %d", __func__, attributionSource.uid, attributionSource.pid);
         return binderStatusFromStatusT(PERMISSION_DENIED);
     }
 
     if (attr.source == AUDIO_SOURCE_ULTRASOUND) {
-        if (!accessUltrasoundAllowed(adjAttributionSource)) {
+        if (!accessUltrasoundAllowed(attributionSource)) {
             ALOGE("%s: permission denied: ultrasound not allowed for uid %d pid %d",
-                    __func__, adjAttributionSource.uid, adjAttributionSource.pid);
+                    __func__, attributionSource.uid, attributionSource.pid);
             return binderStatusFromStatusT(PERMISSION_DENIED);
         }
     }
@@ -708,7 +672,7 @@
             AutoCallerClear acc;
             // the audio_in_acoustics_t parameter is ignored by get_input()
             status = mAudioPolicyManager->getInputForAttr(&attr, &input, riid, session,
-                                                          adjAttributionSource, &config,
+                                                          attributionSource, &config,
                                                           flags, &selectedDeviceId,
                                                           &inputType, &portId);
 
@@ -737,7 +701,7 @@
                 }
                 break;
             case AudioPolicyInterface::API_INPUT_MIX_EXT_POLICY_REROUTE:
-                if (!(modifyAudioRoutingAllowed(adjAttributionSource)
+                if (!(modifyAudioRoutingAllowed(attributionSource)
                         || ((attr.flags & AUDIO_FLAG_CALL_REDIRECTION) != 0
                             && canInterceptCallAudio))) {
                     ALOGE("%s permission denied for remote submix capture", __func__);
@@ -760,7 +724,7 @@
         }
 
         sp<AudioRecordClient> client = new AudioRecordClient(attr, input, session, portId,
-                                                             selectedDeviceId, adjAttributionSource,
+                                                             selectedDeviceId, attributionSource,
                                                              canCaptureOutput, canCaptureHotword,
                                                              mOutputCommandThread);
         mAudioRecordClients.add(portId, client);