Merge "Fix codec output format for conversion"
diff --git a/camera/ndk/include/camera/NdkCameraCaptureSession.h b/camera/ndk/include/camera/NdkCameraCaptureSession.h
index 78e062a..5e0db60 100644
--- a/camera/ndk/include/camera/NdkCameraCaptureSession.h
+++ b/camera/ndk/include/camera/NdkCameraCaptureSession.h
@@ -434,7 +434,7 @@
  *
  */
 camera_status_t ACameraCaptureSession_getDevice(
-        ACameraCaptureSession* session, /*out*/ACameraDevice** device);
+        ACameraCaptureSession* session, /*out*/ACameraDevice** device) __INTRODUCED_IN(24);
 
 /**
  * Submit an array of requests to be captured in sequence as a burst in the minimum of time possible.
@@ -472,7 +472,7 @@
         ACameraCaptureSession* session,
         /*optional*/ACameraCaptureSession_captureCallbacks* callbacks,
         int numRequests, ACaptureRequest** requests,
-        /*optional*/int* captureSequenceId);
+        /*optional*/int* captureSequenceId) __INTRODUCED_IN(24);
 
 /**
  * Request endlessly repeating capture of a sequence of images by this capture session.
@@ -526,7 +526,7 @@
         ACameraCaptureSession* session,
         /*optional*/ACameraCaptureSession_captureCallbacks* callbacks,
         int numRequests, ACaptureRequest** requests,
-        /*optional*/int* captureSequenceId);
+        /*optional*/int* captureSequenceId) __INTRODUCED_IN(24);
 
 /**
  * Cancel any ongoing repeating capture set by {@link ACameraCaptureSession_setRepeatingRequest}.
@@ -549,7 +549,8 @@
  *         <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error</li>
  *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li></ul>
  */
-camera_status_t ACameraCaptureSession_stopRepeating(ACameraCaptureSession* session);
+camera_status_t ACameraCaptureSession_stopRepeating(ACameraCaptureSession* session)
+        __INTRODUCED_IN(24);
 
 /**
  * Discard all captures currently pending and in-progress as fast as possible.
@@ -589,7 +590,8 @@
  *         <li>{@link ACAMERA_ERROR_CAMERA_SERVICE} if the camera service encounters fatal error</li>
  *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li></ul>
  */
-camera_status_t ACameraCaptureSession_abortCaptures(ACameraCaptureSession* session);
+camera_status_t ACameraCaptureSession_abortCaptures(ACameraCaptureSession* session)
+        __INTRODUCED_IN(24);
 
 #endif /* __ANDROID_API__ >= 24 */
 
@@ -638,7 +640,7 @@
  *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons</li></ul>
  */
 camera_status_t ACameraCaptureSession_updateSharedOutput(ACameraCaptureSession* session,
-        ACaptureSessionOutput* output);
+        ACaptureSessionOutput* output) __INTRODUCED_IN(28);
 #endif /* __ANDROID_API__ >= 28 */
 
 __END_DECLS
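
The __INTRODUCED_IN(24)/__INTRODUCED_IN(28) annotations above record when each entry point became available; code that must also build against older API levels still needs its own guard, since the declarations themselves sit behind the header's #if __ANDROID_API__ >= 24/28 blocks. A minimal sketch of a compile-time guard, assuming the usual NDK include paths (the helper name is hypothetical):

    #include <android/native_window.h>
    #include <camera/NdkCameraDevice.h>

    // Hypothetical helper: prefer the API-28 shared session output when the
    // build targets Android P or newer, otherwise fall back to the API-24 one.
    static camera_status_t makeSessionOutput(ANativeWindow* window,
                                             ACaptureSessionOutput** out) {
    #if __ANDROID_API__ >= 28
        // Tagged __INTRODUCED_IN(28); only declared when targeting API 28+.
        return ACaptureSessionSharedOutput_create(window, out);
    #else
        // Tagged __INTRODUCED_IN(24).
        return ACaptureSessionOutput_create(window, out);
    #endif
    }

On builds with a lower minSdkVersion, a run-time check (for example android_get_device_api_level() from <android/api-level.h>) would additionally be needed before taking the API-28 path.
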
diff --git a/camera/ndk/include/camera/NdkCameraDevice.h b/camera/ndk/include/camera/NdkCameraDevice.h
index b715b12..7c13b34 100644
--- a/camera/ndk/include/camera/NdkCameraDevice.h
+++ b/camera/ndk/include/camera/NdkCameraDevice.h
@@ -176,7 +176,7 @@
  *         <li>{@link ACAMERA_OK} if the method call succeeds.</li>
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if device is NULL.</li></ul>
  */
-camera_status_t ACameraDevice_close(ACameraDevice* device);
+camera_status_t ACameraDevice_close(ACameraDevice* device) __INTRODUCED_IN(24);
 
 /**
  * Return the camera id associated with this camera device.
@@ -187,7 +187,7 @@
  * delete/free by the application. Also the returned string must not be used after the device
  * has been closed.
  */
-const char* ACameraDevice_getId(const ACameraDevice* device);
+const char* ACameraDevice_getId(const ACameraDevice* device) __INTRODUCED_IN(24);
 
 typedef enum {
     /**
@@ -290,7 +290,7 @@
  */
 camera_status_t ACameraDevice_createCaptureRequest(
         const ACameraDevice* device, ACameraDevice_request_template templateId,
-        /*out*/ACaptureRequest** request);
+        /*out*/ACaptureRequest** request) __INTRODUCED_IN(24);
 
 
 typedef struct ACaptureSessionOutputContainer ACaptureSessionOutputContainer;
@@ -313,7 +313,7 @@
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if container is NULL.</li></ul>
  */
 camera_status_t ACaptureSessionOutputContainer_create(
-        /*out*/ACaptureSessionOutputContainer** container);
+        /*out*/ACaptureSessionOutputContainer** container) __INTRODUCED_IN(24);
 
 /**
  * Free a capture session output container.
@@ -322,7 +322,8 @@
  *
  * @see ACaptureSessionOutputContainer_create
  */
-void            ACaptureSessionOutputContainer_free(ACaptureSessionOutputContainer* container);
+void            ACaptureSessionOutputContainer_free(ACaptureSessionOutputContainer* container)
+        __INTRODUCED_IN(24);
 
 /**
  * Create a ACaptureSessionOutput object.
@@ -344,7 +345,7 @@
  * @see ACaptureSessionOutputContainer_add
  */
 camera_status_t ACaptureSessionOutput_create(
-        ANativeWindow* anw, /*out*/ACaptureSessionOutput** output);
+        ANativeWindow* anw, /*out*/ACaptureSessionOutput** output) __INTRODUCED_IN(24);
 
 /**
  * Free a ACaptureSessionOutput object.
@@ -353,7 +354,7 @@
  *
  * @see ACaptureSessionOutput_create
  */
-void            ACaptureSessionOutput_free(ACaptureSessionOutput* output);
+void            ACaptureSessionOutput_free(ACaptureSessionOutput* output) __INTRODUCED_IN(24);
 
 /**
  * Add an {@link ACaptureSessionOutput} object to {@link ACaptureSessionOutputContainer}.
@@ -366,7 +367,8 @@
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if container or output is NULL.</li></ul>
  */
 camera_status_t ACaptureSessionOutputContainer_add(
-        ACaptureSessionOutputContainer* container, const ACaptureSessionOutput* output);
+        ACaptureSessionOutputContainer* container, const ACaptureSessionOutput* output)
+        __INTRODUCED_IN(24);
 
 /**
  * Remove an {@link ACaptureSessionOutput} object from {@link ACaptureSessionOutputContainer}.
@@ -382,7 +384,8 @@
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if container or output is NULL.</li></ul>
  */
 camera_status_t ACaptureSessionOutputContainer_remove(
-        ACaptureSessionOutputContainer* container, const ACaptureSessionOutput* output);
+        ACaptureSessionOutputContainer* container, const ACaptureSessionOutput* output)
+        __INTRODUCED_IN(24);
 
 /**
  * Create a new camera capture session by providing the target output set of {@link ANativeWindow}
@@ -663,7 +666,7 @@
         ACameraDevice* device,
         const ACaptureSessionOutputContainer*       outputs,
         const ACameraCaptureSession_stateCallbacks* callbacks,
-        /*out*/ACameraCaptureSession** session);
+        /*out*/ACameraCaptureSession** session) __INTRODUCED_IN(24);
 
 #endif /* __ANDROID_API__ >= 24 */
 
@@ -691,7 +694,7 @@
  * @see ACaptureSessionOutputContainer_add
  */
 camera_status_t ACaptureSessionSharedOutput_create(
-        ANativeWindow* anw, /*out*/ACaptureSessionOutput** output);
+        ANativeWindow* anw, /*out*/ACaptureSessionOutput** output) __INTRODUCED_IN(28);
 
 /**
  * Add a native window to shared ACaptureSessionOutput.
@@ -708,7 +711,8 @@
  *             window associated with ACaptureSessionOutput; or anw is already present inside
  *             ACaptureSessionOutput.</li></ul>
  */
-camera_status_t ACaptureSessionSharedOutput_add(ACaptureSessionOutput *output, ANativeWindow *anw);
+camera_status_t ACaptureSessionSharedOutput_add(ACaptureSessionOutput *output,
+        ANativeWindow *anw) __INTRODUCED_IN(28);
 
 /**
  * Remove a native window from shared ACaptureSessionOutput.
@@ -724,7 +728,7 @@
  *             ACaptureSessionOutput.</li></ul>
  */
 camera_status_t ACaptureSessionSharedOutput_remove(ACaptureSessionOutput *output,
-        ANativeWindow* anw);
+        ANativeWindow* anw) __INTRODUCED_IN(28);
 
 /**
  * Create a new camera capture session similar to {@link ACameraDevice_createCaptureSession}. This
@@ -757,7 +761,7 @@
         const ACaptureSessionOutputContainer* outputs,
         const ACaptureRequest* sessionParameters,
         const ACameraCaptureSession_stateCallbacks* callbacks,
-        /*out*/ACameraCaptureSession** session);
+        /*out*/ACameraCaptureSession** session) __INTRODUCED_IN(28);
 
 #endif /* __ANDROID_API__ >= 28 */
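
For context, the functions annotated in this header are normally used together in one creation sequence. A hedged sketch (error handling trimmed, cleanup of the output and container left to the caller, and window assumed to be a valid ANativeWindow):

    #include <camera/NdkCameraDevice.h>

    // Sketch: wrap a window in a session output, register it in a container,
    // and create a capture session on an already-opened ACameraDevice.
    static camera_status_t createPreviewSession(
            ACameraDevice* device, ANativeWindow* window,
            ACameraCaptureSession_stateCallbacks* callbacks,
            /*out*/ ACaptureSessionOutputContainer** container,
            /*out*/ ACaptureSessionOutput** output,
            /*out*/ ACameraCaptureSession** session) {
        camera_status_t status = ACaptureSessionOutputContainer_create(container);
        if (status != ACAMERA_OK) return status;

        status = ACaptureSessionOutput_create(window, output);
        if (status != ACAMERA_OK) return status;

        status = ACaptureSessionOutputContainer_add(*container, *output);
        if (status != ACAMERA_OK) return status;

        return ACameraDevice_createCaptureSession(device, *container, callbacks, session);
    }
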
 
diff --git a/camera/ndk/include/camera/NdkCameraManager.h b/camera/ndk/include/camera/NdkCameraManager.h
index e5b3ad8..ea76738 100644
--- a/camera/ndk/include/camera/NdkCameraManager.h
+++ b/camera/ndk/include/camera/NdkCameraManager.h
@@ -65,14 +65,14 @@
  * @return a {@link ACameraManager} instance.
  *
  */
-ACameraManager* ACameraManager_create();
+ACameraManager* ACameraManager_create() __INTRODUCED_IN(24);
 
 /**
  * <p>Delete the {@link ACameraManager} instance and free its resources. </p>
  *
  * @param manager the {@link ACameraManager} instance to be deleted.
  */
-void ACameraManager_delete(ACameraManager* manager);
+void ACameraManager_delete(ACameraManager* manager) __INTRODUCED_IN(24);
 
 /// Struct to hold list of camera devices
 typedef struct ACameraIdList {
@@ -102,14 +102,14 @@
  *         <li>{@link ACAMERA_ERROR_NOT_ENOUGH_MEMORY} if allocating memory fails.</li></ul>
  */
 camera_status_t ACameraManager_getCameraIdList(ACameraManager* manager,
-                                              /*out*/ACameraIdList** cameraIdList);
+        /*out*/ACameraIdList** cameraIdList) __INTRODUCED_IN(24);
 
 /**
  * Delete a list of camera devices allocated via {@link ACameraManager_getCameraIdList}.
  *
  * @param cameraIdList the {@link ACameraIdList} to be deleted.
  */
-void ACameraManager_deleteCameraIdList(ACameraIdList* cameraIdList);
+void ACameraManager_deleteCameraIdList(ACameraIdList* cameraIdList) __INTRODUCED_IN(24);
 
 /**
  * Definition of camera availability callbacks.
@@ -120,7 +120,8 @@
  *                 argument is owned by camera framework and will become invalid immediately after
  *                 this callback returns.
  */
-typedef void (*ACameraManager_AvailabilityCallback)(void* context, const char* cameraId);
+typedef void (*ACameraManager_AvailabilityCallback)(void* context,
+        const char* cameraId);
 
 /**
  * A listener for camera devices becoming available or unavailable to open.
@@ -168,7 +169,8 @@
  *                  {ACameraManager_AvailabilityCallbacks#onCameraUnavailable} is NULL.</li></ul>
  */
 camera_status_t ACameraManager_registerAvailabilityCallback(
-        ACameraManager* manager, const ACameraManager_AvailabilityCallbacks* callback);
+        ACameraManager* manager,
+        const ACameraManager_AvailabilityCallbacks* callback) __INTRODUCED_IN(24);
 
 /**
  * Unregister camera availability callbacks.
@@ -185,7 +187,8 @@
  *                  {ACameraManager_AvailabilityCallbacks#onCameraUnavailable} is NULL.</li></ul>
  */
 camera_status_t ACameraManager_unregisterAvailabilityCallback(
-        ACameraManager* manager, const ACameraManager_AvailabilityCallbacks* callback);
+        ACameraManager* manager,
+        const ACameraManager_AvailabilityCallbacks* callback) __INTRODUCED_IN(24);
 
 /**
  * Query the capabilities of a camera device. These capabilities are
@@ -211,7 +214,7 @@
  */
 camera_status_t ACameraManager_getCameraCharacteristics(
         ACameraManager* manager, const char* cameraId,
-        /*out*/ACameraMetadata** characteristics);
+        /*out*/ACameraMetadata** characteristics) __INTRODUCED_IN(24);
 
 /**
  * Open a connection to a camera with the given ID. The opened camera device will be
@@ -271,7 +274,7 @@
 camera_status_t ACameraManager_openCamera(
         ACameraManager* manager, const char* cameraId,
         ACameraDevice_StateCallbacks* callback,
-        /*out*/ACameraDevice** device);
+        /*out*/ACameraDevice** device) __INTRODUCED_IN(24);
 
 #endif /* __ANDROID_API__ >= 24 */
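
The ACameraManager functions annotated above are typically chained as below; a rough sketch that opens the first enumerated camera (callbacks is assumed to be a caller-owned, fully populated ACameraDevice_StateCallbacks):

    #include <camera/NdkCameraManager.h>

    // Sketch: enumerate camera IDs and open the first one.
    static camera_status_t openFirstCamera(ACameraDevice_StateCallbacks* callbacks,
                                           /*out*/ ACameraDevice** device) {
        ACameraManager* manager = ACameraManager_create();
        ACameraIdList* idList = nullptr;

        camera_status_t status = ACameraManager_getCameraIdList(manager, &idList);
        if (status == ACAMERA_OK && idList->numCameras > 0) {
            status = ACameraManager_openCamera(manager, idList->cameraIds[0],
                                               callbacks, device);
        }

        if (idList != nullptr) {
            ACameraManager_deleteCameraIdList(idList);
        }
        ACameraManager_delete(manager);
        return status;
    }
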
 
diff --git a/camera/ndk/include/camera/NdkCameraMetadata.h b/camera/ndk/include/camera/NdkCameraMetadata.h
index bdb1587..611e270 100644
--- a/camera/ndk/include/camera/NdkCameraMetadata.h
+++ b/camera/ndk/include/camera/NdkCameraMetadata.h
@@ -191,7 +191,8 @@
  *             of input tag value.</li></ul>
  */
 camera_status_t ACameraMetadata_getConstEntry(
-        const ACameraMetadata* metadata, uint32_t tag, /*out*/ACameraMetadata_const_entry* entry);
+        const ACameraMetadata* metadata,
+        uint32_t tag, /*out*/ACameraMetadata_const_entry* entry) __INTRODUCED_IN(24);
 
 /**
  * List all the entry tags in input {@link ACameraMetadata}.
@@ -208,7 +209,8 @@
  *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
  */
 camera_status_t ACameraMetadata_getAllTags(
-        const ACameraMetadata* metadata, /*out*/int32_t* numEntries, /*out*/const uint32_t** tags);
+        const ACameraMetadata* metadata,
+        /*out*/int32_t* numEntries, /*out*/const uint32_t** tags) __INTRODUCED_IN(24);
 
 /**
  * Create a copy of input {@link ACameraMetadata}.
@@ -220,14 +222,14 @@
  *
  * @return a valid ACameraMetadata pointer or NULL if the input metadata cannot be copied.
  */
-ACameraMetadata* ACameraMetadata_copy(const ACameraMetadata* src);
+ACameraMetadata* ACameraMetadata_copy(const ACameraMetadata* src) __INTRODUCED_IN(24);
 
 /**
  * Free a {@link ACameraMetadata} structure.
  *
  * @param metadata the {@link ACameraMetadata} to be freed.
  */
-void ACameraMetadata_free(ACameraMetadata* metadata);
+void ACameraMetadata_free(ACameraMetadata* metadata) __INTRODUCED_IN(24);
 
 #endif /* __ANDROID_API__ >= 24 */
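
ACameraMetadata_getConstEntry, annotated above, is the usual way to read a single characteristics field; a small hedged sketch reading the sensor orientation (the tag choice is only an example):

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Sketch: read ACAMERA_SENSOR_ORIENTATION from camera characteristics.
    static int32_t sensorOrientationDegrees(const ACameraMetadata* characteristics) {
        ACameraMetadata_const_entry entry = {};
        camera_status_t status = ACameraMetadata_getConstEntry(
                characteristics, ACAMERA_SENSOR_ORIENTATION, &entry);
        if (status != ACAMERA_OK || entry.count < 1) {
            return 0;  // treat missing data as "no rotation"
        }
        return entry.data.i32[0];
    }
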
 
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 3010646..c1f5ddc 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -479,11 +479,26 @@
      * Otherwise will always be present.</p>
      * <p>The maximum number of regions supported by the device is determined by the value
      * of android.control.maxRegionsAe.</p>
-     * <p>The coordinate system is based on the active pixel array,
-     * with (0,0) being the top-left pixel in the active pixel array, and
+     * <p>For devices not supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+     * system always follows that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with (0,0) being
+     * the top-left pixel in the active pixel array, and
      * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
-     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the
-     * bottom-right pixel in the active pixel array.</p>
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right pixel in the
+     * active pixel array.</p>
+     * <p>For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+     * system depends on the mode being set.
+     * When the distortion correction mode is OFF, the coordinate system follows
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with
+     * <code>(0, 0)</code> being the top-left pixel of the pre-correction active array, and
+     * (ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.width - 1,
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right
+     * pixel in the pre-correction active pixel array.
+     * When the distortion correction mode is not OFF, the coordinate system follows
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
+     * <code>(0, 0)</code> being the top-left pixel of the active array, and
+     * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right pixel in the
+     * active pixel array.</p>
      * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
      * for every pixel in the area. This means that a large metering area
      * with the same weight as a smaller area will have more effect in
@@ -504,8 +519,10 @@
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
      * ymax.</p>
      *
+     * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      */
     ACAMERA_CONTROL_AE_REGIONS =                                // int32[5*area_count]
             ACAMERA_CONTROL_START + 4,
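
The coordinate-system rule introduced above (and repeated for the AF and AWB regions below) boils down to picking the right reference rectangle before converting UI coordinates into a metering region. A hedged sketch of just that selection step; the actual tap-to-region math is omitted:

    #include <camera/NdkCameraMetadata.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Sketch: pick the array rectangle that AE/AF/AWB region coordinates refer to.
    // 'supportsDistortionCorrection' and 'distortionModeOff' reflect whether the
    // device lists ACAMERA_DISTORTION_CORRECTION_MODE and whether it is set to OFF.
    static camera_status_t meteringReferenceRect(
            const ACameraMetadata* characteristics,
            bool supportsDistortionCorrection, bool distortionModeOff,
            /*out*/ ACameraMetadata_const_entry* rect) {
        uint32_t tag = ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE;
        if (supportsDistortionCorrection && distortionModeOff) {
            tag = ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE;
        }
        return ACameraMetadata_getConstEntry(characteristics, tag, rect);
    }
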
@@ -641,11 +658,26 @@
      * Otherwise will always be present.</p>
      * <p>The maximum number of focus areas supported by the device is determined by the value
      * of android.control.maxRegionsAf.</p>
-     * <p>The coordinate system is based on the active pixel array,
-     * with (0,0) being the top-left pixel in the active pixel array, and
+     * <p>For devices not supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+     * system always follows that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with (0,0) being
+     * the top-left pixel in the active pixel array, and
      * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
-     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the
-     * bottom-right pixel in the active pixel array.</p>
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right pixel in the
+     * active pixel array.</p>
+     * <p>For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+     * system depends on the mode being set.
+     * When the distortion correction mode is OFF, the coordinate system follows
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with
+     * <code>(0, 0)</code> being the top-left pixel of the pre-correction active array, and
+     * (ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.width - 1,
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right
+     * pixel in the pre-correction active pixel array.
+     * When the distortion correction mode is not OFF, the coordinate system follows
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
+     * <code>(0, 0)</code> being the top-left pixel of the active array, and
+     * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right pixel in the
+     * active pixel array.</p>
      * <p>The weight must be within <code>[0, 1000]</code>, and represents a weight
      * for every pixel in the area. This means that a large metering area
      * with the same weight as a smaller area will have more effect in
@@ -667,8 +699,10 @@
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
      * ymax.</p>
      *
+     * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      */
     ACAMERA_CONTROL_AF_REGIONS =                                // int32[5*area_count]
             ACAMERA_CONTROL_START + 8,
@@ -800,11 +834,26 @@
      * Otherwise will always be present.</p>
      * <p>The maximum number of regions supported by the device is determined by the value
      * of android.control.maxRegionsAwb.</p>
-     * <p>The coordinate system is based on the active pixel array,
-     * with (0,0) being the top-left pixel in the active pixel array, and
+     * <p>For devices not supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+     * system always follows that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with (0,0) being
+     * the top-left pixel in the active pixel array, and
      * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
-     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the
-     * bottom-right pixel in the active pixel array.</p>
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right pixel in the
+     * active pixel array.</p>
+     * <p>For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+     * system depends on the mode being set.
+     * When the distortion correction mode is OFF, the coordinate system follows
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with
+     * <code>(0, 0)</code> being the top-left pixel of the pre-correction active array, and
+     * (ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.width - 1,
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right
+     * pixel in the pre-correction active pixel array.
+     * When the distortion correction mode is not OFF, the coordinate system follows
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
+     * <code>(0, 0)</code> being the top-left pixel of the active array, and
+     * (ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.width - 1,
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.height - 1) being the bottom-right pixel in the
+     * active pixel array.</p>
      * <p>The weight must range from 0 to 1000, and represents a weight
      * for every pixel in the area. This means that a large metering area
      * with the same weight as a smaller area will have more effect in
@@ -825,8 +874,10 @@
      * The rectangle is defined to be inclusive on xmin and ymin, but exclusive on xmax and
      * ymax.</p>
      *
+     * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      */
     ACAMERA_CONTROL_AWB_REGIONS =                               // int32[5*area_count]
             ACAMERA_CONTROL_START + 12,
@@ -2261,7 +2312,9 @@
      * <p>If this device is the largest or only camera device with a given facing, then this
      * position will be <code>(0, 0, 0)</code>; a camera device with a lens optical center located 3 cm
      * from the main sensor along the +X axis (to the right from the user's perspective) will
-     * report <code>(0.03, 0, 0)</code>.</p>
+     * report <code>(0.03, 0, 0)</code>.  Note that this means that, for many computer vision
+     * applications, the position needs to be negated to convert it to a translation from the
+     * camera to the origin.</p>
      * <p>To transform a pixel coordinates between two cameras facing the same direction, first
      * the source camera ACAMERA_LENS_DISTORTION must be corrected for.  Then the source
      * camera ACAMERA_LENS_INTRINSIC_CALIBRATION needs to be applied, followed by the
@@ -2273,7 +2326,8 @@
      * <p>To compare this against a real image from the destination camera, the destination camera
      * image then needs to be corrected for radial distortion before comparison or sampling.</p>
      * <p>When ACAMERA_LENS_POSE_REFERENCE is GYROSCOPE, then this position is relative to
-     * the center of the primary gyroscope on the device.</p>
+     * the center of the primary gyroscope on the device. The axis definitions are the same as
+     * with PRIMARY_CAMERA.</p>
      *
      * @see ACAMERA_LENS_DISTORTION
      * @see ACAMERA_LENS_INTRINSIC_CALIBRATION
@@ -2367,13 +2421,15 @@
      * </code></pre>
      * <p>which can then be combined with the camera pose rotation
      * <code>R</code> and translation <code>t</code> (ACAMERA_LENS_POSE_ROTATION and
-     * ACAMERA_LENS_POSE_TRANSLATION, respective) to calculate the
+     * ACAMERA_LENS_POSE_TRANSLATION, respectively) to calculate the
      * complete transform from world coordinates to pixel
      * coordinates:</p>
-     * <pre><code>P = [ K 0   * [ R t
-     *      0 1 ]     0 1 ]
+     * <pre><code>P = [ K 0   * [ R -Rt
+     *      0 1 ]      0 1 ]
      * </code></pre>
-     * <p>and with <code>p_w</code> being a point in the world coordinate system
+     * <p>(Note the negation of poseTranslation when mapping from camera
+     * to world coordinates, and multiplication by the rotation).</p>
+     * <p>With <code>p_w</code> being a point in the world coordinate system
      * and <code>p_s</code> being a point in the camera active pixel array
      * coordinate system, and with the mapping including the
      * homogeneous division by z:</p>
@@ -2395,6 +2451,13 @@
      * activeArraySize rectangle), to determine the final pixel
      * coordinate of the world point for processed (non-RAW)
      * output buffers.</p>
+     * <p>For camera devices, the center of pixel <code>(x,y)</code> is located at
+     * coordinate <code>(x + 0.5, y + 0.5)</code>.  So on a device with a
+     * precorrection active array of size <code>(10,10)</code>, the valid pixel
+     * indices go from <code>(0,0)-(9,9)</code>, and a perfectly-built camera would

+     * have an optical center at the exact center of the pixel grid, at
+     * coordinates <code>(5.0, 5.0)</code>, which is the top-left corner of pixel
+     * <code>(5,5)</code>.</p>
      *
      * @see ACAMERA_LENS_DISTORTION
      * @see ACAMERA_LENS_POSE_ROTATION
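
Read as code, the world-to-pixel mapping above is K applied to (R * p_w - R * t), followed by the homogeneous division by z. A hedged sketch with plain arrays (fx, fy, cx, cy, s are the five ACAMERA_LENS_INTRINSIC_CALIBRATION values; R and t come from ACAMERA_LENS_POSE_ROTATION and ACAMERA_LENS_POSE_TRANSLATION; lens distortion is ignored for brevity):

    // Sketch: project a world point to (distortion-free) pixel coordinates.
    static void projectWorldPoint(const float R[3][3], const float t[3],
                                  float fx, float fy, float cx, float cy, float s,
                                  const float p_w[3], float* u, float* v) {
        // Camera-frame point: p_c = R * (p_w - t), i.e. the [R -Rt] block above.
        float p_c[3];
        for (int i = 0; i < 3; ++i) {
            p_c[i] = R[i][0] * (p_w[0] - t[0]) +
                     R[i][1] * (p_w[1] - t[1]) +
                     R[i][2] * (p_w[2] - t[2]);
        }
        // Apply K = [fx s cx; 0 fy cy; 0 0 1] and divide by z.
        *u = (fx * p_c[0] + s * p_c[1] + cx * p_c[2]) / p_c[2];
        *v = (fy * p_c[1] + cy * p_c[2]) / p_c[2];
    }
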
@@ -2979,9 +3042,17 @@
      * </ul></p>
      *
      * <p>This control can be used to implement digital zoom.</p>
-     * <p>The crop region coordinate system is based off
-     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with <code>(0, 0)</code> being the
-     * top-left corner of the sensor active array.</p>
+     * <p>For devices not supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+     * system always follows that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with <code>(0, 0)</code> being
+     * the top-left pixel of the active array.</p>
+     * <p>For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+     * system depends on the mode being set.
+     * When the distortion correction mode is OFF, the coordinate system follows
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with
+     * <code>(0, 0)</code> being the top-left pixel of the pre-correction active array.
+     * When the distortion correction mode is not OFF, the coordinate system follows
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
+     * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
      * <p>Output streams use this rectangle to produce their output,
      * cropping to a smaller region if necessary to maintain the
      * stream's aspect ratio, then scaling the sensor input to
@@ -3000,18 +3071,26 @@
      * outputs will crop horizontally (pillarbox), and 16:9
      * streams will match exactly. These additional crops will
      * be centered within the crop region.</p>
-     * <p>The width and height of the crop region cannot
-     * be set to be smaller than
+     * <p>If the coordinate system is ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, the width and height
+     * of the crop region cannot be set to be smaller than
      * <code>floor( activeArraySize.width / ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM )</code> and
      * <code>floor( activeArraySize.height / ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM )</code>, respectively.</p>
+     * <p>If the coordinate system is ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, the width
+     * and height of the crop region cannot be set to be smaller than
+     * <code>floor( preCorrectionActiveArraySize.width / ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM )</code>
+     * and
+     * <code>floor( preCorrectionActiveArraySize.height / ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM )</code>,
+     * respectively.</p>
      * <p>The camera device may adjust the crop region to account
      * for rounding and other hardware requirements; the final
      * crop region used will be included in the output capture
      * result.</p>
      * <p>The data representation is int[4], which maps to (left, top, width, height).</p>
      *
+     * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      */
     ACAMERA_SCALER_CROP_REGION =                                // int32[4]
             ACAMERA_SCALER_START,
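
A tiny hedged sketch of the minimum-crop-size rule stated above; the same arithmetic applies against the pre-correction array when distortion correction is OFF:

    #include <cmath>
    #include <cstdint>

    // Sketch: smallest legal crop width/height for a reference array size and
    // the ACAMERA_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM value.
    static void minCropSize(int32_t arrayWidth, int32_t arrayHeight, float maxDigitalZoom,
                            int32_t* minWidth, int32_t* minHeight) {
        *minWidth  = static_cast<int32_t>(std::floor(arrayWidth  / maxDigitalZoom));
        *minHeight = static_cast<int32_t>(std::floor(arrayHeight / maxDigitalZoom));
    }
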
@@ -3977,12 +4056,24 @@
      * ACAMERA_SCALER_CROP_REGION, is defined relative to the active array rectangle given in
      * this field, with <code>(0, 0)</code> being the top-left of this rectangle.</p>
      * <p>The active array may be smaller than the full pixel array, since the full array may
-     * include black calibration pixels or other inactive regions, and geometric correction
-     * resulting in scaling or cropping may have been applied.</p>
+     * include black calibration pixels or other inactive regions.</p>
+     * <p>For devices that do not support ACAMERA_DISTORTION_CORRECTION_MODE control, the active
+     * array must be the same as ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.</p>
+     * <p>For devices that support ACAMERA_DISTORTION_CORRECTION_MODE control, the active array must
+     * be enclosed by ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE. The difference between
+     * pre-correction active array and active array accounts for scaling or cropping caused
+     * by lens geometric distortion correction.</p>
+     * <p>In general, applications should always refer to the active array size for controls like
+     * metering regions or the crop region. Two exceptions are when the application is dealing with
+     * RAW image buffers (RAW_SENSOR, RAW10, RAW12, etc.), or when the application explicitly sets
+     * ACAMERA_DISTORTION_CORRECTION_MODE to OFF. In these cases, the application should refer
+     * to ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE.</p>
      * <p>The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
      *
+     * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SCALER_CROP_REGION
      * @see ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      */
     ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE =                     // int32[4]
             ACAMERA_SENSOR_INFO_START,
@@ -4224,9 +4315,9 @@
      * <ol>
      * <li>ACAMERA_LENS_DISTORTION.</li>
      * </ol>
-     * <p>If all of the geometric distortion fields are no-ops, this rectangle will be the same
-     * as the post-distortion-corrected rectangle given in
-     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.</p>
+     * <p>If the camera device doesn't support geometric distortion correction, or all of the
+     * geometric distortion fields are no-ops, this rectangle will be the same as the
+     * post-distortion-corrected rectangle given in ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.</p>
      * <p>This rectangle is defined relative to the full pixel array; (0,0) is the top-left of
      * the full pixel array, and the size of the full pixel array is given by
      * ACAMERA_SENSOR_INFO_PIXEL_ARRAY_SIZE.</p>
@@ -4372,11 +4463,22 @@
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      * </ul></p>
      *
-     * <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
+     * <p>For devices not supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+     * system always follows that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with <code>(0, 0)</code> being
+     * the top-left pixel of the active array.</p>
+     * <p>For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+     * system depends on the mode being set.
+     * When the distortion correction mode is OFF, the coordinate system follows
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with
+     * <code>(0, 0)</code> being the top-left pixel of the pre-correction active array.
+     * When the distortion correction mode is not OFF, the coordinate system follows
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
      * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
      * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE == FULL</p>
      *
+     * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
      */
     ACAMERA_STATISTICS_FACE_LANDMARKS =                         // int32[n*6]
@@ -4392,12 +4494,23 @@
      *   <li>ACameraMetadata from ACameraCaptureSession_captureCallback_result callbacks</li>
      * </ul></p>
      *
-     * <p>The coordinate system is that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
+     * <p>For devices not supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+     * system always follows that of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with <code>(0, 0)</code> being
+     * the top-left pixel of the active array.</p>
+     * <p>For devices supporting ACAMERA_DISTORTION_CORRECTION_MODE control, the coordinate
+     * system depends on the mode being set.
+     * When the distortion correction mode is OFF, the coordinate system follows
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE, with
+     * <code>(0, 0)</code> being the top-left pixel of the pre-correction active array.
+     * When the distortion correction mode is not OFF, the coordinate system follows
+     * ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE, with
      * <code>(0, 0)</code> being the top-left pixel of the active array.</p>
      * <p>Only available if ACAMERA_STATISTICS_FACE_DETECT_MODE != OFF
-     * The data representation is <code>int[4]</code>, which maps to <code>(left, top, width, height)</code>.</p>
+     * The data representation is <code>int[4]</code>, which maps to <code>(left, top, right, bottom)</code>.</p>
      *
+     * @see ACAMERA_DISTORTION_CORRECTION_MODE
      * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      * @see ACAMERA_STATISTICS_FACE_DETECT_MODE
      */
     ACAMERA_STATISTICS_FACE_RECTANGLES =                        // int32[n*4]
@@ -4574,8 +4687,8 @@
     ACAMERA_STATISTICS_LENS_SHADING_MAP_MODE =                  // byte (acamera_metadata_enum_android_statistics_lens_shading_map_mode_t)
             ACAMERA_STATISTICS_START + 16,
     /**
-     * <p>A control for selecting whether OIS position information is included in output
-     * result metadata.</p>
+     * <p>A control for selecting whether optical stabilization (OIS) position
+     * information is included in output result metadata.</p>
      *
      * <p>Type: byte (acamera_metadata_enum_android_statistics_ois_data_mode_t)</p>
      *
@@ -4585,6 +4698,12 @@
      *   <li>ACaptureRequest</li>
      * </ul></p>
      *
+     * <p>Since optical image stabilization generally involves motion much faster than the duration
+     * of individual image exposure, multiple OIS samples can be included for a single capture
+     * result. For example, if the OIS reporting operates at 200 Hz, a typical camera operating
+     * at 30fps may have 6-7 OIS samples per capture result. This information can be combined
+     * with the rolling shutter skew to account for lens motion during image exposure in
+     * post-processing algorithms.</p>
      */
     ACAMERA_STATISTICS_OIS_DATA_MODE =                          // byte (acamera_metadata_enum_android_statistics_ois_data_mode_t)
             ACAMERA_STATISTICS_START + 17,
@@ -4616,11 +4735,15 @@
      * </ul></p>
      *
      * <p>The array contains the amount of shifts in x direction, in pixels, based on OIS samples.
-     * A positive value is a shift from left to right in active array coordinate system. For
-     * example, if the optical center is (1000, 500) in active array coordinates, a shift of
-     * (3, 0) puts the new optical center at (1003, 500).</p>
+     * A positive value is a shift from left to right in the pre-correction active array
+     * coordinate system. For example, if the optical center is (1000, 500) in pre-correction
+     * active array coordinates, a shift of (3, 0) puts the new optical center at (1003, 500).</p>
      * <p>The number of shifts must match the number of timestamps in
      * ACAMERA_STATISTICS_OIS_TIMESTAMPS.</p>
+     * <p>The OIS samples are not affected by whether lens distortion correction is enabled (on
+     * supporting devices). They are always reported in pre-correction active array coordinates,
+     * since the scaling of OIS shifts would depend on the specific spot on the sensor where the
+     * shift is needed.</p>
      *
      * @see ACAMERA_STATISTICS_OIS_TIMESTAMPS
      */
@@ -4637,11 +4760,15 @@
      * </ul></p>
      *
      * <p>The array contains the amount of shifts in y direction, in pixels, based on OIS samples.
-     * A positive value is a shift from top to bottom in active array coordinate system. For
-     * example, if the optical center is (1000, 500) in active array coordinates, a shift of
-     * (0, 5) puts the new optical center at (1000, 505).</p>
+     * A positive value is a shift from top to bottom in pre-correction active array coordinate
+     * system. For example, if the optical center is (1000, 500) in pre-correction active array
+     * coordinates, a shift of (0, 5) puts the new optical center at (1000, 505).</p>
      * <p>The number of shifts must match the number of timestamps in
      * ACAMERA_STATISTICS_OIS_TIMESTAMPS.</p>
+     * <p>The OIS samples are not affected by whether lens distortion correction is enabled (on
+     * supporting devices). They are always reported in pre-correction active array coordinates,
+     * since the scaling of OIS shifts would depend on the specific spot on the sensor where the
+     * shift is needed.</p>
      *
      * @see ACAMERA_STATISTICS_OIS_TIMESTAMPS
      */
@@ -4993,12 +5120,26 @@
      * the following code snippet can be used:</p>
      * <pre><code>// Returns true if the device supports the required hardware level, or better.
      * boolean isHardwareLevelSupported(CameraCharacteristics c, int requiredLevel) {
+     *     final int[] sortedHwLevels = {
+     *         CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
+     *         CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL,
+     *         CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
+     *         CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
+     *         CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_3
+     *     };
      *     int deviceLevel = c.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
-     *     if (deviceLevel == CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY) {
-     *         return requiredLevel == deviceLevel;
+     *     if (requiredLevel == deviceLevel) {
+     *         return true;
      *     }
-     *     // deviceLevel is not LEGACY, can use numerical sort
-     *     return requiredLevel &lt;= deviceLevel;
+     *
+     *     for (int sortedlevel : sortedHwLevels) {
+     *         if (sortedlevel == requiredLevel) {
+     *             return true;
+     *         } else if (sortedlevel == deviceLevel) {
+     *             return false;
+     *         }
+     *     }
+     *     return false; // Should never reach here
      * }
      * </code></pre>
      * <p>At a high level, the levels are:</p>
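
The snippet above is the Java CameraCharacteristics form; for NDK code, a hedged equivalent using the acamera_metadata hardware-level enum values (the device level is assumed to have been read from ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL already):

    #include <cstdint>
    #include <camera/NdkCameraMetadataTags.h>

    // Sketch: true if deviceLevel meets or exceeds requiredLevel, using the same
    // LEGACY < EXTERNAL < LIMITED < FULL < LEVEL_3 ordering as the Java snippet.
    static bool isHardwareLevelSupported(uint8_t deviceLevel, uint8_t requiredLevel) {
        static const uint8_t kSortedLevels[] = {
            ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LEGACY,
            ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_EXTERNAL,
            ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED,
            ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_FULL,
            ACAMERA_INFO_SUPPORTED_HARDWARE_LEVEL_3,
        };
        if (requiredLevel == deviceLevel) {
            return true;
        }
        for (uint8_t level : kSortedLevels) {
            if (level == requiredLevel) {
                return true;   // required level sorts below the device level
            }
            if (level == deviceLevel) {
                return false;  // device level sorts below the required level
            }
        }
        return false;  // should never be reached
    }
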
@@ -5012,6 +5153,8 @@
      *   post-processing settings, and image capture at a high rate.</li>
      * <li><code>LEVEL_3</code> devices additionally support YUV reprocessing and RAW image capture, along
      *   with additional output stream configurations.</li>
+     * <li><code>EXTERNAL</code> devices are similar to <code>LIMITED</code> devices with exceptions like some sensor or
+     *   lens information not reported or less stable framerates.</li>
      * </ul>
      * <p>See the individual level enums for full descriptions of the supported capabilities.  The
      * ACAMERA_REQUEST_AVAILABLE_CAPABILITIES entry describes the device's capabilities at a
@@ -5315,18 +5458,36 @@
      * any correction at all would slow down capture rate.  Every output stream will have a
      * similar amount of enhancement applied.</p>
      * <p>The correction only applies to processed outputs such as YUV, JPEG, or DEPTH16; it is not
-     * applied to any RAW output.  Metadata coordinates such as face rectangles or metering
-     * regions are also not affected by correction.</p>
-     * <p>Applications enabling distortion correction need to pay extra attention when converting
-     * image coordinates between corrected output buffers and the sensor array. For example, if
-     * the app supports tap-to-focus and enables correction, it then has to apply the distortion
-     * model described in ACAMERA_LENS_DISTORTION to the image buffer tap coordinates to properly
-     * calculate the tap position on the sensor active array to be used with
-     * ACAMERA_CONTROL_AF_REGIONS. The same applies in reverse to detected face rectangles if
-     * they need to be drawn on top of the corrected output buffers.</p>
+     * applied to any RAW output.</p>
+     * <p>This control will be on by default on devices that support this control. Applications
+     * disabling distortion correction need to pay extra attention with the coordinate system of
+     * metering regions, crop region, and face rectangles. When distortion correction is OFF,
+     * metadata coordinates follow the coordinate system of
+     * ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE. When distortion is not OFF, metadata
+     * coordinates follow the coordinate system of ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE.  The
+     * camera device will map these metadata fields to match the corrected image produced by the
+     * camera device, for both capture requests and results.  However, this mapping is not very
+     * precise, since rectangles do not generally map to rectangles when corrected.  Only linear
+     * scaling between the active array and precorrection active array coordinates is
+     * performed. Applications that require precise correction of metadata need to undo that
+     * linear scaling, and apply a more complete correction that takes into account the app's
+     * own requirements.</p>
+     * <p>The full list of metadata that is affected in this way by distortion correction is:</p>
+     * <ul>
+     * <li>ACAMERA_CONTROL_AF_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AE_REGIONS</li>
+     * <li>ACAMERA_CONTROL_AWB_REGIONS</li>
+     * <li>ACAMERA_SCALER_CROP_REGION</li>
+     * <li>android.statistics.faces</li>
+     * </ul>
      *
+     * @see ACAMERA_CONTROL_AE_REGIONS
      * @see ACAMERA_CONTROL_AF_REGIONS
+     * @see ACAMERA_CONTROL_AWB_REGIONS
      * @see ACAMERA_LENS_DISTORTION
+     * @see ACAMERA_SCALER_CROP_REGION
+     * @see ACAMERA_SENSOR_INFO_ACTIVE_ARRAY_SIZE
+     * @see ACAMERA_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE
      */
     ACAMERA_DISTORTION_CORRECTION_MODE =                        // byte (acamera_metadata_enum_android_distortion_correction_mode_t)
             ACAMERA_DISTORTION_CORRECTION_START,
@@ -7056,11 +7217,11 @@
      * camera in the list of supported camera devices.</p>
      * <p>This capability requires the camera device to support the following:</p>
      * <ul>
-     * <li>This camera device must list the following static metadata entries in <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a>:<ul>
-     * <li>android.logicalMultiCamera.physicalIds</li>
-     * <li>ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE</li>
-     * </ul>
-     * </li>
+     * <li>The IDs of underlying physical cameras are returned via
+     *   <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html#getPhysicalCameraIds">CameraCharacteristics#getPhysicalCameraIds</a>.</li>
+     * <li>This camera device must list static metadata
+     *   ACAMERA_LOGICAL_MULTI_CAMERA_SENSOR_SYNC_TYPE in
+     *   <a href="https://developer.android.com/reference/android/hardware/camera2/CameraCharacteristics.html">CameraCharacteristics</a>.</li>
      * <li>The underlying physical cameras' static metadata must list the following entries,
      *   so that the application can correlate pixels from the physical streams:<ul>
      * <li>ACAMERA_LENS_POSE_REFERENCE</li>
diff --git a/camera/ndk/include/camera/NdkCaptureRequest.h b/camera/ndk/include/camera/NdkCaptureRequest.h
index 4961ce3..5340e76 100644
--- a/camera/ndk/include/camera/NdkCaptureRequest.h
+++ b/camera/ndk/include/camera/NdkCaptureRequest.h
@@ -101,7 +101,8 @@
  *
  * @see ACaptureRequest_addTarget
  */
-camera_status_t ACameraOutputTarget_create(ANativeWindow* window, ACameraOutputTarget** output);
+camera_status_t ACameraOutputTarget_create(ANativeWindow* window,
+        ACameraOutputTarget** output) __INTRODUCED_IN(24);
 
 /**
  * Free a ACameraOutputTarget object.
@@ -110,7 +111,7 @@
  *
  * @see ACameraOutputTarget_create
  */
-void ACameraOutputTarget_free(ACameraOutputTarget* output);
+void ACameraOutputTarget_free(ACameraOutputTarget* output) __INTRODUCED_IN(24);
 
 /**
  * Add an {@link ACameraOutputTarget} object to {@link ACaptureRequest}.
@@ -123,7 +124,7 @@
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request or output is NULL.</li></ul>
  */
 camera_status_t ACaptureRequest_addTarget(ACaptureRequest* request,
-        const ACameraOutputTarget* output);
+        const ACameraOutputTarget* output) __INTRODUCED_IN(24);
 
 /**
  * Remove an {@link ACameraOutputTarget} object from {@link ACaptureRequest}.
@@ -138,7 +139,7 @@
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request or output is NULL.</li></ul>
  */
 camera_status_t ACaptureRequest_removeTarget(ACaptureRequest* request,
-        const ACameraOutputTarget* output);
+        const ACameraOutputTarget* output) __INTRODUCED_IN(24);
 
 /**
  * Get a metadata entry from input {@link ACaptureRequest}.
@@ -158,7 +159,7 @@
  *             entry of input tag value.</li></ul>
  */
 camera_status_t ACaptureRequest_getConstEntry(
-        const ACaptureRequest* request, uint32_t tag, ACameraMetadata_const_entry* entry);
+        const ACaptureRequest* request, uint32_t tag, ACameraMetadata_const_entry* entry) __INTRODUCED_IN(24);
 
 /*
  * List all the entry tags in input {@link ACaptureRequest}.
@@ -179,7 +180,7 @@
  *         <li>{@link ACAMERA_ERROR_UNKNOWN} if the method fails for some other reasons.</li></ul>
  */
 camera_status_t ACaptureRequest_getAllTags(
-        const ACaptureRequest* request, /*out*/int32_t* numTags, /*out*/const uint32_t** tags);
+        const ACaptureRequest* request, /*out*/int32_t* numTags, /*out*/const uint32_t** tags) __INTRODUCED_IN(24);
 
 /**
  * Set/change a camera capture control entry with unsigned 8 bits data type.
@@ -198,7 +199,7 @@
  *             the tag is not controllable by application.</li></ul>
  */
 camera_status_t ACaptureRequest_setEntry_u8(
-        ACaptureRequest* request, uint32_t tag, uint32_t count, const uint8_t* data);
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const uint8_t* data) __INTRODUCED_IN(24);
 
 /**
  * Set/change a camera capture control entry with signed 32 bits data type.
@@ -217,7 +218,7 @@
  *             the tag is not controllable by application.</li></ul>
  */
 camera_status_t ACaptureRequest_setEntry_i32(
-        ACaptureRequest* request, uint32_t tag, uint32_t count, const int32_t* data);
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const int32_t* data) __INTRODUCED_IN(24);
 
 /**
  * Set/change a camera capture control entry with float data type.
@@ -236,7 +237,7 @@
  *             the tag is not controllable by application.</li></ul>
  */
 camera_status_t ACaptureRequest_setEntry_float(
-        ACaptureRequest* request, uint32_t tag, uint32_t count, const float* data);
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const float* data) __INTRODUCED_IN(24);
 
 /**
  * Set/change a camera capture control entry with signed 64 bits data type.
@@ -255,7 +256,7 @@
  *             the tag is not controllable by application.</li></ul>
  */
 camera_status_t ACaptureRequest_setEntry_i64(
-        ACaptureRequest* request, uint32_t tag, uint32_t count, const int64_t* data);
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const int64_t* data) __INTRODUCED_IN(24);
 
 /**
  * Set/change a camera capture control entry with double data type.
@@ -274,7 +275,7 @@
  *             the tag is not controllable by application.</li></ul>
  */
 camera_status_t ACaptureRequest_setEntry_double(
-        ACaptureRequest* request, uint32_t tag, uint32_t count, const double* data);
+        ACaptureRequest* request, uint32_t tag, uint32_t count, const double* data) __INTRODUCED_IN(24);
 
 /**
  * Set/change a camera capture control entry with rational data type.
@@ -294,14 +295,14 @@
  */
 camera_status_t ACaptureRequest_setEntry_rational(
         ACaptureRequest* request, uint32_t tag, uint32_t count,
-        const ACameraMetadata_rational* data);
+        const ACameraMetadata_rational* data) __INTRODUCED_IN(24);
 
 /**
  * Free a {@link ACaptureRequest} structure.
  *
  * @param request the {@link ACaptureRequest} to be freed.
  */
-void ACaptureRequest_free(ACaptureRequest* request);
+void ACaptureRequest_free(ACaptureRequest* request) __INTRODUCED_IN(24);
 
 #endif /* __ANDROID_API__ >= 24 */
 
@@ -325,7 +326,7 @@
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL.</li></ul>
  */
 camera_status_t ACaptureRequest_setUserContext(
-        ACaptureRequest* request, void* context);
+        ACaptureRequest* request, void* context) __INTRODUCED_IN(28);
 
 /**
  * Get the user context pointer of the {@link ACaptureRequest}
@@ -341,7 +342,7 @@
  *         <li>{@link ACAMERA_ERROR_INVALID_PARAMETER} if request is NULL.</li></ul>
  */
 camera_status_t ACaptureRequest_getUserContext(
-        const ACaptureRequest* request, /*out*/void** context);
+        const ACaptureRequest* request, /*out*/void** context) __INTRODUCED_IN(28);
 
 /**
  * Create a copy of input {@link ACaptureRequest}.
@@ -353,7 +354,7 @@
  *
  * @return a valid ACaptureRequest pointer or NULL if the input request cannot be copied.
  */
-ACaptureRequest* ACaptureRequest_copy(const ACaptureRequest* src);
+ACaptureRequest* ACaptureRequest_copy(const ACaptureRequest* src) __INTRODUCED_IN(28);
 
 #endif /* __ANDROID_API__ >= 28 */
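
As a brief usage note for the setEntry family annotated above, a hedged sketch that forces auto-exposure off on an existing request (the tag and value are just illustrative):

    #include <camera/NdkCaptureRequest.h>
    #include <camera/NdkCameraMetadataTags.h>

    // Sketch: disable auto-exposure on a request obtained from
    // ACameraDevice_createCaptureRequest.
    static camera_status_t disableAutoExposure(ACaptureRequest* request) {
        const uint8_t aeMode = ACAMERA_CONTROL_AE_MODE_OFF;
        return ACaptureRequest_setEntry_u8(request, ACAMERA_CONTROL_AE_MODE,
                                           /*count*/ 1, &aeMode);
    }
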
 
diff --git a/camera/ndk/libcamera2ndk.map.txt b/camera/ndk/libcamera2ndk.map.txt
index d179aa0..a29e96d 100644
--- a/camera/ndk/libcamera2ndk.map.txt
+++ b/camera/ndk/libcamera2ndk.map.txt
@@ -6,11 +6,11 @@
     ACameraCaptureSession_getDevice;
     ACameraCaptureSession_setRepeatingRequest;
     ACameraCaptureSession_stopRepeating;
-    ACameraCaptureSession_updateSharedOutput;
+    ACameraCaptureSession_updateSharedOutput; # introduced=28
     ACameraDevice_close;
     ACameraDevice_createCaptureRequest;
     ACameraDevice_createCaptureSession;
-    ACameraDevice_createCaptureSessionWithSessionParameters;
+    ACameraDevice_createCaptureSessionWithSessionParameters; # introduced=28
     ACameraDevice_getId;
     ACameraManager_create;
     ACameraManager_delete;
@@ -27,11 +27,11 @@
     ACameraOutputTarget_create;
     ACameraOutputTarget_free;
     ACaptureRequest_addTarget;
-    ACaptureRequest_copy;
+    ACaptureRequest_copy; # introduced=28
     ACaptureRequest_free;
     ACaptureRequest_getAllTags;
     ACaptureRequest_getConstEntry;
-    ACaptureRequest_getUserContext;
+    ACaptureRequest_getUserContext; # introduced=28
     ACaptureRequest_removeTarget;
     ACaptureRequest_setEntry_double;
     ACaptureRequest_setEntry_float;
@@ -39,15 +39,15 @@
     ACaptureRequest_setEntry_i64;
     ACaptureRequest_setEntry_rational;
     ACaptureRequest_setEntry_u8;
-    ACaptureRequest_setUserContext;
+    ACaptureRequest_setUserContext; # introduced=28
     ACaptureSessionOutputContainer_add;
     ACaptureSessionOutputContainer_create;
     ACaptureSessionOutputContainer_free;
     ACaptureSessionOutputContainer_remove;
     ACaptureSessionOutput_create;
-    ACaptureSessionSharedOutput_create;
-    ACaptureSessionSharedOutput_add;
-    ACaptureSessionSharedOutput_remove;
+    ACaptureSessionSharedOutput_create; # introduced=28
+    ACaptureSessionSharedOutput_add; # introduced=28
+    ACaptureSessionSharedOutput_remove; # introduced=28
     ACaptureSessionOutput_free;
   local:
     *;
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index 6a58467..980d1d2 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -366,13 +366,13 @@
             case 'T':
             {
                 useTimestamp = true;
+                FALLTHROUGH_INTENDED;
             }
-            // fall through
             case 'R':
             {
                 renderSurface = true;
+                FALLTHROUGH_INTENDED;
             }
-            // fall through
             case 'S':
             {
                 useSurface = true;
diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp
index f24d2dd..630de25 100644
--- a/cmds/stagefright/mediafilter.cpp
+++ b/cmds/stagefright/mediafilter.cpp
@@ -706,13 +706,13 @@
             case 'T':
             {
                 useTimestamp = true;
+                FALLTHROUGH_INTENDED;
             }
-            // fall through
             case 'R':
             {
                 renderSurface = true;
+                FALLTHROUGH_INTENDED;
             }
-            // fall through
             case 'S':
             {
                 useSurface = true;
diff --git a/media/extractors/mpeg2/Android.bp b/media/extractors/mpeg2/Android.bp
index 5e4a592..40314dc 100644
--- a/media/extractors/mpeg2/Android.bp
+++ b/media/extractors/mpeg2/Android.bp
@@ -25,6 +25,10 @@
         "libstagefright_foundation",
     ],
 
+    header_libs: [
+        "libbase_headers",
+    ],
+
     static_libs: [
         "libstagefright_mpeg2support",
         "libutils",
diff --git a/media/extractors/mpeg2/MPEG2TSExtractor.cpp b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
index c83f7ce..9191386 100644
--- a/media/extractors/mpeg2/MPEG2TSExtractor.cpp
+++ b/media/extractors/mpeg2/MPEG2TSExtractor.cpp
@@ -20,6 +20,8 @@
 #include <inttypes.h>
 #include <utils/Log.h>
 
+#include <android-base/macros.h>
+
 #include "MPEG2TSExtractor.h"
 
 #include <media/DataSourceBase.h>
@@ -508,7 +510,7 @@
         case MediaTrack::ReadOptions::SEEK_CLOSEST:
             ALOGW("seekMode not supported: %d; falling back to PREVIOUS_SYNC",
                     seekMode);
-            // fall-through
+            FALLTHROUGH_INTENDED;
         case MediaTrack::ReadOptions::SEEK_PREVIOUS_SYNC:
             if (index == 0) {
                 ALOGW("Previous sync not found; starting from the earliest "
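
FALLTHROUGH_INTENDED comes from android-base/macros.h (hence the new libbase_headers dependency in the mpeg2 extractor's Android.bp above); unlike a bare "// fall through" comment it expands to the clang fall-through attribute where available, so -Wimplicit-fallthrough still flags genuinely missing breaks. A minimal hedged sketch of the pattern (the flag values are made up):

    #include <android-base/macros.h>

    // Sketch: cumulative switch cases with annotated fall-through.
    static int optionFlags(char option) {
        int flags = 0;
        switch (option) {
            case 'T':
                flags |= 0x4;  // hypothetical: timestamps imply rendering
                FALLTHROUGH_INTENDED;
            case 'R':
                flags |= 0x2;  // hypothetical: rendering implies a surface
                FALLTHROUGH_INTENDED;
            case 'S':
                flags |= 0x1;  // hypothetical: surface output
                break;
            default:
                break;
        }
        return flags;
    }
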
diff --git a/media/libaaudio/include/aaudio/AAudio.h b/media/libaaudio/include/aaudio/AAudio.h
index 5b29419..1493b26 100644
--- a/media/libaaudio/include/aaudio/AAudio.h
+++ b/media/libaaudio/include/aaudio/AAudio.h
@@ -423,7 +423,7 @@
  *
  * @return pointer to a text representation of an AAudio result code.
  */
-AAUDIO_API const char * AAudio_convertResultToText(aaudio_result_t returnCode);
+AAUDIO_API const char * AAudio_convertResultToText(aaudio_result_t returnCode) __INTRODUCED_IN(26);
 
 /**
  * The text is the ASCII symbol corresponding to the stream state,
@@ -433,7 +433,8 @@
  *
  * @return pointer to a text representation of an AAudio state.
  */
-AAUDIO_API const char * AAudio_convertStreamStateToText(aaudio_stream_state_t state);
+AAUDIO_API const char * AAudio_convertStreamStateToText(aaudio_stream_state_t state)
+        __INTRODUCED_IN(26);
 
 // ============================================================
 // StreamBuilder
@@ -451,7 +452,8 @@
  *
  * AAudioStreamBuilder_delete() must be called when you are done using the builder.
  */
-AAUDIO_API aaudio_result_t AAudio_createStreamBuilder(AAudioStreamBuilder** builder);
+AAUDIO_API aaudio_result_t AAudio_createStreamBuilder(AAudioStreamBuilder** builder)
+        __INTRODUCED_IN(26);
 
 /**
  * Request an audio device identified device using an ID.
@@ -464,7 +466,7 @@
  * @param deviceId device identifier or AAUDIO_UNSPECIFIED
  */
 AAUDIO_API void AAudioStreamBuilder_setDeviceId(AAudioStreamBuilder* builder,
-                                                     int32_t deviceId);
+                                                int32_t deviceId) __INTRODUCED_IN(26);
 
 /**
  * Request a sample rate in Hertz.
@@ -481,7 +483,7 @@
  * @param sampleRate frames per second. Common rates include 44100 and 48000 Hz.
  */
 AAUDIO_API void AAudioStreamBuilder_setSampleRate(AAudioStreamBuilder* builder,
-                                                       int32_t sampleRate);
+                                                  int32_t sampleRate) __INTRODUCED_IN(26);
 
 /**
  * Request a number of channels for the stream.
@@ -498,7 +500,7 @@
  * @param channelCount Number of channels desired.
  */
 AAUDIO_API void AAudioStreamBuilder_setChannelCount(AAudioStreamBuilder* builder,
-                                                   int32_t channelCount);
+                                                    int32_t channelCount) __INTRODUCED_IN(26);
 
 /**
  * Identical to AAudioStreamBuilder_setChannelCount().
@@ -507,7 +509,7 @@
  * @param samplesPerFrame Number of samples in a frame.
  */
 AAUDIO_API void AAudioStreamBuilder_setSamplesPerFrame(AAudioStreamBuilder* builder,
-                                                       int32_t samplesPerFrame);
+                                                       int32_t samplesPerFrame) __INTRODUCED_IN(26);
 
 /**
  * Request a sample data format, for example AAUDIO_FORMAT_PCM_I16.
@@ -524,7 +526,7 @@
  * @param format common formats are AAUDIO_FORMAT_PCM_FLOAT and AAUDIO_FORMAT_PCM_I16.
  */
 AAUDIO_API void AAudioStreamBuilder_setFormat(AAudioStreamBuilder* builder,
-                                                   aaudio_format_t format);
+                                              aaudio_format_t format) __INTRODUCED_IN(26);
 
 /**
  * Request a mode for sharing the device.
@@ -538,7 +540,7 @@
  * @param sharingMode AAUDIO_SHARING_MODE_SHARED or AAUDIO_SHARING_MODE_EXCLUSIVE
  */
 AAUDIO_API void AAudioStreamBuilder_setSharingMode(AAudioStreamBuilder* builder,
-                                                        aaudio_sharing_mode_t sharingMode);
+        aaudio_sharing_mode_t sharingMode) __INTRODUCED_IN(26);
 
 /**
  * Request the direction for a stream.
@@ -549,7 +551,7 @@
  * @param direction AAUDIO_DIRECTION_OUTPUT or AAUDIO_DIRECTION_INPUT
  */
 AAUDIO_API void AAudioStreamBuilder_setDirection(AAudioStreamBuilder* builder,
-                                                            aaudio_direction_t direction);
+        aaudio_direction_t direction) __INTRODUCED_IN(26);
 
 /**
  * Set the requested buffer capacity in frames.
@@ -561,7 +563,7 @@
  * @param numFrames the desired buffer capacity in frames or AAUDIO_UNSPECIFIED
  */
 AAUDIO_API void AAudioStreamBuilder_setBufferCapacityInFrames(AAudioStreamBuilder* builder,
-                                                                 int32_t numFrames);
+        int32_t numFrames) __INTRODUCED_IN(26);
 
 /**
  * Set the requested performance mode.
@@ -578,7 +580,7 @@
  * @param mode the desired performance mode, eg. AAUDIO_PERFORMANCE_MODE_LOW_LATENCY
  */
 AAUDIO_API void AAudioStreamBuilder_setPerformanceMode(AAudioStreamBuilder* builder,
-                                                aaudio_performance_mode_t mode);
+        aaudio_performance_mode_t mode) __INTRODUCED_IN(26);
 
 /**
  * Set the intended use case for the stream.
@@ -595,7 +597,7 @@
  * @param usage the desired usage, eg. AAUDIO_USAGE_GAME
  */
 AAUDIO_API void AAudioStreamBuilder_setUsage(AAudioStreamBuilder* builder,
-                                                       aaudio_usage_t usage);
+        aaudio_usage_t usage) __INTRODUCED_IN(28);
 
 /**
  * Set the type of audio data that the stream will carry.
@@ -612,7 +614,7 @@
  * @param contentType the type of audio data, eg. AAUDIO_CONTENT_TYPE_SPEECH
  */
 AAUDIO_API void AAudioStreamBuilder_setContentType(AAudioStreamBuilder* builder,
-                                             aaudio_content_type_t contentType);
+        aaudio_content_type_t contentType) __INTRODUCED_IN(28);
 
 /**
  * Set the input (capture) preset for the stream.
@@ -632,7 +634,7 @@
  * @param inputPreset the desired configuration for recording
  */
 AAUDIO_API void AAudioStreamBuilder_setInputPreset(AAudioStreamBuilder* builder,
-                                                   aaudio_input_preset_t inputPreset);
+        aaudio_input_preset_t inputPreset) __INTRODUCED_IN(28);
 
 /** Set the requested session ID.
  *
@@ -662,7 +664,7 @@
  * @param sessionId an allocated sessionID or AAUDIO_SESSION_ID_ALLOCATE
  */
 AAUDIO_API void AAudioStreamBuilder_setSessionId(AAudioStreamBuilder* builder,
-                                                aaudio_session_id_t sessionId);
+        aaudio_session_id_t sessionId) __INTRODUCED_IN(28);
 
 /**
  * Return one of these values from the data callback function.
@@ -760,8 +762,7 @@
  *          to the callback functions.
  */
 AAUDIO_API void AAudioStreamBuilder_setDataCallback(AAudioStreamBuilder* builder,
-                                                 AAudioStream_dataCallback callback,
-                                                 void *userData);
+        AAudioStream_dataCallback callback, void *userData) __INTRODUCED_IN(26);
 
 /**
  * Set the requested data callback buffer size in frames.
@@ -787,7 +788,7 @@
  * @param numFrames the desired buffer size in frames or AAUDIO_UNSPECIFIED
  */
 AAUDIO_API void AAudioStreamBuilder_setFramesPerDataCallback(AAudioStreamBuilder* builder,
-                                                             int32_t numFrames);
+                                                             int32_t numFrames) __INTRODUCED_IN(26);
 
 /**
  * Prototype for the callback function that is passed to
@@ -840,8 +841,7 @@
  *          to the callback functions.
  */
 AAUDIO_API void AAudioStreamBuilder_setErrorCallback(AAudioStreamBuilder* builder,
-                                                AAudioStream_errorCallback callback,
-                                                void *userData);
+        AAudioStream_errorCallback callback, void *userData) __INTRODUCED_IN(26);
 
 /**
  * Open a stream based on the options in the StreamBuilder.
@@ -854,7 +854,7 @@
  * @return AAUDIO_OK or a negative error.
  */
 AAUDIO_API aaudio_result_t  AAudioStreamBuilder_openStream(AAudioStreamBuilder* builder,
-                                                     AAudioStream** stream);
+        AAudioStream** stream) __INTRODUCED_IN(26);
 
 /**
  * Delete the resources associated with the StreamBuilder.
@@ -862,7 +862,8 @@
  * @param builder reference provided by AAudio_createStreamBuilder()
  * @return AAUDIO_OK or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStreamBuilder_delete(AAudioStreamBuilder* builder);
+AAUDIO_API aaudio_result_t  AAudioStreamBuilder_delete(AAudioStreamBuilder* builder)
+        __INTRODUCED_IN(26);
 
 // ============================================================
 // Stream Control
@@ -874,7 +875,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return AAUDIO_OK or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStream_close(AAudioStream* stream);
+AAUDIO_API aaudio_result_t  AAudioStream_close(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Asynchronously request to start playing the stream. For output streams, one should
@@ -885,7 +886,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return AAUDIO_OK or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStream_requestStart(AAudioStream* stream);
+AAUDIO_API aaudio_result_t  AAudioStream_requestStart(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Asynchronous request for the stream to pause.
@@ -899,7 +900,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return AAUDIO_OK or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStream_requestPause(AAudioStream* stream);
+AAUDIO_API aaudio_result_t  AAudioStream_requestPause(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Asynchronous request for the stream to flush.
@@ -913,7 +914,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return AAUDIO_OK or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStream_requestFlush(AAudioStream* stream);
+AAUDIO_API aaudio_result_t  AAudioStream_requestFlush(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Asynchronous request for the stream to stop.
@@ -923,7 +924,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return AAUDIO_OK or a negative error.
  */
-AAUDIO_API aaudio_result_t  AAudioStream_requestStop(AAudioStream* stream);
+AAUDIO_API aaudio_result_t  AAudioStream_requestStop(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Query the current state of the client, eg. AAUDIO_STREAM_STATE_PAUSING
@@ -935,7 +936,7 @@
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  */
-AAUDIO_API aaudio_stream_state_t AAudioStream_getState(AAudioStream* stream);
+AAUDIO_API aaudio_stream_state_t AAudioStream_getState(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Wait until the current state no longer matches the input state.
@@ -960,9 +961,8 @@
  * @return AAUDIO_OK or a negative error.
  */
 AAUDIO_API aaudio_result_t AAudioStream_waitForStateChange(AAudioStream* stream,
-                                            aaudio_stream_state_t inputState,
-                                            aaudio_stream_state_t *nextState,
-                                            int64_t timeoutNanoseconds);
+        aaudio_stream_state_t inputState, aaudio_stream_state_t *nextState,
+        int64_t timeoutNanoseconds) __INTRODUCED_IN(26);
 
 // ============================================================
 // Stream I/O
@@ -989,9 +989,7 @@
  * @return The number of frames actually read or a negative error.
  */
 AAUDIO_API aaudio_result_t AAudioStream_read(AAudioStream* stream,
-                               void *buffer,
-                               int32_t numFrames,
-                               int64_t timeoutNanoseconds);
+        void *buffer, int32_t numFrames, int64_t timeoutNanoseconds) __INTRODUCED_IN(26);
 
 /**
  * Write data to the stream.
@@ -1014,9 +1012,7 @@
  * @return The number of frames actually written or a negative error.
  */
 AAUDIO_API aaudio_result_t AAudioStream_write(AAudioStream* stream,
-                               const void *buffer,
-                               int32_t numFrames,
-                               int64_t timeoutNanoseconds);
+        const void *buffer, int32_t numFrames, int64_t timeoutNanoseconds) __INTRODUCED_IN(26);
 
 // ============================================================
 // Stream - queries
@@ -1039,7 +1035,7 @@
  * @return actual buffer size in frames or a negative error
  */
 AAUDIO_API aaudio_result_t AAudioStream_setBufferSizeInFrames(AAudioStream* stream,
-                                                      int32_t numFrames);
+        int32_t numFrames) __INTRODUCED_IN(26);
 
 /**
  * Query the maximum number of frames that can be filled without blocking.
@@ -1047,7 +1043,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return buffer size in frames.
  */
-AAUDIO_API int32_t AAudioStream_getBufferSizeInFrames(AAudioStream* stream);
+AAUDIO_API int32_t AAudioStream_getBufferSizeInFrames(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Query the number of frames that the application should read or write at
@@ -1062,7 +1058,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return burst size
  */
-AAUDIO_API int32_t AAudioStream_getFramesPerBurst(AAudioStream* stream);
+AAUDIO_API int32_t AAudioStream_getFramesPerBurst(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Query maximum buffer capacity in frames.
@@ -1070,7 +1066,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return  buffer capacity in frames
  */
-AAUDIO_API int32_t AAudioStream_getBufferCapacityInFrames(AAudioStream* stream);
+AAUDIO_API int32_t AAudioStream_getBufferCapacityInFrames(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Query the size of the buffer that will be passed to the dataProc callback
@@ -1091,7 +1087,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return callback buffer size in frames or AAUDIO_UNSPECIFIED
  */
-AAUDIO_API int32_t AAudioStream_getFramesPerDataCallback(AAudioStream* stream);
+AAUDIO_API int32_t AAudioStream_getFramesPerDataCallback(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * An XRun is an Underrun or an Overrun.
@@ -1108,13 +1104,13 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return the underrun or overrun count
  */
-AAUDIO_API int32_t AAudioStream_getXRunCount(AAudioStream* stream);
+AAUDIO_API int32_t AAudioStream_getXRunCount(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return actual sample rate
  */
-AAUDIO_API int32_t AAudioStream_getSampleRate(AAudioStream* stream);
+AAUDIO_API int32_t AAudioStream_getSampleRate(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * A stream has one or more channels of data.
@@ -1123,7 +1119,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return actual number of channels
  */
-AAUDIO_API int32_t AAudioStream_getChannelCount(AAudioStream* stream);
+AAUDIO_API int32_t AAudioStream_getChannelCount(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Identical to AAudioStream_getChannelCount().
@@ -1131,39 +1127,41 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return actual number of samples frame
  */
-AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* stream);
+AAUDIO_API int32_t AAudioStream_getSamplesPerFrame(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return actual device ID
  */
-AAUDIO_API int32_t AAudioStream_getDeviceId(AAudioStream* stream);
+AAUDIO_API int32_t AAudioStream_getDeviceId(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return actual data format
  */
-AAUDIO_API aaudio_format_t AAudioStream_getFormat(AAudioStream* stream);
+AAUDIO_API aaudio_format_t AAudioStream_getFormat(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Provide actual sharing mode.
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return  actual sharing mode
  */
-AAUDIO_API aaudio_sharing_mode_t AAudioStream_getSharingMode(AAudioStream* stream);
+AAUDIO_API aaudio_sharing_mode_t AAudioStream_getSharingMode(AAudioStream* stream)
+        __INTRODUCED_IN(26);
 
 /**
  * Get the performance mode used by the stream.
  *
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  */
-AAUDIO_API aaudio_performance_mode_t AAudioStream_getPerformanceMode(AAudioStream* stream);
+AAUDIO_API aaudio_performance_mode_t AAudioStream_getPerformanceMode(AAudioStream* stream)
+        __INTRODUCED_IN(26);
 
 /**
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return direction
  */
-AAUDIO_API aaudio_direction_t AAudioStream_getDirection(AAudioStream* stream);
+AAUDIO_API aaudio_direction_t AAudioStream_getDirection(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Passes back the number of frames that have been written since the stream was created.
@@ -1176,7 +1174,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return frames written
  */
-AAUDIO_API int64_t AAudioStream_getFramesWritten(AAudioStream* stream);
+AAUDIO_API int64_t AAudioStream_getFramesWritten(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Passes back the number of frames that have been read since the stream was created.
@@ -1189,7 +1187,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return frames read
  */
-AAUDIO_API int64_t AAudioStream_getFramesRead(AAudioStream* stream);
+AAUDIO_API int64_t AAudioStream_getFramesRead(AAudioStream* stream) __INTRODUCED_IN(26);
 
 /**
  * Passes back the session ID associated with this stream.
@@ -1213,7 +1211,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return session ID or AAUDIO_SESSION_ID_NONE
  */
-AAUDIO_API aaudio_session_id_t AAudioStream_getSessionId(AAudioStream* stream);
+AAUDIO_API aaudio_session_id_t AAudioStream_getSessionId(AAudioStream* stream) __INTRODUCED_IN(28);
 
 /**
  * Passes back the time at which a particular frame was presented.
@@ -1238,9 +1236,7 @@
  * @return AAUDIO_OK or a negative error
  */
 AAUDIO_API aaudio_result_t AAudioStream_getTimestamp(AAudioStream* stream,
-                                      clockid_t clockid,
-                                      int64_t *framePosition,
-                                      int64_t *timeNanoseconds);
+        clockid_t clockid, int64_t *framePosition, int64_t *timeNanoseconds) __INTRODUCED_IN(26);
 
 /**
  * Return the use case for the stream.
@@ -1250,7 +1246,7 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return frames read
  */
-AAUDIO_API aaudio_usage_t AAudioStream_getUsage(AAudioStream* stream);
+AAUDIO_API aaudio_usage_t AAudioStream_getUsage(AAudioStream* stream) __INTRODUCED_IN(28);
 
 /**
  * Return the content type for the stream.
@@ -1260,7 +1256,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return content type, for example AAUDIO_CONTENT_TYPE_MUSIC
  */
-AAUDIO_API aaudio_content_type_t AAudioStream_getContentType(AAudioStream* stream);
+AAUDIO_API aaudio_content_type_t AAudioStream_getContentType(AAudioStream* stream)
+        __INTRODUCED_IN(28);
 
 /**
  * Return the input preset for the stream.
@@ -1270,7 +1267,8 @@
  * @param stream reference provided by AAudioStreamBuilder_openStream()
  * @return input preset, for example AAUDIO_INPUT_PRESET_CAMCORDER
  */
-AAUDIO_API aaudio_input_preset_t AAudioStream_getInputPreset(AAudioStream* stream);
+AAUDIO_API aaudio_input_preset_t AAudioStream_getInputPreset(AAudioStream* stream)
+        __INTRODUCED_IN(28);
 
 #ifdef __cplusplus
 }
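Note: every exported AAudio symbol above (and the NdkImage symbols later in this patch) now carries an __INTRODUCED_IN(api) tag. The macro is assumed to be bionic's availability annotation from <android/versioning.h>, roughly as sketched below; it only attaches metadata for the NDK stub/versioner tooling and does not change the generated code.

    // Assumed definition (see bionic's <android/versioning.h> for the real one):
    #define __INTRODUCED_IN(api_level) \
        __attribute__((annotate("introduced_in=" #api_level)))

    // So a declaration such as
    //     AAUDIO_API aaudio_result_t AAudio_createStreamBuilder(AAudioStreamBuilder** builder)
    //             __INTRODUCED_IN(26);
    // simply records that the symbol first shipped in API level 26.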
diff --git a/media/libaudioclient/Android.bp b/media/libaudioclient/Android.bp
index 2df37a8..f93d44b 100644
--- a/media/libaudioclient/Android.bp
+++ b/media/libaudioclient/Android.bp
@@ -53,7 +53,10 @@
     export_shared_lib_headers: ["libbinder"],
 
     local_include_dirs: ["include/media", "aidl"],
-    header_libs: ["libaudioclient_headers"],
+    header_libs: [
+        "libaudioclient_headers",
+        "libbase_headers",
+    ],
     export_header_lib_headers: ["libaudioclient_headers"],
 
     // for memory heap analysis
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index e2de8e7..0aabaf7 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -19,6 +19,7 @@
 #define LOG_TAG "AudioRecord"
 
 #include <inttypes.h>
+#include <android-base/macros.h>
 #include <sys/resource.h>
 
 #include <binder/IPCThreadState.h>
@@ -1446,7 +1447,7 @@
     case NS_WHENEVER:
         // Event driven: call wake() when callback notifications conditions change.
         ns = INT64_MAX;
-        // fall through
+        FALLTHROUGH_INTENDED;
     default:
         LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %" PRId64, ns);
         pauseInternal(ns);
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 76c9bfb..4881ddc 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -22,6 +22,7 @@
 #include <math.h>
 #include <sys/resource.h>
 
+#include <android-base/macros.h>
 #include <audio_utils/clock.h>
 #include <audio_utils/primitives.h>
 #include <binder/IPCThreadState.h>
@@ -2964,7 +2965,7 @@
         if (mProxy->getStreamEndDone()) {
             return true;
         }
-        // fall through
+        FALLTHROUGH_INTENDED;
     case STATE_ACTIVE:
     case STATE_STOPPING:
         break;
@@ -3083,7 +3084,7 @@
     case NS_WHENEVER:
         // Event driven: call wake() when callback notifications conditions change.
         ns = INT64_MAX;
-        // fall through
+        FALLTHROUGH_INTENDED;
     default:
         LOG_ALWAYS_FATAL_IF(ns < 0, "processAudioBuffer() returned %" PRId64, ns);
         pauseInternal(ns);
diff --git a/media/libaudioclient/AudioTrackShared.cpp b/media/libaudioclient/AudioTrackShared.cpp
index d40f193..01f400d 100644
--- a/media/libaudioclient/AudioTrackShared.cpp
+++ b/media/libaudioclient/AudioTrackShared.cpp
@@ -17,6 +17,7 @@
 #define LOG_TAG "AudioTrackShared"
 //#define LOG_NDEBUG 0
 
+#include <android-base/macros.h>
 #include <private/media/AudioTrackShared.h>
 #include <utils/Log.h>
 
@@ -247,7 +248,7 @@
                 ts = requested;
                 break;
             }
-            // fall through
+            FALLTHROUGH_INTENDED;
         case TIMEOUT_CONTINUE:
             // FIXME we do not retry if requested < 10ms? needs documentation on this state machine
             if (!measure || requested->tv_sec < total.tv_sec ||
@@ -505,7 +506,7 @@
                 ts = requested;
                 break;
             }
-            // fall through
+            FALLTHROUGH_INTENDED;
         case TIMEOUT_CONTINUE:
             // FIXME we do not retry if requested < 10ms? needs documentation on this state machine
             if (requested->tv_sec < total.tv_sec ||
diff --git a/media/libaudioclient/include/media/AudioPolicyHelper.h b/media/libaudioclient/include/media/AudioPolicyHelper.h
index 73ee0a7..fbe9546 100644
--- a/media/libaudioclient/include/media/AudioPolicyHelper.h
+++ b/media/libaudioclient/include/media/AudioPolicyHelper.h
@@ -16,6 +16,7 @@
 #ifndef AUDIO_POLICY_HELPER_H_
 #define AUDIO_POLICY_HELPER_H_
 
+#include <android-base/macros.h>
 #include <system/audio.h>
 
 static inline
@@ -81,7 +82,7 @@
         break;
     case AUDIO_STREAM_ENFORCED_AUDIBLE:
         attr->flags  |= AUDIO_FLAG_AUDIBILITY_ENFORCED;
-        // intended fall through, attributes in common with STREAM_SYSTEM
+        FALLTHROUGH_INTENDED; // attributes in common with STREAM_SYSTEM
     case AUDIO_STREAM_SYSTEM:
         attr->content_type = AUDIO_CONTENT_TYPE_SONIFICATION;
         attr->usage = AUDIO_USAGE_ASSISTANCE_SONIFICATION;
diff --git a/media/libaudioprocessing/tests/Android.mk b/media/libaudioprocessing/tests/Android.mk
index 23e1c3a..8e081a3 100644
--- a/media/libaudioprocessing/tests/Android.mk
+++ b/media/libaudioprocessing/tests/Android.mk
@@ -78,6 +78,8 @@
     liblog \
     libutils \
 
+LOCAL_HEADER_LIBRARIES := libbase_headers
+
 LOCAL_MODULE := test-resampler
 
 LOCAL_MODULE_TAGS := optional
diff --git a/media/libaudioprocessing/tests/test-resampler.cpp b/media/libaudioprocessing/tests/test-resampler.cpp
index fbc9326..f178bde 100644
--- a/media/libaudioprocessing/tests/test-resampler.cpp
+++ b/media/libaudioprocessing/tests/test-resampler.cpp
@@ -27,6 +27,7 @@
 #include <math.h>
 #include <audio_utils/primitives.h>
 #include <audio_utils/sndfile.h>
+#include <android-base/macros.h>
 #include <utils/Vector.h>
 #include <media/AudioBufferProvider.h>
 #include <media/AudioResampler.h>
@@ -87,14 +88,14 @@
                 }
                 return numValues;
             }
-            // fall through
+            FALLTHROUGH_INTENDED;
         case ',':
             if (hadDigit) {
                 hadDigit = false;
                 numValues++;
                 break;
             }
-            // fall through
+            FALLTHROUGH_INTENDED;
         default:
             return -1;
         }
diff --git a/media/libmedia/Android.bp b/media/libmedia/Android.bp
index 1b3a1be..8921b22 100644
--- a/media/libmedia/Android.bp
+++ b/media/libmedia/Android.bp
@@ -3,6 +3,7 @@
     vendor_available: true,
     export_include_dirs: ["include"],
     header_libs:[
+        "libbase_headers",
         "libgui_headers",
         "libstagefright_headers",
         "media_plugin_headers",
diff --git a/media/libmedia/MediaUtils.cpp b/media/libmedia/MediaUtils.cpp
index bcdc3bd..320c7a9 100644
--- a/media/libmedia/MediaUtils.cpp
+++ b/media/libmedia/MediaUtils.cpp
@@ -34,7 +34,7 @@
     size_t percentageOfTotalMem) {
 
     if (running_with_asan()) {
-        ALOGW("Running with ASan, skip enforcing memory limitations.");
+        ALOGW("Running with (HW)ASan, skip enforcing memory limitations.");
         return;
     }
 
diff --git a/media/libmedia/MediaUtils.h b/media/libmedia/MediaUtils.h
index a678bcc..26075c4 100644
--- a/media/libmedia/MediaUtils.h
+++ b/media/libmedia/MediaUtils.h
@@ -20,9 +20,10 @@
 namespace android {
 
 extern "C" void __asan_init(void) __attribute__((weak));
+extern "C" void __hwasan_init(void) __attribute__((weak));
 
 static inline int running_with_asan() {
-    return &__asan_init != 0;
+    return &__asan_init != 0 || &__hwasan_init != 0;
 }
 
 /**
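Note: MediaUtils.h reuses the weak-symbol idiom to detect HWASan as well as ASan. Because both init hooks are declared weak, their addresses are null unless the corresponding sanitizer runtime is actually linked in, so the check costs a pair of pointer comparisons at run time. A self-contained sketch of the idiom (the declarations match the header above; the surrounding main() is hypothetical):

    #include <cstdio>

    // Weak declarations: resolve to null when no (HW)ASan runtime provides them.
    extern "C" void __asan_init(void) __attribute__((weak));
    extern "C" void __hwasan_init(void) __attribute__((weak));

    static inline int running_with_asan() {
        return &__asan_init != 0 || &__hwasan_init != 0;
    }

    int main() {
        // Prints 1 under an ASan/HWASan build, 0 otherwise; callers such as
        // limitProcessMemory() in MediaUtils.cpp use this to skip address-space limits.
        std::printf("%d\n", running_with_asan());
        return 0;
    }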
diff --git a/media/libmedia/mediarecorder.cpp b/media/libmedia/mediarecorder.cpp
index 721a043..92cfb1c 100644
--- a/media/libmedia/mediarecorder.cpp
+++ b/media/libmedia/mediarecorder.cpp
@@ -20,6 +20,7 @@
 
 #include <inttypes.h>
 
+#include <android-base/macros.h>
 #include <utils/Log.h>
 #include <media/mediarecorder.h>
 #include <binder/IServiceManager.h>
@@ -597,7 +598,8 @@
             if (OK != ret) {
                 return ret;  // No need to continue
             }
-        }  // Intentional fall through
+            FALLTHROUGH_INTENDED;
+        }
         case MEDIA_RECORDER_INITIALIZED:
             ret = close();
             break;
diff --git a/media/libmediaplayer2/nuplayer2/Android.bp b/media/libmediaplayer2/nuplayer2/Android.bp
index 1634f35..c8ddc11 100644
--- a/media/libmediaplayer2/nuplayer2/Android.bp
+++ b/media/libmediaplayer2/nuplayer2/Android.bp
@@ -16,6 +16,7 @@
     ],
 
     header_libs: [
+        "libbase_headers",
         "libmediaplayer2_headers",
         "media_plugin_headers",
     ],
diff --git a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp
index 03d17a5..bafa653 100644
--- a/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp
+++ b/media/libmediaplayer2/nuplayer2/NuPlayer2Driver.cpp
@@ -17,6 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "NuPlayer2Driver"
 #include <inttypes.h>
+#include <android-base/macros.h>
 #include <utils/Log.h>
 #include <cutils/properties.h>
 
@@ -297,8 +298,7 @@
         case STATE_PREPARED:
         {
             mPlayer->start();
-
-            // fall through
+            FALLTHROUGH_INTENDED;
         }
 
         case STATE_RUNNING:
@@ -940,7 +940,7 @@
                     mPlayer->pause();
                     mState = STATE_PAUSED;
                 }
-                // fall through
+                FALLTHROUGH_INTENDED;
             }
 
             case MEDIA2_ERROR:
diff --git a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
index 3e5bdd6..f2c8f64 100644
--- a/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
+++ b/media/libmediaplayerservice/nuplayer/NuPlayerDriver.cpp
@@ -17,6 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "NuPlayerDriver"
 #include <inttypes.h>
+#include <android-base/macros.h>
 #include <utils/Log.h>
 #include <cutils/properties.h>
 
@@ -344,7 +345,7 @@
 
             CHECK_EQ(mState, STATE_PREPARED);
 
-            // fall through
+            FALLTHROUGH_INTENDED;
         }
 
         case STATE_PAUSED:
@@ -353,7 +354,7 @@
         {
             mPlayer->start();
 
-            // fall through
+            FALLTHROUGH_INTENDED;
         }
 
         case STATE_RUNNING:
@@ -382,7 +383,7 @@
     switch (mState) {
         case STATE_RUNNING:
             mPlayer->pause();
-            // fall through
+            FALLTHROUGH_INTENDED;
 
         case STATE_PAUSED:
             mState = STATE_STOPPED;
@@ -991,7 +992,7 @@
                 mPlayer->pause();
                 mState = STATE_PAUSED;
             }
-            // fall through
+            FALLTHROUGH_INTENDED;
         }
 
         case MEDIA_ERROR:
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index ead05a3..4b04408 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -1619,7 +1619,7 @@
             if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                 (void)cancelBufferToNativeWindow(info);
             }
-            // fall through
+            FALLTHROUGH_INTENDED;
 
         case BufferInfo::OWNED_BY_NATIVE_WINDOW:
             err = mOMXNode->freeBuffer(portIndex, info->mBufferID);
@@ -5019,6 +5019,7 @@
                         }
                     }
                     // Fall through to set up mime.
+                    FALLTHROUGH_INTENDED;
                 }
 
                 default:
@@ -7849,7 +7850,7 @@
                 msg->setInt32("generation", mCodec->mStateGeneration);
                 msg->post(3000000);
             }
-            // fall-through
+            FALLTHROUGH_INTENDED;
         }
         case kWhatResume:
         case kWhatSetParameters:
diff --git a/media/libstagefright/MediaCodec.cpp b/media/libstagefright/MediaCodec.cpp
index 72eff94..da16fde 100644
--- a/media/libstagefright/MediaCodec.cpp
+++ b/media/libstagefright/MediaCodec.cpp
@@ -1821,8 +1821,8 @@
                             // the shutdown complete notification. If we
                             // don't, we'll timeout and force release.
                             sendErrorResponse = false;
+                            FALLTHROUGH_INTENDED;
                         }
-                        // fall-thru
                         case STOPPING:
                         {
                             if (mFlags & kFlagSawMediaServerDie) {
diff --git a/media/libstagefright/MediaExtractorFactory.cpp b/media/libstagefright/MediaExtractorFactory.cpp
index f6c61a0..06aedb7 100644
--- a/media/libstagefright/MediaExtractorFactory.cpp
+++ b/media/libstagefright/MediaExtractorFactory.cpp
@@ -309,12 +309,6 @@
 #endif
             "/extractors", *newList);
 
-    RegisterExtractorsInSystem("/vendor/lib"
-#ifdef __LP64__
-            "64"
-#endif
-            "/extractors", *newList);
-
     if (newUpdateApkPath != nullptr) {
         RegisterExtractorsInApk(newUpdateApkPath, *newList);
     }
diff --git a/media/libstagefright/StagefrightMetadataRetriever.cpp b/media/libstagefright/StagefrightMetadataRetriever.cpp
index e80ec3b..c51ac8d 100644
--- a/media/libstagefright/StagefrightMetadataRetriever.cpp
+++ b/media/libstagefright/StagefrightMetadataRetriever.cpp
@@ -20,6 +20,7 @@
 #include <inttypes.h>
 
 #include <utils/Log.h>
+#include <cutils/properties.h>
 
 #include "include/FrameDecoder.h"
 #include "include/StagefrightMetadataRetriever.h"
@@ -204,11 +205,14 @@
         trackMeta->setCString(kKeyMIMEType, mime);
     }
 
+    bool preferhw = property_get_bool(
+            "media.stagefright.thumbnail.prefer_hw_codecs", false);
+    uint32_t flags = preferhw ? 0 : MediaCodecList::kPreferSoftwareCodecs;
     Vector<AString> matchingCodecs;
     MediaCodecList::findMatchingCodecs(
             mime,
             false, /* encoder */
-            MediaCodecList::kPreferSoftwareCodecs,
+            flags,
             &matchingCodecs);
 
     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
@@ -325,11 +329,14 @@
     const char *mime;
     CHECK(trackMeta->findCString(kKeyMIMEType, &mime));
 
+    bool preferhw = property_get_bool(
+            "media.stagefright.thumbnail.prefer_hw_codecs", false);
+    uint32_t flags = preferhw ? 0 : MediaCodecList::kPreferSoftwareCodecs;
     Vector<AString> matchingCodecs;
     MediaCodecList::findMatchingCodecs(
             mime,
             false, /* encoder */
-            MediaCodecList::kPreferSoftwareCodecs,
+            flags,
             &matchingCodecs);
 
     for (size_t i = 0; i < matchingCodecs.size(); ++i) {
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
index bc0a69f..d393c7a 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.cpp
@@ -147,7 +147,7 @@
     mEndOfOutput = false;
     mOutputDelayCompensated = 0;
     mOutputDelayRingBufferSize = 2048 * MAX_CHANNEL_COUNT * kNumDelayBlocksMax;
-    mOutputDelayRingBuffer = new short[mOutputDelayRingBufferSize];
+    mOutputDelayRingBuffer = new int16_t[mOutputDelayRingBufferSize];
     mOutputDelayRingBufferWritePos = 0;
     mOutputDelayRingBufferReadPos = 0;
     mOutputDelayRingBufferFilled = 0;
diff --git a/media/libstagefright/codecs/aacdec/SoftAAC2.h b/media/libstagefright/codecs/aacdec/SoftAAC2.h
index 73a3965..5bee710 100644
--- a/media/libstagefright/codecs/aacdec/SoftAAC2.h
+++ b/media/libstagefright/codecs/aacdec/SoftAAC2.h
@@ -81,7 +81,7 @@
     bool mEndOfOutput;
     int32_t mOutputDelayCompensated;
     int32_t mOutputDelayRingBufferSize;
-    short *mOutputDelayRingBuffer;
+    int16_t *mOutputDelayRingBuffer;
     int32_t mOutputDelayRingBufferWritePos;
     int32_t mOutputDelayRingBufferReadPos;
     int32_t mOutputDelayRingBufferFilled;
diff --git a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.cpp b/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.cpp
index b8cfefa..ddc818e 100644
--- a/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.cpp
+++ b/media/libstagefright/codecs/amrwb/src/pvamrwbdecoder.cpp
@@ -455,7 +455,7 @@
                              &exc2[i_subfr],
                              0,
                              &synth16k[i_subfr *5/4],
-                             (short) 1,
+                             1,
                              HfIsf,
                              nb_bits,
                              newDTXState,
diff --git a/media/libstagefright/codecs/common/include/voType.h b/media/libstagefright/codecs/common/include/voType.h
index da208d4..73f24d0 100644
--- a/media/libstagefright/codecs/common/include/voType.h
+++ b/media/libstagefright/codecs/common/include/voType.h
@@ -22,6 +22,8 @@
 #ifndef __voType_H__
 #define __voType_H__
 
+#include <stdint.h>
+
 #ifdef __cplusplus
 extern "C" {
 #endif /* __cplusplus */
@@ -62,28 +64,28 @@
 typedef void VO_VOID;
 
 /** VO_U8 is an 8 bit unsigned quantity that is byte aligned */
-typedef unsigned char VO_U8;
+typedef uint8_t VO_U8;
 
 /** VO_BYTE is an 8 bit unsigned quantity that is byte aligned */
-typedef unsigned char VO_BYTE;
+typedef uint8_t VO_BYTE;
 
 /** VO_S8 is an 8 bit signed quantity that is byte aligned */
-typedef signed char VO_S8;
+typedef int8_t VO_S8;
 
 /** VO_CHAR is an 8 bit signed quantity that is byte aligned */
-typedef char VO_CHAR;
+typedef int8_t VO_CHAR;
 
 /** VO_U16 is a 16 bit unsigned quantity that is 16 bit word aligned */
-typedef unsigned short VO_U16;
+typedef uint16_t VO_U16;
 
 /** VO_S16 is a 16 bit signed quantity that is 16 bit word aligned */
-typedef signed short VO_S16;
+typedef int16_t VO_S16;
 
 /** VO_U32 is a 32 bit unsigned quantity that is 32 bit word aligned */
-typedef unsigned long VO_U32;
+typedef uint32_t VO_U32;
 
 /** VO_S32 is a 32 bit signed quantity that is 32 bit word aligned */
-typedef signed long VO_S32;
+typedef int32_t VO_S32;
 
 /* Users with compilers that cannot accept the "long long" designation should
    define the VO_SKIP64BIT macro.  It should be noted that this may cause
@@ -94,14 +96,14 @@
 #ifndef VO_SKIP64BIT
 #ifdef _MSC_VER
 /** VO_U64 is a 64 bit unsigned quantity that is 64 bit word aligned */
-typedef unsigned __int64  VO_U64;
+typedef uint64_t  VO_U64;
 /** VO_S64 is a 64 bit signed quantity that is 64 bit word aligned */
-typedef signed   __int64  VO_S64;
+typedef int64_t  VO_S64;
 #else // WIN32
 /** VO_U64 is a 64 bit unsigned quantity that is 64 bit word aligned */
-typedef unsigned long long VO_U64;
+typedef uint64_t VO_U64;
 /** VO_S64 is a 64 bit signed quantity that is 64 bit word aligned */
-typedef signed long long VO_S64;
+typedef int64_t VO_S64;
 #endif // WIN32
 #endif // VO_SKIP64BIT
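Note: the voType.h change matters on LP64 builds, where `unsigned long` and `long long` are both 8 bytes, so the old VO_U32/VO_S32 typedefs were no longer 32-bit. Mapping everything onto <stdint.h> types restores the widths the comments promise on both 32- and 64-bit targets. Hypothetical compile-time guards (not part of this patch) that would lock this in:

    // C++11 sketch; voType.h itself is a C header, so these would live in a test.
    static_assert(sizeof(VO_U32) == 4, "VO_U32 must be exactly 32 bits");
    static_assert(sizeof(VO_S32) == 4, "VO_S32 must be exactly 32 bits");
    static_assert(sizeof(VO_U64) == 8, "VO_U64 must be exactly 64 bits");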
 
diff --git a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
index 2c0f224..4db0060 100644
--- a/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
+++ b/media/libstagefright/codecs/flac/dec/SoftFlacDecoder.cpp
@@ -305,7 +305,7 @@
     while ((!inQueue.empty() || mSawInputEOS) && !outQueue.empty() && !mFinishedDecoder) {
         BufferInfo *outInfo = *outQueue.begin();
         OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader;
-        short *outBuffer = reinterpret_cast<short *>(outHeader->pBuffer + outHeader->nOffset);
+        int16_t *outBuffer = reinterpret_cast<int16_t *>(outHeader->pBuffer + outHeader->nOffset);
         size_t outBufferSize = outHeader->nAllocLen - outHeader->nOffset;
         int64_t timeStamp = 0;
 
diff --git a/media/libstagefright/codecs/flac/enc/Android.bp b/media/libstagefright/codecs/flac/enc/Android.bp
index 46b974d..73f0dac 100644
--- a/media/libstagefright/codecs/flac/enc/Android.bp
+++ b/media/libstagefright/codecs/flac/enc/Android.bp
@@ -30,6 +30,7 @@
         "liblog",
     ],
 
+    header_libs: ["libbase_headers"],
     static_libs: ["libFLAC"],
 
     name: "libstagefright_soft_flacenc",
diff --git a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
index fdc8975..955f211 100644
--- a/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
+++ b/media/libstagefright/codecs/flac/enc/SoftFlacEncoder.cpp
@@ -16,6 +16,7 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "SoftFlacEncoder"
+#include <android-base/macros.h>
 #include <utils/Log.h>
 
 #include "SoftFlacEncoder.h"
@@ -335,7 +336,7 @@
                 }
             }
 
-            // fall through
+            FALLTHROUGH_INTENDED;
         }
 
         default:
diff --git a/media/libstagefright/codecs/m4v_h263/enc/include/mp4enc_api.h b/media/libstagefright/codecs/m4v_h263/enc/include/mp4enc_api.h
index 9451479..d5a3ff1 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/include/mp4enc_api.h
+++ b/media/libstagefright/codecs/m4v_h263/enc/include/mp4enc_api.h
@@ -18,16 +18,17 @@
 #ifndef _MP4ENC_API_H_
 #define _MP4ENC_API_H_
 
+#include <stdint.h>
 #include <string.h>
 
 #ifndef _PV_TYPES_
 #define _PV_TYPES_
-typedef unsigned char UChar;
-typedef char Char;
+typedef uint8_t UChar;
+typedef int8_t Char;
 typedef unsigned int UInt;
 typedef int Int;
-typedef unsigned short UShort;
-typedef short Short;
+typedef uint16_t UShort;
+typedef int16_t Short;
 typedef unsigned int Bool;
 typedef uint32_t ULong;
 
diff --git a/media/libstagefright/codecs/m4v_h263/enc/src/mp4def.h b/media/libstagefright/codecs/m4v_h263/enc/src/mp4def.h
index 2d44482..dbd70dc 100644
--- a/media/libstagefright/codecs/m4v_h263/enc/src/mp4def.h
+++ b/media/libstagefright/codecs/m4v_h263/enc/src/mp4def.h
@@ -52,13 +52,13 @@
 
 #ifndef _PV_TYPES_
 #define _PV_TYPES_
-typedef unsigned char UChar;
-typedef char Char;
+typedef uint8_t UChar;
+typedef int8_t Char;
 typedef unsigned int UInt;
 typedef int Int;
-typedef unsigned short UShort;
-typedef short Short;
-typedef short int SInt;
+typedef uint16_t UShort;
+typedef int16_t Short;
+typedef int16_t SInt;
 typedef unsigned int Bool;
 typedef uint32_t ULong;
 typedef void Void;
diff --git a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
index f6257b1..2dfba13 100644
--- a/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
+++ b/media/libstagefright/codecs/on2/enc/SoftVPXEncoder.cpp
@@ -21,6 +21,7 @@
 #include "SoftVP8Encoder.h"
 #include "SoftVP9Encoder.h"
 
+#include <android-base/macros.h>
 #include <utils/Log.h>
 #include <utils/misc.h>
 
@@ -557,7 +558,7 @@
               break;
           case kTemporalUpdateGoldenWithoutDependency:
               flags |= VP8_EFLAG_NO_REF_GF;
-              // Deliberately no break here.
+              FALLTHROUGH_INTENDED;
           case kTemporalUpdateGolden:
               flags |= VP8_EFLAG_NO_REF_ARF;
               flags |= VP8_EFLAG_NO_UPD_ARF;
@@ -566,14 +567,14 @@
           case kTemporalUpdateAltrefWithoutDependency:
               flags |= VP8_EFLAG_NO_REF_ARF;
               flags |= VP8_EFLAG_NO_REF_GF;
-              // Deliberately no break here.
+              FALLTHROUGH_INTENDED;
           case kTemporalUpdateAltref:
               flags |= VP8_EFLAG_NO_UPD_GF;
               flags |= VP8_EFLAG_NO_UPD_LAST;
               break;
           case kTemporalUpdateNoneNoRefAltref:
               flags |= VP8_EFLAG_NO_REF_ARF;
-              // Deliberately no break here.
+              FALLTHROUGH_INTENDED;
           case kTemporalUpdateNone:
               flags |= VP8_EFLAG_NO_UPD_GF;
               flags |= VP8_EFLAG_NO_UPD_ARF;
diff --git a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
index 8912f8a..d534f64 100644
--- a/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
+++ b/media/libstagefright/codecs/vorbis/dec/SoftVorbis.cpp
@@ -548,11 +548,21 @@
         // Make sure that the next buffer output does not still
         // depend on fragments from the last one decoded.
 
+        mInputBufferCount = 0;
         mNumFramesOutput = 0;
+        if (mState != NULL) {
+            vorbis_dsp_clear(mState);
+            delete mState;
+            mState = NULL;
+        }
+        if (mVi != NULL) {
+            vorbis_info_clear(mVi);
+            delete mVi;
+            mVi = NULL;
+        }
         mSawInputEos = false;
         mSignalledOutputEos = false;
         mNumFramesLeftOnPage = -1;
-        vorbis_dsp_restart(mState);
     }
 }
 
diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
index 05f4104..70f52c3 100644
--- a/media/libstagefright/colorconversion/ColorConverter.cpp
+++ b/media/libstagefright/colorconversion/ColorConverter.cpp
@@ -16,6 +16,7 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "ColorConverter"
+#include <android-base/macros.h>
 #include <utils/Log.h>
 
 #include <media/stagefright/foundation/ADebug.h>
@@ -62,7 +63,7 @@
             if (mDstFormat == OMX_COLOR_FormatYUV444Y410) {
                 return true;
             }
-            // fall-thru
+            FALLTHROUGH_INTENDED;
         case OMX_COLOR_FormatYUV420Planar:
             return mDstFormat == OMX_COLOR_Format16bitRGB565
                     || mDstFormat == OMX_COLOR_Format32BitRGBA8888
diff --git a/media/libstagefright/flac/dec/FLACDecoder.cpp b/media/libstagefright/flac/dec/FLACDecoder.cpp
index a2b6ab7..dfdc41c 100644
--- a/media/libstagefright/flac/dec/FLACDecoder.cpp
+++ b/media/libstagefright/flac/dec/FLACDecoder.cpp
@@ -120,7 +120,7 @@
 // Copy samples from FLAC native 32-bit non-interleaved to 16-bit interleaved.
 // These are candidates for optimization if needed.
 static void copyMono8(
-        short *dst,
+        int16_t *dst,
         const int * src[FLACDecoder::kMaxChannels],
         unsigned nSamples,
         unsigned /* nChannels */) {
@@ -130,7 +130,7 @@
 }
 
 static void copyStereo8(
-        short *dst,
+        int16_t *dst,
         const int * src[FLACDecoder::kMaxChannels],
         unsigned nSamples,
         unsigned /* nChannels */) {
@@ -141,7 +141,7 @@
 }
 
 static void copyMultiCh8(
-        short *dst,
+        int16_t *dst,
         const int * src[FLACDecoder::kMaxChannels],
         unsigned nSamples,
         unsigned nChannels) {
@@ -153,7 +153,7 @@
 }
 
 static void copyMono16(
-        short *dst,
+        int16_t *dst,
         const int * src[FLACDecoder::kMaxChannels],
         unsigned nSamples,
         unsigned /* nChannels */) {
@@ -163,7 +163,7 @@
 }
 
 static void copyStereo16(
-        short *dst,
+        int16_t *dst,
         const int * src[FLACDecoder::kMaxChannels],
         unsigned nSamples,
         unsigned /* nChannels */) {
@@ -174,7 +174,7 @@
 }
 
 static void copyMultiCh16(
-        short *dst,
+        int16_t *dst,
         const int * src[FLACDecoder::kMaxChannels],
         unsigned nSamples,
         unsigned nChannels) {
@@ -187,7 +187,7 @@
 
 // TODO: 24-bit versions should do dithering or noise-shaping, here or in AudioFlinger
 static void copyMono24(
-        short *dst,
+        int16_t *dst,
         const int * src[FLACDecoder::kMaxChannels],
         unsigned nSamples,
         unsigned /* nChannels */) {
@@ -197,7 +197,7 @@
 }
 
 static void copyStereo24(
-        short *dst,
+        int16_t *dst,
         const int * src[FLACDecoder::kMaxChannels],
         unsigned nSamples,
         unsigned /* nChannels */) {
@@ -208,7 +208,7 @@
 }
 
 static void copyMultiCh24(
-        short *dst,
+        int16_t *dst,
         const int * src[FLACDecoder::kMaxChannels],
         unsigned nSamples,
         unsigned nChannels) {
@@ -391,7 +391,7 @@
     static const struct {
         unsigned mChannels;
         unsigned mBitsPerSample;
-        void (*mCopy)(short *dst, const int * src[kMaxChannels],
+        void (*mCopy)(int16_t *dst, const int * src[kMaxChannels],
                 unsigned nSamples, unsigned nChannels);
     } table[] = {
         { 1,  8, copyMono8     },
@@ -420,7 +420,7 @@
 }
 
 status_t FLACDecoder::decodeOneFrame(const uint8_t *inBuffer, size_t inBufferLen,
-        short *outBuffer, size_t *outBufferLen) {
+        int16_t *outBuffer, size_t *outBufferLen) {
     ALOGV("decodeOneFrame: input size(%zu)", inBufferLen);
 
     if (!mStreamInfoValid) {
@@ -469,12 +469,12 @@
         return ERROR_MALFORMED;
     }
 
-    size_t bufferSize = blocksize * getChannels() * sizeof(short);
+    size_t bufferSize = blocksize * getChannels() * sizeof(int16_t);
     if (bufferSize > *outBufferLen) {
         ALOGW("decodeOneFrame: output buffer holds only partial frame %zu:%zu",
                 *outBufferLen, bufferSize);
-        blocksize = *outBufferLen / (getChannels() * sizeof(short));
-        bufferSize = blocksize * getChannels() * sizeof(short);
+        blocksize = *outBufferLen / (getChannels() * sizeof(int16_t));
+        bufferSize = blocksize * getChannels() * sizeof(int16_t);
     }
 
     if (mCopy == nullptr) {
diff --git a/media/libstagefright/flac/dec/FLACDecoder.h b/media/libstagefright/flac/dec/FLACDecoder.h
index 1a33cae..af419a2 100644
--- a/media/libstagefright/flac/dec/FLACDecoder.h
+++ b/media/libstagefright/flac/dec/FLACDecoder.h
@@ -41,7 +41,7 @@
 
     status_t parseMetadata(const uint8_t *inBuffer, size_t inBufferLen);
     status_t decodeOneFrame(const uint8_t *inBuffer, size_t inBufferLen,
-            short *outBuffer, size_t *outBufferLen);
+            int16_t *outBuffer, size_t *outBufferLen);
     void flush();
     virtual ~FLACDecoder();
 
@@ -89,7 +89,7 @@
     // most recent error reported by libFLAC decoder
     FLAC__StreamDecoderErrorStatus mErrorStatus;
 
-    void (*mCopy)(short *dst, const int *src[kMaxChannels], unsigned nSamples, unsigned nChannels);
+    void (*mCopy)(int16_t *dst, const int *src[kMaxChannels], unsigned nSamples, unsigned nChannels);
 
     status_t init();
 
diff --git a/media/libstagefright/foundation/AString.cpp b/media/libstagefright/foundation/AString.cpp
index c6ef75f..a8adff5 100644
--- a/media/libstagefright/foundation/AString.cpp
+++ b/media/libstagefright/foundation/AString.cpp
@@ -125,12 +125,10 @@
 }
 
 void AString::clear() {
-    if (mData && mData != kEmptyString) {
+    if (mData != kEmptyString) {
         free(mData);
-        mData = NULL;
+        mData = (char *)kEmptyString;
     }
-
-    mData = (char *)kEmptyString;
     mSize = 0;
     mAllocSize = 1;
 }
diff --git a/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h b/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h
index bac8fa9..a8b88fd 100644
--- a/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h
+++ b/media/libstagefright/foundation/include/media/stagefright/foundation/ADebug.h
@@ -63,38 +63,21 @@
             __FILE__ ":" LITERAL_TO_STRING(__LINE__)    \
             " CHECK(" #condition ") failed.")
 
-#define MAKE_COMPARATOR(suffix,op)                          \
-    template<class A, class B>                              \
-    AString Compare_##suffix(const A &a, const B &b) {      \
-        AString res;                                        \
-        if (!(a op b)) {                                    \
-            res.append(a);                                  \
-            res.append(" vs. ");                            \
-            res.append(b);                                  \
-        }                                                   \
-        return res;                                         \
-    }
-
-MAKE_COMPARATOR(EQ,==)
-MAKE_COMPARATOR(NE,!=)
-MAKE_COMPARATOR(LE,<=)
-MAKE_COMPARATOR(GE,>=)
-MAKE_COMPARATOR(LT,<)
-MAKE_COMPARATOR(GT,>)
-
 #ifdef CHECK_OP
 #undef CHECK_OP
 #endif
 
 #define CHECK_OP(x,y,suffix,op)                                         \
     do {                                                                \
-        AString ___res = Compare_##suffix(x, y);                        \
-        if (!___res.empty()) {                                          \
+        const auto &a = x;                                              \
+        const auto &b = y;                                              \
+        if (!(a op b)) {                                                \
             AString ___full =                                           \
                 __FILE__ ":" LITERAL_TO_STRING(__LINE__)                \
                     " CHECK_" #suffix "( " #x "," #y ") failed: ";      \
-            ___full.append(___res);                                     \
-                                                                        \
+            ___full.append(a);                                          \
+            ___full.append(" vs. ");                                    \
+            ___full.append(b);                                          \
             LOG_ALWAYS_FATAL("%s", ___full.c_str());                    \
         }                                                               \
     } while (false)
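Note: the ADebug.h rewrite drops the Compare_##suffix helper templates and inlines the comparison. Each operand is bound once to a const auto& reference, so side effects in the checked expressions still occur exactly once, and the failure message is only assembled when the check actually fails. A simplified expansion of a hypothetical CHECK_LT(consumed, produced) call site under the new macro:

    // Illustrative only; names `consumed`/`produced` are placeholders.
    do {
        const auto &a = consumed;   // evaluated once
        const auto &b = produced;   // evaluated once
        if (!(a < b)) {
            AString ___full = __FILE__ ":" LITERAL_TO_STRING(__LINE__)
                    " CHECK_LT( consumed,produced) failed: ";
            ___full.append(a);      // relies on existing AString::append overloads
            ___full.append(" vs. ");
            ___full.append(b);
            LOG_ALWAYS_FATAL("%s", ___full.c_str());
        }
    } while (false);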
diff --git a/media/libstagefright/httplive/Android.bp b/media/libstagefright/httplive/Android.bp
index 8a77401..2f933a0 100644
--- a/media/libstagefright/httplive/Android.bp
+++ b/media/libstagefright/httplive/Android.bp
@@ -46,6 +46,10 @@
         "android.hardware.cas.native@1.0",
     ],
 
+    header_libs: [
+        "libbase_headers",
+    ],
+
     static_libs: [
         "libstagefright_id3",
         "libstagefright_metadatautils",
diff --git a/media/libstagefright/httplive/LiveSession.cpp b/media/libstagefright/httplive/LiveSession.cpp
index 7eff8eb..86872c5 100644
--- a/media/libstagefright/httplive/LiveSession.cpp
+++ b/media/libstagefright/httplive/LiveSession.cpp
@@ -445,7 +445,7 @@
                return -EAGAIN;
             };
             (*accessUnit)->meta()->setInt32(
-                    "trackIndex", mPlaylist->getSelectedIndex());
+                    "track-index", mPlaylist->getSelectedIndex());
             (*accessUnit)->meta()->setInt64("baseUs", mRealTimeBaseUs);
         } else if (stream == STREAMTYPE_METADATA) {
             HLSTime mdTime((*accessUnit)->meta());
diff --git a/media/libstagefright/httplive/PlaylistFetcher.cpp b/media/libstagefright/httplive/PlaylistFetcher.cpp
index 9f39b5e..823f90e 100644
--- a/media/libstagefright/httplive/PlaylistFetcher.cpp
+++ b/media/libstagefright/httplive/PlaylistFetcher.cpp
@@ -16,6 +16,7 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "PlaylistFetcher"
+#include <android-base/macros.h>
 #include <utils/Log.h>
 #include <utils/misc.h>
 
@@ -267,7 +268,7 @@
                 break;
             }
 
-            // fall through
+            FALLTHROUGH_INTENDED;
         }
 
         case FIRST_UNCHANGED_RELOAD_ATTEMPT:
diff --git a/media/libstagefright/omx/Android.bp b/media/libstagefright/omx/Android.bp
index 3e6942b..6250045 100644
--- a/media/libstagefright/omx/Android.bp
+++ b/media/libstagefright/omx/Android.bp
@@ -29,6 +29,7 @@
     ],
 
     header_libs: [
+        "libbase_headers",
         "media_plugin_headers",
     ],
 
diff --git a/media/libstagefright/omx/OMXNodeInstance.cpp b/media/libstagefright/omx/OMXNodeInstance.cpp
index 7d2c2dd..d3b551d 100644
--- a/media/libstagefright/omx/OMXNodeInstance.cpp
+++ b/media/libstagefright/omx/OMXNodeInstance.cpp
@@ -16,6 +16,7 @@
 
 //#define LOG_NDEBUG 0
 #define LOG_TAG "OMXNodeInstance"
+#include <android-base/macros.h>
 #include <utils/Log.h>
 
 #include <inttypes.h>
@@ -459,7 +460,7 @@
                 break;
             }
 
-            // fall through
+            FALLTHROUGH_INTENDED;
         }
 
         case OMX_StateIdle:
@@ -486,7 +487,7 @@
             }
             CHECK_EQ(err, OMX_ErrorNone);
 
-            // fall through
+            FALLTHROUGH_INTENDED;
         }
 
         case OMX_StateLoaded:
@@ -2192,8 +2193,8 @@
                     // bump internal-state debug level for 2 input and output frames
                     Mutex::Autolock _l(mDebugLock);
                     bumpDebugLevel_l(2 /* numInputBuffers */, 2 /* numOutputBuffers */);
+                    FALLTHROUGH_INTENDED;
                 }
-                // fall through
                 default:
                     arg2String = portString(arg2);
             }
@@ -2204,7 +2205,7 @@
             break;
         case OMX_EventPortSettingsChanged:
             arg2String = asString((OMX_INDEXEXTTYPE)arg2);
-            // fall through
+            FALLTHROUGH_INTENDED;
         default:
             arg1String = portString(arg1);
     }
diff --git a/media/libstagefright/omx/OMXUtils.cpp b/media/libstagefright/omx/OMXUtils.cpp
index f7b569d..9ed4a99 100644
--- a/media/libstagefright/omx/OMXUtils.cpp
+++ b/media/libstagefright/omx/OMXUtils.cpp
@@ -19,6 +19,7 @@
 
 #include <string.h>
 
+#include <android-base/macros.h>
 #include <media/stagefright/omx/OMXUtils.h>
 #include <media/stagefright/foundation/ADebug.h>
 #include <media/stagefright/foundation/AUtils.h>
@@ -273,6 +274,7 @@
                 break;
             } else {
                 // fall through as YV12 is used for YUV420Planar by some codecs
+                FALLTHROUGH_INTENDED;
             }
 
         case OMX_COLOR_FormatYUV420Planar:
diff --git a/media/libstagefright/omx/tests/Android.bp b/media/libstagefright/omx/tests/Android.bp
index 3b521ab..ef36982 100644
--- a/media/libstagefright/omx/tests/Android.bp
+++ b/media/libstagefright/omx/tests/Android.bp
@@ -27,6 +27,10 @@
         "frameworks/native/include/media/openmax",
     ],
 
+    header_libs: [
+        "libbase_headers",
+    ],
+
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/libstagefright/omx/tests/OMXHarness.cpp b/media/libstagefright/omx/tests/OMXHarness.cpp
index 895a4ce..15a7655 100644
--- a/media/libstagefright/omx/tests/OMXHarness.cpp
+++ b/media/libstagefright/omx/tests/OMXHarness.cpp
@@ -17,6 +17,7 @@
 //#define LOG_NDEBUG 0
 #define LOG_TAG "OMXHarness"
 #include <inttypes.h>
+#include <android-base/macros.h>
 #include <utils/Log.h>
 
 #include "OMXHarness.h"
@@ -844,7 +845,7 @@
 
             case '?':
                 fprintf(stderr, "\n");
-                // fall through
+                FALLTHROUGH_INTENDED;
 
             case 'h':
             default:
diff --git a/media/libstagefright/xmlparser/Android.bp b/media/libstagefright/xmlparser/Android.bp
index a4fa342..5737ac2 100644
--- a/media/libstagefright/xmlparser/Android.bp
+++ b/media/libstagefright/xmlparser/Android.bp
@@ -19,6 +19,10 @@
         "libstagefright_omx_utils",
     ],
 
+    header_libs: [
+        "libbase_headers",
+    ],
+
     cflags: [
         "-Werror",
         "-Wall",
diff --git a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
index ffd30ea..0832944 100644
--- a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
+++ b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
@@ -19,6 +19,7 @@
 
 #include <media/stagefright/xmlparser/MediaCodecsXmlParser.h>
 
+#include <android-base/macros.h>
 #include <utils/Log.h>
 #include <media/stagefright/MediaErrors.h>
 #include <media/stagefright/omx/OMXUtils.h>
@@ -340,7 +341,7 @@
             }
         }
         inType = false;
-        // fall through
+        FALLTHROUGH_INTENDED;
 
         case SECTION_DECODER_TYPE:
         case SECTION_ENCODER_TYPE:
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index 19df760..f936118 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -36,13 +36,12 @@
 #ifndef _NDK_IMAGE_H
 #define _NDK_IMAGE_H
 
+#include <stdint.h>
 #include <sys/cdefs.h>
 
 #include "NdkMediaError.h"
 
-#if __ANDROID_API__ >= 26
 #include <android/hardware_buffer.h>
-#endif /* __ANDROID_API__ >= 26 */
 
 __BEGIN_DECLS
 
@@ -529,7 +528,7 @@
  *
  * @param image The {@link AImage} to be deleted.
  */
-void AImage_delete(AImage* image);
+void AImage_delete(AImage* image) __INTRODUCED_IN(24);
 
 /**
  * Query the width of the input {@link AImage}.
@@ -543,7 +542,7 @@
  *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
  *                 image has been deleted.</li></ul>
  */
-media_status_t AImage_getWidth(const AImage* image, /*out*/int32_t* width);
+media_status_t AImage_getWidth(const AImage* image, /*out*/int32_t* width) __INTRODUCED_IN(24);
 
 /**
  * Query the height of the input {@link AImage}.
@@ -557,7 +556,7 @@
  *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
  *                 image has been deleted.</li></ul>
  */
-media_status_t AImage_getHeight(const AImage* image, /*out*/int32_t* height);
+media_status_t AImage_getHeight(const AImage* image, /*out*/int32_t* height) __INTRODUCED_IN(24);
 
 /**
  * Query the format of the input {@link AImage}.
@@ -573,7 +572,7 @@
  *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
  *                 image has been deleted.</li></ul>
  */
-media_status_t AImage_getFormat(const AImage* image, /*out*/int32_t* format);
+media_status_t AImage_getFormat(const AImage* image, /*out*/int32_t* format) __INTRODUCED_IN(24);
 
 /**
  * Query the cropped rectangle of the input {@link AImage}.
@@ -590,7 +589,7 @@
  *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
  *                 image has been deleted.</li></ul>
  */
-media_status_t AImage_getCropRect(const AImage* image, /*out*/AImageCropRect* rect);
+media_status_t AImage_getCropRect(const AImage* image, /*out*/AImageCropRect* rect) __INTRODUCED_IN(24);
 
 /**
  * Query the timestamp of the input {@link AImage}.
@@ -614,7 +613,7 @@
  *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
  *                 image has been deleted.</li></ul>
  */
-media_status_t AImage_getTimestamp(const AImage* image, /*out*/int64_t* timestampNs);
+media_status_t AImage_getTimestamp(const AImage* image, /*out*/int64_t* timestampNs) __INTRODUCED_IN(24);
 
 /**
  * Query the number of planes of the input {@link AImage}.
@@ -632,7 +631,7 @@
  *         <li>{@link AMEDIA_ERROR_INVALID_OBJECT} if the {@link AImageReader} generated this
  *                 image has been deleted.</li></ul>
  */
-media_status_t AImage_getNumberOfPlanes(const AImage* image, /*out*/int32_t* numPlanes);
+media_status_t AImage_getNumberOfPlanes(const AImage* image, /*out*/int32_t* numPlanes) __INTRODUCED_IN(24);
 
 /**
  * Query the pixel stride of the input {@link AImage}.
@@ -660,7 +659,7 @@
  *                 for CPU access.</li></ul>
  */
 media_status_t AImage_getPlanePixelStride(
-        const AImage* image, int planeIdx, /*out*/int32_t* pixelStride);
+        const AImage* image, int planeIdx, /*out*/int32_t* pixelStride) __INTRODUCED_IN(24);
 
 /**
  * Query the row stride of the input {@link AImage}.
@@ -687,7 +686,7 @@
  *                 for CPU access.</li></ul>
  */
 media_status_t AImage_getPlaneRowStride(
-        const AImage* image, int planeIdx, /*out*/int32_t* rowStride);
+        const AImage* image, int planeIdx, /*out*/int32_t* rowStride) __INTRODUCED_IN(24);
 
 /**
  * Get the data pointer of the input image for direct application access.
@@ -712,7 +711,7 @@
  */
 media_status_t AImage_getPlaneData(
         const AImage* image, int planeIdx,
-        /*out*/uint8_t** data, /*out*/int* dataLength);
+        /*out*/uint8_t** data, /*out*/int* dataLength) __INTRODUCED_IN(24);
 
 #endif /* __ANDROID_API__ >= 24 */
 
@@ -732,7 +731,7 @@
  *
  * @see sync.h
  */
-void AImage_deleteAsync(AImage* image, int releaseFenceFd);
+void AImage_deleteAsync(AImage* image, int releaseFenceFd) __INTRODUCED_IN(26);
 
 /**
  * Get the hardware buffer handle of the input image intended for GPU and/or hardware access.
@@ -760,7 +759,7 @@
  *
  * @see AImageReader_ImageCallback
  */
-media_status_t AImage_getHardwareBuffer(const AImage* image, /*out*/AHardwareBuffer** buffer);
+media_status_t AImage_getHardwareBuffer(const AImage* image, /*out*/AHardwareBuffer** buffer) __INTRODUCED_IN(26);
 
 #endif /* __ANDROID_API__ >= 26 */
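
The __INTRODUCED_IN annotations only record the API level at which each symbol appeared; the calling conventions are unchanged. A minimal sketch of reading one plane back from an acquired image, using only the getters declared above (it assumes `image` came from an AImageReader acquire call; error handling is trimmed):

    #include <media/NdkImage.h>

    // Sketch: inspect geometry and the first plane of an already-acquired AImage,
    // then hand the buffer back to the reader.
    static void consumeImage(AImage *image) {
        int32_t width = 0, height = 0, format = 0;
        AImage_getWidth(image, &width);
        AImage_getHeight(image, &height);
        AImage_getFormat(image, &format);

        uint8_t *data = NULL;
        int dataLength = 0;
        // Plane 0 is the luma plane for the YUV formats.
        if (AImage_getPlaneData(image, /*planeIdx=*/0, &data, &dataLength) == AMEDIA_OK) {
            // ... read up to dataLength bytes from data ...
        }

        AImage_delete(image);   // returns the underlying buffer to the reader
    }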
 
diff --git a/media/ndk/include/media/NdkImageReader.h b/media/ndk/include/media/NdkImageReader.h
index 571410b..68de176 100644
--- a/media/ndk/include/media/NdkImageReader.h
+++ b/media/ndk/include/media/NdkImageReader.h
@@ -88,7 +88,7 @@
  */
 media_status_t AImageReader_new(
         int32_t width, int32_t height, int32_t format, int32_t maxImages,
-        /*out*/AImageReader** reader);
+        /*out*/AImageReader** reader) __INTRODUCED_IN(24);
 
 /**
  * Delete an {@link AImageReader} and return all images generated by this reader to system.
@@ -100,7 +100,7 @@
  *
  * @param reader The image reader to be deleted.
  */
-void AImageReader_delete(AImageReader* reader);
+void AImageReader_delete(AImageReader* reader) __INTRODUCED_IN(24);
 
 /**
  * Get a {@link ANativeWindow} that can be used to produce {@link AImage} for this image reader.
@@ -114,7 +114,7 @@
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
  *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or window is NULL.</li></ul>
  */
-media_status_t AImageReader_getWindow(AImageReader* reader, /*out*/ANativeWindow** window);
+media_status_t AImageReader_getWindow(AImageReader* reader, /*out*/ANativeWindow** window) __INTRODUCED_IN(24);
 
 /**
  * Query the default width of the {@link AImage} generated by this reader, in pixels.
@@ -130,7 +130,7 @@
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
  *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or width is NULL.</li></ul>
  */
-media_status_t AImageReader_getWidth(const AImageReader* reader, /*out*/int32_t* width);
+media_status_t AImageReader_getWidth(const AImageReader* reader, /*out*/int32_t* width) __INTRODUCED_IN(24);
 
 /**
  * Query the default height of the {@link AImage} generated by this reader, in pixels.
@@ -146,7 +146,7 @@
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
  *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or height is NULL.</li></ul>
  */
-media_status_t AImageReader_getHeight(const AImageReader* reader, /*out*/int32_t* height);
+media_status_t AImageReader_getHeight(const AImageReader* reader, /*out*/int32_t* height) __INTRODUCED_IN(24);
 
 /**
  * Query the format of the {@link AImage} generated by this reader.
@@ -159,7 +159,7 @@
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
  *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or format is NULL.</li></ul>
  */
-media_status_t AImageReader_getFormat(const AImageReader* reader, /*out*/int32_t* format);
+media_status_t AImageReader_getFormat(const AImageReader* reader, /*out*/int32_t* format) __INTRODUCED_IN(24);
 
 /**
  * Query the maximum number of concurrently acquired {@link AImage}s of this reader.
@@ -172,7 +172,7 @@
  *         <li>{@link AMEDIA_OK} if the method call succeeds.</li>
  *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader or maxImages is NULL.</li></ul>
  */
-media_status_t AImageReader_getMaxImages(const AImageReader* reader, /*out*/int32_t* maxImages);
+media_status_t AImageReader_getMaxImages(const AImageReader* reader, /*out*/int32_t* maxImages) __INTRODUCED_IN(24);
 
 /**
  * Acquire the next {@link AImage} from the image reader's queue.
@@ -208,7 +208,7 @@
  *
  * @see AImageReader_acquireLatestImage
  */
-media_status_t AImageReader_acquireNextImage(AImageReader* reader, /*out*/AImage** image);
+media_status_t AImageReader_acquireNextImage(AImageReader* reader, /*out*/AImage** image) __INTRODUCED_IN(24);
 
 /**
 
@@ -252,7 +252,7 @@
  *
  * @see AImageReader_acquireNextImage
  */
-media_status_t AImageReader_acquireLatestImage(AImageReader* reader, /*out*/AImage** image);
+media_status_t AImageReader_acquireLatestImage(AImageReader* reader, /*out*/AImage** image) __INTRODUCED_IN(24);
 
 
 /**
@@ -296,7 +296,7 @@
  *         <li>{@link AMEDIA_ERROR_INVALID_PARAMETER} if reader is NULL.</li></ul>
  */
 media_status_t AImageReader_setImageListener(
-        AImageReader* reader, AImageReader_ImageListener* listener);
+        AImageReader* reader, AImageReader_ImageListener* listener) __INTRODUCED_IN(24);
 
 #endif /* __ANDROID_API__ >= 24 */
 
@@ -365,7 +365,7 @@
  */
 media_status_t AImageReader_newWithUsage(
         int32_t width, int32_t height, int32_t format, uint64_t usage, int32_t maxImages,
-        /*out*/ AImageReader** reader);
+        /*out*/ AImageReader** reader) __INTRODUCED_IN(26);
 
 /**
  * Acquire the next {@link AImage} from the image reader's queue asynchronously.
@@ -384,7 +384,7 @@
  * @see sync_get_fence_info
  */
 media_status_t AImageReader_acquireNextImageAsync(
-        AImageReader* reader, /*out*/AImage** image, /*out*/int* acquireFenceFd);
+        AImageReader* reader, /*out*/AImage** image, /*out*/int* acquireFenceFd) __INTRODUCED_IN(26);
 
 /**
  * Acquire the latest {@link AImage} from the image reader's queue asynchronously, dropping older
@@ -404,7 +404,7 @@
  * @see sync_get_fence_info
  */
 media_status_t AImageReader_acquireLatestImageAsync(
-        AImageReader* reader, /*out*/AImage** image, /*out*/int* acquireFenceFd);
+        AImageReader* reader, /*out*/AImage** image, /*out*/int* acquireFenceFd) __INTRODUCED_IN(26);
 /**
  * Signature of the callback which is called when {@link AImageReader} is about to remove a buffer.
  *
@@ -459,7 +459,7 @@
  * @see AImage_getHardwareBuffer
  */
 media_status_t AImageReader_setBufferRemovedListener(
-        AImageReader* reader, AImageReader_BufferRemovedListener* listener);
+        AImageReader* reader, AImageReader_BufferRemovedListener* listener) __INTRODUCED_IN(26);
 
 #endif /* __ANDROID_API__ >= 26 */
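
A sketch of the reader lifecycle built from the entry points declared above; the AIMAGE_FORMAT_YUV_420_888 constant is assumed from NdkImage.h, and the producer side (for example a camera) is elided:

    #include <media/NdkImage.h>
    #include <media/NdkImageReader.h>

    // Sketch: create a reader, expose its window to a producer, drain one image, tear down.
    static media_status_t drainOneImage(void) {
        AImageReader *reader = NULL;
        media_status_t status = AImageReader_new(
                /*width=*/640, /*height=*/480,
                AIMAGE_FORMAT_YUV_420_888, /*maxImages=*/4, &reader);
        if (status != AMEDIA_OK) return status;

        ANativeWindow *window = NULL;
        AImageReader_getWindow(reader, &window);   // hand `window` to the producer

        AImage *image = NULL;
        status = AImageReader_acquireLatestImage(reader, &image);
        if (status == AMEDIA_OK) {
            // ... read planes with AImage_getPlaneData() ...
            AImage_delete(image);
        }

        AImageReader_delete(reader);   // also reclaims any images still held by the reader
        return status;
    }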
 
diff --git a/media/ndk/include/media/NdkMediaCodec.h b/media/ndk/include/media/NdkMediaCodec.h
index c49582d..9dc120d 100644
--- a/media/ndk/include/media/NdkMediaCodec.h
+++ b/media/ndk/include/media/NdkMediaCodec.h
@@ -128,23 +128,23 @@
  * When configuring, you will need to specify whether to use the codec as an
  * encoder or decoder.
  */
-AMediaCodec* AMediaCodec_createCodecByName(const char *name);
+AMediaCodec* AMediaCodec_createCodecByName(const char *name) __INTRODUCED_IN(21);
 
 /**
  * Create codec by mime type. Most applications will use this, specifying a
  * mime type obtained from media extractor.
  */
-AMediaCodec* AMediaCodec_createDecoderByType(const char *mime_type);
+AMediaCodec* AMediaCodec_createDecoderByType(const char *mime_type) __INTRODUCED_IN(21);
 
 /**
  * Create encoder by mime type.
  */
-AMediaCodec* AMediaCodec_createEncoderByType(const char *mime_type);
+AMediaCodec* AMediaCodec_createEncoderByType(const char *mime_type) __INTRODUCED_IN(21);
 
 /**
  * delete the codec and free its resources
  */
-media_status_t AMediaCodec_delete(AMediaCodec*);
+media_status_t AMediaCodec_delete(AMediaCodec*) __INTRODUCED_IN(21);
 
 /**
  * Configure the codec. For decoding you would typically get the format from an extractor.
@@ -154,43 +154,43 @@
         const AMediaFormat* format,
         ANativeWindow* surface,
         AMediaCrypto *crypto,
-        uint32_t flags);
+        uint32_t flags) __INTRODUCED_IN(21);
 
 /**
  * Start the codec. A codec must be configured before it can be started, and must be started
  * before buffers can be sent to it.
  */
-media_status_t AMediaCodec_start(AMediaCodec*);
+media_status_t AMediaCodec_start(AMediaCodec*) __INTRODUCED_IN(21);
 
 /**
  * Stop the codec.
  */
-media_status_t AMediaCodec_stop(AMediaCodec*);
+media_status_t AMediaCodec_stop(AMediaCodec*) __INTRODUCED_IN(21);
 
 /*
  * Flush the codec's input and output. All indices previously returned from calls to
  * AMediaCodec_dequeueInputBuffer and AMediaCodec_dequeueOutputBuffer become invalid.
  */
-media_status_t AMediaCodec_flush(AMediaCodec*);
+media_status_t AMediaCodec_flush(AMediaCodec*) __INTRODUCED_IN(21);
 
 /**
  * Get an input buffer. The specified buffer index must have been previously obtained from
  * dequeueInputBuffer, and not yet queued.
  */
-uint8_t* AMediaCodec_getInputBuffer(AMediaCodec*, size_t idx, size_t *out_size);
+uint8_t* AMediaCodec_getInputBuffer(AMediaCodec*, size_t idx, size_t *out_size) __INTRODUCED_IN(21);
 
 /**
  * Get an output buffer. The specified buffer index must have been previously obtained from
  * dequeueOutputBuffer, and not yet queued.
  */
-uint8_t* AMediaCodec_getOutputBuffer(AMediaCodec*, size_t idx, size_t *out_size);
+uint8_t* AMediaCodec_getOutputBuffer(AMediaCodec*, size_t idx, size_t *out_size) __INTRODUCED_IN(21);
 
 /**
  * Get the index of the next available input buffer. An app will typically use this with
  * getInputBuffer() to get a pointer to the buffer, then copy the data to be encoded or decoded
  * into the buffer before passing it to the codec.
  */
-ssize_t AMediaCodec_dequeueInputBuffer(AMediaCodec*, int64_t timeoutUs);
+ssize_t AMediaCodec_dequeueInputBuffer(AMediaCodec*, int64_t timeoutUs) __INTRODUCED_IN(21);
 
 /*
  * __USE_FILE_OFFSET64 changes the type of off_t in LP32, which changes the ABI
@@ -221,7 +221,7 @@
  */
 media_status_t AMediaCodec_queueInputBuffer(AMediaCodec*, size_t idx,
                                             _off_t_compat offset, size_t size,
-                                            uint64_t time, uint32_t flags);
+                                            uint64_t time, uint32_t flags) __INTRODUCED_IN(21);
 
 /**
  * Send the specified buffer to the codec for processing.
@@ -229,7 +229,7 @@
 media_status_t AMediaCodec_queueSecureInputBuffer(AMediaCodec*, size_t idx,
                                                   _off_t_compat offset,
                                                   AMediaCodecCryptoInfo*,
-                                                  uint64_t time, uint32_t flags);
+                                                  uint64_t time, uint32_t flags) __INTRODUCED_IN(21);
 
 #undef _off_t_compat
 
@@ -237,21 +237,21 @@
  * Get the index of the next available buffer of processed data.
  */
 ssize_t AMediaCodec_dequeueOutputBuffer(AMediaCodec*, AMediaCodecBufferInfo *info,
-        int64_t timeoutUs);
-AMediaFormat* AMediaCodec_getOutputFormat(AMediaCodec*);
+        int64_t timeoutUs) __INTRODUCED_IN(21);
+AMediaFormat* AMediaCodec_getOutputFormat(AMediaCodec*) __INTRODUCED_IN(21);
 
 /**
  * Get format of the buffer. The specified buffer index must have been previously obtained from
  * dequeueOutputBuffer.
  */
-AMediaFormat* AMediaCodec_getBufferFormat(AMediaCodec*, size_t index);
+AMediaFormat* AMediaCodec_getBufferFormat(AMediaCodec*, size_t index) __INTRODUCED_IN(21);
 
 /**
  * If you are done with a buffer, use this call to return the buffer to
  * the codec. If you previously specified a surface when configuring this
  * video decoder you can optionally render the buffer.
  */
-media_status_t AMediaCodec_releaseOutputBuffer(AMediaCodec*, size_t idx, bool render);
+media_status_t AMediaCodec_releaseOutputBuffer(AMediaCodec*, size_t idx, bool render) __INTRODUCED_IN(21);
 
 /**
  * Dynamically sets the output surface of a codec.
@@ -263,7 +263,7 @@
  *
  * For more details, see the Java documentation for MediaCodec.setOutputSurface.
  */
-media_status_t AMediaCodec_setOutputSurface(AMediaCodec*, ANativeWindow* surface);
+media_status_t AMediaCodec_setOutputSurface(AMediaCodec*, ANativeWindow* surface) __INTRODUCED_IN(21);
 
 /**
  * If you are done with a buffer, use this call to update its surface timestamp
@@ -274,7 +274,7 @@
  * For more details, see the Java documentation for MediaCodec.releaseOutputBuffer.
  */
 media_status_t AMediaCodec_releaseOutputBufferAtTime(
-        AMediaCodec *mData, size_t idx, int64_t timestampNs);
+        AMediaCodec *mData, size_t idx, int64_t timestampNs) __INTRODUCED_IN(21);
 
 #if __ANDROID_API__ >= 26
 
@@ -290,7 +290,7 @@
  * For more details, see the Java documentation for MediaCodec.createInputSurface.
  */
 media_status_t AMediaCodec_createInputSurface(
-        AMediaCodec *mData, ANativeWindow **surface);
+        AMediaCodec *mData, ANativeWindow **surface) __INTRODUCED_IN(26);
 
 /**
  * Creates a persistent Surface that can be used as the input to encoder
@@ -306,7 +306,7 @@
  * For more details, see the Java documentation for MediaCodec.createPersistentInputSurface.
  */
 media_status_t AMediaCodec_createPersistentInputSurface(
-        ANativeWindow **surface);
+        ANativeWindow **surface) __INTRODUCED_IN(26);
 
 /**
  * Set a persistent-surface that can be used as the input to encoder, in place of input buffers
@@ -319,7 +319,7 @@
  * For more details, see the Java documentation for MediaCodec.setInputSurface.
  */
 media_status_t AMediaCodec_setInputSurface(
-        AMediaCodec *mData, ANativeWindow *surface);
+        AMediaCodec *mData, ANativeWindow *surface) __INTRODUCED_IN(26);
 
 /**
  * Signal additional parameters to the codec instance.
@@ -330,7 +330,7 @@
  * NOTE: Some of these parameter changes may silently fail to apply.
  */
 media_status_t AMediaCodec_setParameters(
-        AMediaCodec *mData, const AMediaFormat* params);
+        AMediaCodec *mData, const AMediaFormat* params) __INTRODUCED_IN(26);
 
 /**
  * Signals end-of-stream on input. Equivalent to submitting an empty buffer with
@@ -346,7 +346,7 @@
  *
  * For more details, see the Java documentation for MediaCodec.signalEndOfInputStream.
  */
-media_status_t AMediaCodec_signalEndOfInputStream(AMediaCodec *mData);
+media_status_t AMediaCodec_signalEndOfInputStream(AMediaCodec *mData) __INTRODUCED_IN(26);
 
 #endif /* __ANDROID_API__ >= 26 */
 
@@ -357,12 +357,12 @@
  * or createEncoderByType, what component is chosen is not known beforehand.
  * Caller shall call AMediaCodec_releaseName to free the returned pointer.
  */
-media_status_t AMediaCodec_getName(AMediaCodec*, char** out_name);
+media_status_t AMediaCodec_getName(AMediaCodec*, char** out_name) __INTRODUCED_IN(28);
 
 /**
  * Free the memory pointed by name which is returned by AMediaCodec_getName.
  */
-void AMediaCodec_releaseName(AMediaCodec*, char* name);
+void AMediaCodec_releaseName(AMediaCodec*, char* name) __INTRODUCED_IN(28);
 
 /**
  * Set an asynchronous callback for actionable AMediaCodec events.
@@ -386,32 +386,32 @@
 media_status_t AMediaCodec_setAsyncNotifyCallback(
         AMediaCodec*,
         AMediaCodecOnAsyncNotifyCallback callback,
-        void *userdata);
+        void *userdata) __INTRODUCED_IN(28);
 
 /**
  * Release the crypto if applicable.
  */
-media_status_t AMediaCodec_releaseCrypto(AMediaCodec*);
+media_status_t AMediaCodec_releaseCrypto(AMediaCodec*) __INTRODUCED_IN(28);
 
 /**
  * Call this after AMediaCodec_configure() returns successfully to get the input
  * format accepted by the codec. Do this to determine what optional configuration
  * parameters were supported by the codec.
  */
-AMediaFormat* AMediaCodec_getInputFormat(AMediaCodec*);
+AMediaFormat* AMediaCodec_getInputFormat(AMediaCodec*) __INTRODUCED_IN(28);
 
 /**
  * Returns true if the codec cannot proceed further, but can be recovered by stopping,
  * configuring, and starting again.
  */
-bool AMediaCodecActionCode_isRecoverable(int32_t actionCode);
+bool AMediaCodecActionCode_isRecoverable(int32_t actionCode) __INTRODUCED_IN(28);
 
 /**
  * Returns true if the codec error is a transient issue, perhaps due to
  * resource constraints, and the method (or encoding/decoding) may be
  * retried at a later time.
  */
-bool AMediaCodecActionCode_isTransient(int32_t actionCode);
+bool AMediaCodecActionCode_isTransient(int32_t actionCode) __INTRODUCED_IN(28);
 
 #endif /* __ANDROID_API__ >= 28 */
 
@@ -447,51 +447,51 @@
         uint8_t iv[16],
         cryptoinfo_mode_t mode,
         size_t *clearbytes,
-        size_t *encryptedbytes);
+        size_t *encryptedbytes) __INTRODUCED_IN(21);
 
 /**
  * delete an AMediaCodecCryptoInfo created previously with AMediaCodecCryptoInfo_new, or
  * obtained from AMediaExtractor
  */
-media_status_t AMediaCodecCryptoInfo_delete(AMediaCodecCryptoInfo*);
+media_status_t AMediaCodecCryptoInfo_delete(AMediaCodecCryptoInfo*) __INTRODUCED_IN(21);
 
 /**
  * Set the crypto pattern on an AMediaCryptoInfo object
  */
 void AMediaCodecCryptoInfo_setPattern(
         AMediaCodecCryptoInfo *info,
-        cryptoinfo_pattern_t *pattern);
+        cryptoinfo_pattern_t *pattern) __INTRODUCED_IN(21);
 
 /**
  * The number of subsamples that make up the buffer's contents.
  */
-size_t AMediaCodecCryptoInfo_getNumSubSamples(AMediaCodecCryptoInfo*);
+size_t AMediaCodecCryptoInfo_getNumSubSamples(AMediaCodecCryptoInfo*) __INTRODUCED_IN(21);
 
 /**
  * A 16-byte opaque key
  */
-media_status_t AMediaCodecCryptoInfo_getKey(AMediaCodecCryptoInfo*, uint8_t *dst);
+media_status_t AMediaCodecCryptoInfo_getKey(AMediaCodecCryptoInfo*, uint8_t *dst) __INTRODUCED_IN(21);
 
 /**
  * A 16-byte initialization vector
  */
-media_status_t AMediaCodecCryptoInfo_getIV(AMediaCodecCryptoInfo*, uint8_t *dst);
+media_status_t AMediaCodecCryptoInfo_getIV(AMediaCodecCryptoInfo*, uint8_t *dst) __INTRODUCED_IN(21);
 
 /**
  * The type of encryption that has been applied,
  * one of AMEDIACODECRYPTOINFO_MODE_CLEAR or AMEDIACODECRYPTOINFO_MODE_AES_CTR.
  */
-cryptoinfo_mode_t AMediaCodecCryptoInfo_getMode(AMediaCodecCryptoInfo*);
+cryptoinfo_mode_t AMediaCodecCryptoInfo_getMode(AMediaCodecCryptoInfo*) __INTRODUCED_IN(21);
 
 /**
  * The number of leading unencrypted bytes in each subsample.
  */
-media_status_t AMediaCodecCryptoInfo_getClearBytes(AMediaCodecCryptoInfo*, size_t *dst);
+media_status_t AMediaCodecCryptoInfo_getClearBytes(AMediaCodecCryptoInfo*, size_t *dst) __INTRODUCED_IN(21);
 
 /**
  * The number of trailing encrypted bytes in each subsample.
  */
-media_status_t AMediaCodecCryptoInfo_getEncryptedBytes(AMediaCodecCryptoInfo*, size_t *dst);
+media_status_t AMediaCodecCryptoInfo_getEncryptedBytes(AMediaCodecCryptoInfo*, size_t *dst) __INTRODUCED_IN(21);
 
 #endif /* __ANDROID_API__ >= 21 */
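
The output format returned by AMediaCodec_getOutputFormat is what a caller should re-read whenever the dequeue loop reports a format change, which is the scenario this merge is concerned with. A hedged sketch of that loop; the AMEDIACODEC_INFO_* constants and the AMediaCodecBufferInfo struct are assumed from the existing header, not shown in this hunk:

    #include <stdbool.h>
    #include <sys/types.h>
    #include <media/NdkMediaCodec.h>
    #include <media/NdkMediaFormat.h>

    // Sketch of the output side of a decode loop; `codec` is already configured and started.
    static void drainOutput(AMediaCodec *codec) {
        AMediaCodecBufferInfo info;
        for (;;) {
            ssize_t idx = AMediaCodec_dequeueOutputBuffer(codec, &info, /*timeoutUs=*/10000);
            if (idx >= 0) {
                // Return the buffer without rendering; a converter would copy it out first.
                AMediaCodec_releaseOutputBuffer(codec, (size_t)idx, /*render=*/false);
            } else if (idx == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
                // Re-read the output format; this is the format any conversion step must use.
                AMediaFormat *fmt = AMediaCodec_getOutputFormat(codec);
                int32_t colorFormat = 0;
                AMediaFormat_getInt32(fmt, AMEDIAFORMAT_KEY_COLOR_FORMAT, &colorFormat);
                AMediaFormat_delete(fmt);
            } else {
                break;   // try-again or buffers-changed: stop for this sketch
            }
        }
    }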
 
diff --git a/media/ndk/include/media/NdkMediaCrypto.h b/media/ndk/include/media/NdkMediaCrypto.h
index 6f2926e..bcdf9a0 100644
--- a/media/ndk/include/media/NdkMediaCrypto.h
+++ b/media/ndk/include/media/NdkMediaCrypto.h
@@ -49,13 +49,13 @@
 
 #if __ANDROID_API__ >= 21
 
-bool AMediaCrypto_isCryptoSchemeSupported(const AMediaUUID uuid);
+bool AMediaCrypto_isCryptoSchemeSupported(const AMediaUUID uuid) __INTRODUCED_IN(21);
 
-bool AMediaCrypto_requiresSecureDecoderComponent(const char *mime);
+bool AMediaCrypto_requiresSecureDecoderComponent(const char *mime) __INTRODUCED_IN(21);
 
-AMediaCrypto* AMediaCrypto_new(const AMediaUUID uuid, const void *initData, size_t initDataSize);
+AMediaCrypto* AMediaCrypto_new(const AMediaUUID uuid, const void *initData, size_t initDataSize) __INTRODUCED_IN(21);
 
-void AMediaCrypto_delete(AMediaCrypto* crypto);
+void AMediaCrypto_delete(AMediaCrypto* crypto) __INTRODUCED_IN(21);
 
 #endif /* __ANDROID_API__ >= 21 */
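
A small sketch of the crypto object lifecycle; it assumes AMediaUUID is the 16-byte scheme identifier declared earlier in this header, and the UUID bytes themselves are placeholders supplied by the caller:

    #include <stddef.h>
    #include <stdint.h>
    #include <media/NdkMediaCrypto.h>

    // Sketch: probe for a scheme, then create a crypto object for use with
    // AMediaCodec_configure(). The caller releases it with AMediaCrypto_delete().
    static AMediaCrypto *tryCreateCrypto(const uint8_t uuid[16],
                                         const void *initData, size_t initDataSize) {
        if (!AMediaCrypto_isCryptoSchemeSupported(uuid)) {
            return NULL;                 // scheme not available on this device
        }
        return AMediaCrypto_new(uuid, initData, initDataSize);
    }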
 
diff --git a/media/ndk/include/media/NdkMediaDataSource.h b/media/ndk/include/media/NdkMediaDataSource.h
index 9e2e351..ea5ba0c 100644
--- a/media/ndk/include/media/NdkMediaDataSource.h
+++ b/media/ndk/include/media/NdkMediaDataSource.h
@@ -84,19 +84,19 @@
  * Create new media data source. Returns NULL if memory allocation
  * for the new data source object fails.
  */
-AMediaDataSource* AMediaDataSource_new();
+AMediaDataSource* AMediaDataSource_new() __INTRODUCED_IN(28);
 
 /**
  * Delete a previously created media data source.
  */
-void AMediaDataSource_delete(AMediaDataSource*);
+void AMediaDataSource_delete(AMediaDataSource*) __INTRODUCED_IN(28);
 
 /**
  * Set a user-provided opaque handle. This opaque handle is passed as
  * the first argument to the data source callbacks.
  */
 void AMediaDataSource_setUserdata(
-        AMediaDataSource*, void *userdata);
+        AMediaDataSource*, void *userdata) __INTRODUCED_IN(28);
 
 /**
  * Set a custom callback for supplying random access media data to the
@@ -111,7 +111,7 @@
  */
 void AMediaDataSource_setReadAt(
         AMediaDataSource*,
-        AMediaDataSourceReadAt);
+        AMediaDataSourceReadAt) __INTRODUCED_IN(28);
 
 /**
  * Set a custom callback for supplying the size of the data source to the
@@ -122,7 +122,7 @@
  */
 void AMediaDataSource_setGetSize(
         AMediaDataSource*,
-        AMediaDataSourceGetSize);
+        AMediaDataSourceGetSize) __INTRODUCED_IN(28);
 
 /**
  * Set a custom callback to receive signal from the NDK media framework
@@ -133,7 +133,7 @@
  */
 void AMediaDataSource_setClose(
         AMediaDataSource*,
-        AMediaDataSourceClose);
+        AMediaDataSourceClose) __INTRODUCED_IN(28);
 
 #endif  /*__ANDROID_API__ >= 28 */
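
The three setters wire user callbacks into the framework. The sketch below assumes the callback typedefs declared earlier in this header take (userdata, offset, buffer, size) for reads and plain (userdata) for the size and close callbacks; the in-memory source is purely illustrative:

    #include <stdint.h>
    #include <string.h>
    #include <sys/types.h>
    #include <media/NdkMediaDataSource.h>

    // Hypothetical in-memory source used only to illustrate the callback wiring.
    typedef struct { const uint8_t *bytes; size_t size; } MemSource;

    static ssize_t memReadAt(void *userdata, off64_t offset, void *buffer, size_t size) {
        MemSource *src = (MemSource *)userdata;
        if (offset < 0 || (size_t)offset >= src->size) return -1;   // no more data
        size_t avail = src->size - (size_t)offset;
        size_t n = size < avail ? size : avail;
        memcpy(buffer, src->bytes + (size_t)offset, n);
        return (ssize_t)n;
    }

    static ssize_t memGetSize(void *userdata) {
        return (ssize_t)((MemSource *)userdata)->size;
    }

    static void memClose(void *userdata) { (void)userdata; }

    static AMediaDataSource *makeMemorySource(MemSource *src) {
        AMediaDataSource *ds = AMediaDataSource_new();
        if (ds == NULL) return NULL;                 // allocation failed
        AMediaDataSource_setUserdata(ds, src);
        AMediaDataSource_setReadAt(ds, memReadAt);
        AMediaDataSource_setGetSize(ds, memGetSize);
        AMediaDataSource_setClose(ds, memClose);
        return ds;   // pass to AMediaExtractor_setDataSourceCustom(); free with AMediaDataSource_delete()
    }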
 
diff --git a/media/ndk/include/media/NdkMediaDrm.h b/media/ndk/include/media/NdkMediaDrm.h
index d45dc20..0209681 100644
--- a/media/ndk/include/media/NdkMediaDrm.h
+++ b/media/ndk/include/media/NdkMediaDrm.h
@@ -97,25 +97,27 @@
  * mimeType is the MIME type of the media container, e.g. "video/mp4".  If mimeType
  * is not known or required, it can be provided as NULL.
  */
-bool AMediaDrm_isCryptoSchemeSupported(const uint8_t *uuid, const char *mimeType);
+bool AMediaDrm_isCryptoSchemeSupported(const uint8_t *uuid,
+        const char *mimeType) __INTRODUCED_IN(21);
 
 /**
  * Create a MediaDrm instance from a UUID
  * uuid identifies the universal unique ID of the crypto scheme. uuid must be 16 bytes.
  */
-AMediaDrm* AMediaDrm_createByUUID(const uint8_t *uuid);
+AMediaDrm* AMediaDrm_createByUUID(const uint8_t *uuid) __INTRODUCED_IN(21);
 
 /**
  * Release a MediaDrm object
  */
-void AMediaDrm_release(AMediaDrm *);
+void AMediaDrm_release(AMediaDrm *) __INTRODUCED_IN(21);
 
 /**
  * Register a callback to be invoked when an event occurs
  *
  * listener is the callback that will be invoked on event
  */
-media_status_t AMediaDrm_setOnEventListener(AMediaDrm *, AMediaDrmEventListener listener);
+media_status_t AMediaDrm_setOnEventListener(AMediaDrm *,
+        AMediaDrmEventListener listener) __INTRODUCED_IN(21);
 
 /**
  * Open a new session with the MediaDrm object.  A session ID is returned.
@@ -123,13 +125,15 @@
  * returns MEDIADRM_NOT_PROVISIONED_ERROR if provisioning is needed
  * returns MEDIADRM_RESOURCE_BUSY_ERROR if required resources are in use
  */
-media_status_t AMediaDrm_openSession(AMediaDrm *, AMediaDrmSessionId *sessionId);
+media_status_t AMediaDrm_openSession(AMediaDrm *,
+        AMediaDrmSessionId *sessionId) __INTRODUCED_IN(21);
 
 /**
  * Close a session on the MediaDrm object that was previously opened
  * with AMediaDrm_openSession.
  */
-media_status_t AMediaDrm_closeSession(AMediaDrm *, const AMediaDrmSessionId *sessionId);
+media_status_t AMediaDrm_closeSession(AMediaDrm *,
+        const AMediaDrmSessionId *sessionId) __INTRODUCED_IN(21);
 
 typedef enum AMediaDrmKeyType {
     /**
@@ -208,7 +212,7 @@
 media_status_t AMediaDrm_getKeyRequest(AMediaDrm *, const AMediaDrmScope *scope,
         const uint8_t *init, size_t initSize, const char *mimeType, AMediaDrmKeyType keyType,
         const AMediaDrmKeyValue *optionalParameters, size_t numOptionalParameters,
-        const uint8_t **keyRequest, size_t *keyRequestSize);
+        const uint8_t **keyRequest, size_t *keyRequestSize) __INTRODUCED_IN(21);
 
 /**
  * A key response is received from the license server by the app, then it is
@@ -228,7 +232,8 @@
  */
 
 media_status_t AMediaDrm_provideKeyResponse(AMediaDrm *, const AMediaDrmScope *scope,
-        const uint8_t *response, size_t responseSize, AMediaDrmKeySetId *keySetId);
+        const uint8_t *response, size_t responseSize,
+        AMediaDrmKeySetId *keySetId) __INTRODUCED_IN(21);
 
 /**
  * Restore persisted offline keys into a new session.  keySetId identifies the
@@ -238,14 +243,15 @@
  * keySetId identifies the saved key set to restore
  */
 media_status_t AMediaDrm_restoreKeys(AMediaDrm *, const AMediaDrmSessionId *sessionId,
-        const AMediaDrmKeySetId *keySetId);
+        const AMediaDrmKeySetId *keySetId) __INTRODUCED_IN(21);
 
 /**
  * Remove the current keys from a session.
  *
  * keySetId identifies keys to remove
  */
-media_status_t AMediaDrm_removeKeys(AMediaDrm *, const AMediaDrmSessionId *keySetId);
+media_status_t AMediaDrm_removeKeys(AMediaDrm *,
+        const AMediaDrmSessionId *keySetId) __INTRODUCED_IN(21);
 
 /**
  * Request an informative description of the key status for the session.  The status is
@@ -261,7 +267,7 @@
  * and numPairs will be set to the number of pairs available.
  */
 media_status_t AMediaDrm_queryKeyStatus(AMediaDrm *, const AMediaDrmSessionId *sessionId,
-        AMediaDrmKeyValue *keyValuePairs, size_t *numPairs);
+        AMediaDrmKeyValue *keyValuePairs, size_t *numPairs) __INTRODUCED_IN(21);
 
 
 /**
@@ -280,7 +286,7 @@
  *       the next call to getProvisionRequest.
  */
 media_status_t AMediaDrm_getProvisionRequest(AMediaDrm *, const uint8_t **provisionRequest,
-        size_t *provisionRequestSize, const char **serverUrl);
+        size_t *provisionRequestSize, const char **serverUrl) __INTRODUCED_IN(21);
 
 
 /**
@@ -295,7 +301,7 @@
  * server rejected the request
  */
 media_status_t AMediaDrm_provideProvisionResponse(AMediaDrm *,
-        const uint8_t *response, size_t responseSize);
+        const uint8_t *response, size_t responseSize) __INTRODUCED_IN(21);
 
 
 /**
@@ -320,7 +326,7 @@
  * number required.
  */
 media_status_t AMediaDrm_getSecureStops(AMediaDrm *,
-        AMediaDrmSecureStop *secureStops, size_t *numSecureStops);
+        AMediaDrmSecureStop *secureStops, size_t *numSecureStops) __INTRODUCED_IN(21);
 
 /**
  * Process the SecureStop server response message ssRelease.  After authenticating
@@ -329,7 +335,7 @@
  * ssRelease is the server response indicating which secure stops to release
  */
 media_status_t AMediaDrm_releaseSecureStops(AMediaDrm *,
-        const AMediaDrmSecureStop *ssRelease);
+        const AMediaDrmSecureStop *ssRelease) __INTRODUCED_IN(21);
 
 /**
  * String property name: identifies the maker of the DRM engine plugin
@@ -362,7 +368,7 @@
  * will remain valid until the next call to AMediaDrm_getPropertyString.
  */
 media_status_t AMediaDrm_getPropertyString(AMediaDrm *, const char *propertyName,
-        const char **propertyValue);
+        const char **propertyValue) __INTRODUCED_IN(21);
 
 /**
  * Byte array property name: the device unique identifier is established during
@@ -377,19 +383,19 @@
  * will remain valid until the next call to AMediaDrm_getPropertyByteArray.
  */
 media_status_t AMediaDrm_getPropertyByteArray(AMediaDrm *, const char *propertyName,
-        AMediaDrmByteArray *propertyValue);
+        AMediaDrmByteArray *propertyValue) __INTRODUCED_IN(21);
 
 /**
  * Set a DRM engine plugin String property value.
  */
 media_status_t AMediaDrm_setPropertyString(AMediaDrm *, const char *propertyName,
-        const char *value);
+        const char *value) __INTRODUCED_IN(21);
 
 /**
  * Set a DRM engine plugin byte array property value.
  */
 media_status_t AMediaDrm_setPropertyByteArray(AMediaDrm *, const char *propertyName,
-        const uint8_t *value, size_t valueSize);
+        const uint8_t *value, size_t valueSize) __INTRODUCED_IN(21);
 
 /**
  * In addition to supporting decryption of DASH Common Encrypted Media, the
@@ -418,7 +424,7 @@
  */
 media_status_t AMediaDrm_encrypt(AMediaDrm *, const AMediaDrmSessionId *sessionId,
         const char *cipherAlgorithm, uint8_t *keyId, uint8_t *iv,
-        const uint8_t *input, uint8_t *output, size_t dataSize);
+        const uint8_t *input, uint8_t *output, size_t dataSize) __INTRODUCED_IN(21);
 
 /*
  * Decrypt the data referenced by input of length dataSize using algorithm specified
@@ -429,7 +435,7 @@
  */
 media_status_t AMediaDrm_decrypt(AMediaDrm *, const AMediaDrmSessionId *sessionId,
         const char *cipherAlgorithm, uint8_t *keyId, uint8_t *iv,
-        const uint8_t *input, uint8_t *output, size_t dataSize);
+        const uint8_t *input, uint8_t *output, size_t dataSize) __INTRODUCED_IN(21);
 
 /*
  * Generate a signature using the specified macAlgorithm over the message data
@@ -442,7 +448,7 @@
  */
 media_status_t AMediaDrm_sign(AMediaDrm *, const AMediaDrmSessionId *sessionId,
         const char *macAlgorithm, uint8_t *keyId, uint8_t *message, size_t messageSize,
-        uint8_t *signature, size_t *signatureSize);
+        uint8_t *signature, size_t *signatureSize) __INTRODUCED_IN(21);
 
 /*
  * Perform a signature verification using the specified macAlgorithm over the message
@@ -453,7 +459,7 @@
  */
 media_status_t AMediaDrm_verify(AMediaDrm *, const AMediaDrmSessionId *sessionId,
         const char *macAlgorithm, uint8_t *keyId, const uint8_t *message, size_t messageSize,
-        const uint8_t *signature, size_t signatureSize);
+        const uint8_t *signature, size_t signatureSize) __INTRODUCED_IN(21);
 
 #endif /* __ANDROID_API__ >= 21 */
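
The key exchange runs openSession, getKeyRequest, an app-side round trip to the license server, provideKeyResponse, then closeSession. A signature-level sketch; it assumes the KEY_TYPE_STREAMING enumerator from the AMediaDrmKeyType enum above, and that a session id can be passed where an AMediaDrmScope is expected (both are byte-array handles in this header):

    #include <stdint.h>
    #include <media/NdkMediaDrm.h>

    // Sketch of the streaming-key handshake; the round trip to the license server is elided.
    static media_status_t fetchStreamingKeys(AMediaDrm *drm, const uint8_t *initData,
                                             size_t initSize, const char *mimeType) {
        AMediaDrmSessionId sessionId;
        media_status_t status = AMediaDrm_openSession(drm, &sessionId);
        if (status != AMEDIA_OK) {
            return status;               // e.g. not provisioned, or resources busy
        }

        const uint8_t *keyRequest = NULL;
        size_t keyRequestSize = 0;
        status = AMediaDrm_getKeyRequest(drm, &sessionId, initData, initSize, mimeType,
                KEY_TYPE_STREAMING, /*optionalParameters=*/NULL, /*numOptionalParameters=*/0,
                &keyRequest, &keyRequestSize);
        if (status == AMEDIA_OK) {
            // ... send keyRequest/keyRequestSize to the license server, then feed the
            // server's response to AMediaDrm_provideKeyResponse() ...
        }

        AMediaDrm_closeSession(drm, &sessionId);
        return status;
    }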
 
diff --git a/media/ndk/include/media/NdkMediaExtractor.h b/media/ndk/include/media/NdkMediaExtractor.h
index 3452cc9..6a1796f 100644
--- a/media/ndk/include/media/NdkMediaExtractor.h
+++ b/media/ndk/include/media/NdkMediaExtractor.h
@@ -54,23 +54,24 @@
 /**
  * Create new media extractor
  */
-AMediaExtractor* AMediaExtractor_new();
+AMediaExtractor* AMediaExtractor_new() __INTRODUCED_IN(21);
 
 /**
  * Delete a previously created media extractor
  */
-media_status_t AMediaExtractor_delete(AMediaExtractor*);
+media_status_t AMediaExtractor_delete(AMediaExtractor*) __INTRODUCED_IN(21);
 
 /**
  *  Set the file descriptor from which the extractor will read.
  */
 media_status_t AMediaExtractor_setDataSourceFd(AMediaExtractor*, int fd, off64_t offset,
-        off64_t length);
+        off64_t length) __INTRODUCED_IN(21);
 
 /**
  * Set the URI from which the extractor will read.
  */
-media_status_t AMediaExtractor_setDataSource(AMediaExtractor*, const char *location);
+media_status_t AMediaExtractor_setDataSource(AMediaExtractor*,
+        const char *location) __INTRODUCED_IN(21);
         // TODO support headers
 
 #if __ANDROID_API__ >= 28
@@ -78,19 +79,20 @@
 /**
  * Set the custom data source implementation from which the extractor will read.
  */
-media_status_t AMediaExtractor_setDataSourceCustom(AMediaExtractor*, AMediaDataSource *src);
+media_status_t AMediaExtractor_setDataSourceCustom(AMediaExtractor*,
+        AMediaDataSource *src) __INTRODUCED_IN(28);
 
 #endif /* __ANDROID_API__ >= 28 */
 
 /**
  * Return the number of tracks in the previously specified media file
  */
-size_t AMediaExtractor_getTrackCount(AMediaExtractor*);
+size_t AMediaExtractor_getTrackCount(AMediaExtractor*) __INTRODUCED_IN(21);
 
 /**
  * Return the format of the specified track. The caller must free the returned format
  */
-AMediaFormat* AMediaExtractor_getTrackFormat(AMediaExtractor*, size_t idx);
+AMediaFormat* AMediaExtractor_getTrackFormat(AMediaExtractor*, size_t idx) __INTRODUCED_IN(21);
 
 /**
  * Select the specified track. Subsequent calls to readSampleData, getSampleTrackIndex and
@@ -98,41 +100,42 @@
  * Selecting the same track multiple times has no effect; the track is
  * only selected once.
  */
-media_status_t AMediaExtractor_selectTrack(AMediaExtractor*, size_t idx);
+media_status_t AMediaExtractor_selectTrack(AMediaExtractor*, size_t idx) __INTRODUCED_IN(21);
 
 /**
  * Unselect the specified track. Subsequent calls to readSampleData, getSampleTrackIndex and
  * getSampleTime only retrieve information for the subset of tracks selected.
  */
-media_status_t AMediaExtractor_unselectTrack(AMediaExtractor*, size_t idx);
+media_status_t AMediaExtractor_unselectTrack(AMediaExtractor*, size_t idx) __INTRODUCED_IN(21);
 
 /**
  * Read the current sample.
  */
-ssize_t AMediaExtractor_readSampleData(AMediaExtractor*, uint8_t *buffer, size_t capacity);
+ssize_t AMediaExtractor_readSampleData(AMediaExtractor*,
+        uint8_t *buffer, size_t capacity) __INTRODUCED_IN(21);
 
 /**
  * Read the current sample's flags.
  */
-uint32_t AMediaExtractor_getSampleFlags(AMediaExtractor*); // see definitions below
+uint32_t AMediaExtractor_getSampleFlags(AMediaExtractor*) __INTRODUCED_IN(21);
 
 /**
  * Returns the track index the current sample originates from (or -1
  * if no more samples are available)
  */
-int AMediaExtractor_getSampleTrackIndex(AMediaExtractor*);
+int AMediaExtractor_getSampleTrackIndex(AMediaExtractor*) __INTRODUCED_IN(21);
 
 /**
  * Returns the current sample's presentation time in microseconds,
  * or -1 if no more samples are available.
  */
-int64_t AMediaExtractor_getSampleTime(AMediaExtractor*);
+int64_t AMediaExtractor_getSampleTime(AMediaExtractor*) __INTRODUCED_IN(21);
 
 /**
  * Advance to the next sample. Returns false if no more sample data
  * is available (end of stream).
  */
-bool AMediaExtractor_advance(AMediaExtractor*);
+bool AMediaExtractor_advance(AMediaExtractor*) __INTRODUCED_IN(21);
 
 typedef enum {
     AMEDIAEXTRACTOR_SEEK_PREVIOUS_SYNC,
@@ -143,7 +146,8 @@
 /**
  *
  */
-media_status_t AMediaExtractor_seekTo(AMediaExtractor*, int64_t seekPosUs, SeekMode mode);
+media_status_t AMediaExtractor_seekTo(AMediaExtractor*,
+        int64_t seekPosUs, SeekMode mode) __INTRODUCED_IN(21);
 
 /**
  * mapping of crypto scheme uuid to the scheme specific data for that scheme
@@ -165,10 +169,10 @@
 /**
  * Get the PSSH info if present.
  */
-PsshInfo* AMediaExtractor_getPsshInfo(AMediaExtractor*);
+PsshInfo* AMediaExtractor_getPsshInfo(AMediaExtractor*) __INTRODUCED_IN(21);
 
 
-AMediaCodecCryptoInfo *AMediaExtractor_getSampleCryptoInfo(AMediaExtractor *);
+AMediaCodecCryptoInfo *AMediaExtractor_getSampleCryptoInfo(AMediaExtractor *) __INTRODUCED_IN(21);
 
 enum {
     AMEDIAEXTRACTOR_SAMPLE_FLAG_SYNC = 1,
@@ -184,7 +188,7 @@
  * This function will always return a format; however, the format could be empty
  * (no key-value pairs) if the media container does not provide format information.
  */
-AMediaFormat* AMediaExtractor_getFileFormat(AMediaExtractor*);
+AMediaFormat* AMediaExtractor_getFileFormat(AMediaExtractor*) __INTRODUCED_IN(28);
 
 /**
  * Returns the size of the current sample in bytes, or -1 when no samples are
@@ -196,7 +200,7 @@
  * AMediaExtractor_readSampleData(ex, buf, sampleSize);
  *
  */
-ssize_t AMediaExtractor_getSampleSize(AMediaExtractor*);
+ssize_t AMediaExtractor_getSampleSize(AMediaExtractor*) __INTRODUCED_IN(28);
 
 /**
  * Returns the duration of cached media samples downloaded from a network data source
@@ -209,7 +213,7 @@
  * cached duration cannot be calculated (bitrate, duration, and file size information
  * not available).
  */
-int64_t AMediaExtractor_getCachedDuration(AMediaExtractor *);
+int64_t AMediaExtractor_getCachedDuration(AMediaExtractor *) __INTRODUCED_IN(28);
 
 /**
  * Read the current sample's metadata format into |fmt|. Examples of sample metadata are
@@ -220,7 +224,8 @@
  * Existing key-value pairs in |fmt| would be removed if this API returns AMEDIA_OK.
  * The contents of |fmt| are undefined if this API returns AMEDIA_ERROR_*.
  */
-media_status_t AMediaExtractor_getSampleFormat(AMediaExtractor *ex, AMediaFormat *fmt);
+media_status_t AMediaExtractor_getSampleFormat(AMediaExtractor *ex,
+        AMediaFormat *fmt) __INTRODUCED_IN(28);
 
 #endif /* __ANDROID_API__ >= 28 */
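
A sketch of the canonical extractor loop using only functions declared above: select the tracks of interest, then read, timestamp, and advance until the stream ends. The sample buffer is supplied by the caller:

    #include <stdint.h>
    #include <sys/types.h>
    #include <media/NdkMediaExtractor.h>
    #include <media/NdkMediaFormat.h>

    // Sketch: open a file descriptor, select every track, and walk the samples.
    // `buffer`/`capacity` must be large enough to hold one sample.
    static media_status_t dumpSamples(int fd, off64_t offset, off64_t length,
                                      uint8_t *buffer, size_t capacity) {
        AMediaExtractor *ex = AMediaExtractor_new();
        media_status_t status = AMediaExtractor_setDataSourceFd(ex, fd, offset, length);
        if (status != AMEDIA_OK) {
            AMediaExtractor_delete(ex);
            return status;
        }

        size_t numTracks = AMediaExtractor_getTrackCount(ex);
        for (size_t i = 0; i < numTracks; ++i) {
            AMediaFormat *fmt = AMediaExtractor_getTrackFormat(ex, i);
            const char *mime = NULL;
            AMediaFormat_getString(fmt, AMEDIAFORMAT_KEY_MIME, &mime);
            AMediaExtractor_selectTrack(ex, i);      // a real player would filter on `mime`
            AMediaFormat_delete(fmt);                // the caller owns the returned format
        }

        while (AMediaExtractor_getSampleTrackIndex(ex) >= 0) {
            ssize_t sampleSize = AMediaExtractor_readSampleData(ex, buffer, capacity);
            int64_t ptsUs = AMediaExtractor_getSampleTime(ex);
            (void)sampleSize; (void)ptsUs;           // ... feed into a decoder here ...
            if (!AMediaExtractor_advance(ex)) break; // end of stream
        }

        AMediaExtractor_delete(ex);
        return AMEDIA_OK;
    }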
 
diff --git a/media/ndk/include/media/NdkMediaFormat.h b/media/ndk/include/media/NdkMediaFormat.h
index f510dff..5f7804d 100644
--- a/media/ndk/include/media/NdkMediaFormat.h
+++ b/media/ndk/include/media/NdkMediaFormat.h
@@ -48,126 +48,126 @@
 
 #if __ANDROID_API__ >= 21
 
-AMediaFormat *AMediaFormat_new();
-media_status_t AMediaFormat_delete(AMediaFormat*);
+AMediaFormat *AMediaFormat_new() __INTRODUCED_IN(21);
+media_status_t AMediaFormat_delete(AMediaFormat*) __INTRODUCED_IN(21);
 
 /**
  * Human readable representation of the format. The returned string is owned by the format,
  * and remains valid until the next call to toString, or until the format is deleted.
  */
-const char* AMediaFormat_toString(AMediaFormat*);
+const char* AMediaFormat_toString(AMediaFormat*) __INTRODUCED_IN(21);
 
-bool AMediaFormat_getInt32(AMediaFormat*, const char *name, int32_t *out);
-bool AMediaFormat_getInt64(AMediaFormat*, const char *name, int64_t *out);
-bool AMediaFormat_getFloat(AMediaFormat*, const char *name, float *out);
-bool AMediaFormat_getSize(AMediaFormat*, const char *name, size_t *out);
+bool AMediaFormat_getInt32(AMediaFormat*, const char *name, int32_t *out) __INTRODUCED_IN(21);
+bool AMediaFormat_getInt64(AMediaFormat*, const char *name, int64_t *out) __INTRODUCED_IN(21);
+bool AMediaFormat_getFloat(AMediaFormat*, const char *name, float *out) __INTRODUCED_IN(21);
+bool AMediaFormat_getSize(AMediaFormat*, const char *name, size_t *out) __INTRODUCED_IN(21);
 /**
  * The returned data is owned by the format and remains valid as long as the named entry
  * is part of the format.
  */
-bool AMediaFormat_getBuffer(AMediaFormat*, const char *name, void** data, size_t *size);
+bool AMediaFormat_getBuffer(AMediaFormat*, const char *name, void** data, size_t *size) __INTRODUCED_IN(21);
 /**
  * The returned string is owned by the format, and remains valid until the next call to getString,
  * or until the format is deleted.
  */
-bool AMediaFormat_getString(AMediaFormat*, const char *name, const char **out);
+bool AMediaFormat_getString(AMediaFormat*, const char *name, const char **out) __INTRODUCED_IN(21);
 
 
-void AMediaFormat_setInt32(AMediaFormat*, const char* name, int32_t value);
-void AMediaFormat_setInt64(AMediaFormat*, const char* name, int64_t value);
-void AMediaFormat_setFloat(AMediaFormat*, const char* name, float value);
+void AMediaFormat_setInt32(AMediaFormat*, const char* name, int32_t value) __INTRODUCED_IN(21);
+void AMediaFormat_setInt64(AMediaFormat*, const char* name, int64_t value) __INTRODUCED_IN(21);
+void AMediaFormat_setFloat(AMediaFormat*, const char* name, float value) __INTRODUCED_IN(21);
 /**
  * The provided string is copied into the format.
  */
-void AMediaFormat_setString(AMediaFormat*, const char* name, const char* value);
+void AMediaFormat_setString(AMediaFormat*, const char* name, const char* value) __INTRODUCED_IN(21);
 /**
  * The provided data is copied into the format.
  */
-void AMediaFormat_setBuffer(AMediaFormat*, const char* name, void* data, size_t size);
+void AMediaFormat_setBuffer(AMediaFormat*, const char* name, void* data, size_t size) __INTRODUCED_IN(21);
 
 
 
 /**
  * XXX should these be ints/enums that we look up in a table as needed?
  */
-extern const char* AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR;
-extern const char* AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR;
-extern const char* AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION;
-extern const char* AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL;
-extern const char* AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL;
-extern const char* AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT;
-extern const char* AMEDIAFORMAT_KEY_AAC_PROFILE;
-extern const char* AMEDIAFORMAT_KEY_AAC_SBR_MODE;
-extern const char* AMEDIAFORMAT_KEY_AUDIO_SESSION_ID;
-extern const char* AMEDIAFORMAT_KEY_BITRATE_MODE;
-extern const char* AMEDIAFORMAT_KEY_BIT_RATE;
-extern const char* AMEDIAFORMAT_KEY_CAPTURE_RATE;
-extern const char* AMEDIAFORMAT_KEY_CHANNEL_COUNT;
-extern const char* AMEDIAFORMAT_KEY_CHANNEL_MASK;
-extern const char* AMEDIAFORMAT_KEY_COLOR_FORMAT;
-extern const char* AMEDIAFORMAT_KEY_COLOR_RANGE;
-extern const char* AMEDIAFORMAT_KEY_COLOR_STANDARD;
-extern const char* AMEDIAFORMAT_KEY_COLOR_TRANSFER;
-extern const char* AMEDIAFORMAT_KEY_COMPLEXITY;
-extern const char* AMEDIAFORMAT_KEY_CSD;
-extern const char* AMEDIAFORMAT_KEY_CSD_0;
-extern const char* AMEDIAFORMAT_KEY_CSD_1;
-extern const char* AMEDIAFORMAT_KEY_CSD_2;
-extern const char* AMEDIAFORMAT_KEY_DISPLAY_CROP;
-extern const char* AMEDIAFORMAT_KEY_DISPLAY_HEIGHT;
-extern const char* AMEDIAFORMAT_KEY_DISPLAY_WIDTH;
-extern const char* AMEDIAFORMAT_KEY_DURATION;
-extern const char* AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL;
-extern const char* AMEDIAFORMAT_KEY_FRAME_RATE;
-extern const char* AMEDIAFORMAT_KEY_GRID_COLUMNS;
-extern const char* AMEDIAFORMAT_KEY_GRID_ROWS;
-extern const char* AMEDIAFORMAT_KEY_HDR_STATIC_INFO;
-extern const char* AMEDIAFORMAT_KEY_HEIGHT;
-extern const char* AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD;
-extern const char* AMEDIAFORMAT_KEY_IS_ADTS;
-extern const char* AMEDIAFORMAT_KEY_IS_AUTOSELECT;
-extern const char* AMEDIAFORMAT_KEY_IS_DEFAULT;
-extern const char* AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE;
-extern const char* AMEDIAFORMAT_KEY_I_FRAME_INTERVAL;
-extern const char* AMEDIAFORMAT_KEY_LANGUAGE;
-extern const char* AMEDIAFORMAT_KEY_LATENCY;
-extern const char* AMEDIAFORMAT_KEY_LEVEL;
-extern const char* AMEDIAFORMAT_KEY_MAX_HEIGHT;
-extern const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE;
-extern const char* AMEDIAFORMAT_KEY_MAX_WIDTH;
-extern const char* AMEDIAFORMAT_KEY_MIME;
-extern const char* AMEDIAFORMAT_KEY_MPEG_USER_DATA;
-extern const char* AMEDIAFORMAT_KEY_OPERATING_RATE;
-extern const char* AMEDIAFORMAT_KEY_PCM_ENCODING;
-extern const char* AMEDIAFORMAT_KEY_PRIORITY;
-extern const char* AMEDIAFORMAT_KEY_PROFILE;
-extern const char* AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP;
-extern const char* AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER;
-extern const char* AMEDIAFORMAT_KEY_ROTATION;
-extern const char* AMEDIAFORMAT_KEY_SAMPLE_RATE;
-extern const char* AMEDIAFORMAT_KEY_SEI;
-extern const char* AMEDIAFORMAT_KEY_SLICE_HEIGHT;
-extern const char* AMEDIAFORMAT_KEY_STRIDE;
-extern const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID;
-extern const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYERING;
-extern const char* AMEDIAFORMAT_KEY_TILE_HEIGHT;
-extern const char* AMEDIAFORMAT_KEY_TILE_WIDTH;
-extern const char* AMEDIAFORMAT_KEY_TIME_US;
-extern const char* AMEDIAFORMAT_KEY_TRACK_ID;
-extern const char* AMEDIAFORMAT_KEY_TRACK_INDEX;
-extern const char* AMEDIAFORMAT_KEY_WIDTH;
+extern const char* AMEDIAFORMAT_KEY_AAC_DRC_ATTENUATION_FACTOR __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_AAC_DRC_BOOST_FACTOR __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_AAC_DRC_HEAVY_COMPRESSION __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_AAC_PROFILE __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_AAC_SBR_MODE __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_AUDIO_SESSION_ID __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_BITRATE_MODE __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_BIT_RATE __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_CAPTURE_RATE __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_CHANNEL_COUNT __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_CHANNEL_MASK __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_COLOR_FORMAT __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_COLOR_RANGE __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_COLOR_STANDARD __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_COLOR_TRANSFER __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_COMPLEXITY __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_CSD __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_CSD_0 __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_CSD_1 __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_CSD_2 __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_DISPLAY_CROP __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_DISPLAY_HEIGHT __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_DISPLAY_WIDTH __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_DURATION __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_FRAME_RATE __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_GRID_COLUMNS __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_GRID_ROWS __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_HDR_STATIC_INFO __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_HEIGHT __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_IS_ADTS __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_IS_AUTOSELECT __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_IS_DEFAULT __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_I_FRAME_INTERVAL __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_LANGUAGE __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_LATENCY __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_LEVEL __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_MAX_HEIGHT __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_MAX_INPUT_SIZE __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_MAX_WIDTH __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_MIME __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_MPEG_USER_DATA __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_OPERATING_RATE __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_PCM_ENCODING __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_PRIORITY __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_PROFILE __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_ROTATION __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_SAMPLE_RATE __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_SEI __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_SLICE_HEIGHT __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_STRIDE __INTRODUCED_IN(21);
+extern const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_TEMPORAL_LAYERING __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_TILE_HEIGHT __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_TILE_WIDTH __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_TIME_US __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_TRACK_ID __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_TRACK_INDEX __INTRODUCED_IN(28);
+extern const char* AMEDIAFORMAT_KEY_WIDTH __INTRODUCED_IN(21);
 
 #endif /* __ANDROID_API__ >= 21 */
 
 #if __ANDROID_API__ >= 28
-bool AMediaFormat_getDouble(AMediaFormat*, const char *name, double *out);
+bool AMediaFormat_getDouble(AMediaFormat*, const char *name, double *out) __INTRODUCED_IN(28);
 bool AMediaFormat_getRect(AMediaFormat*, const char *name,
-                          int32_t *left, int32_t *top, int32_t *right, int32_t *bottom);
+        int32_t *left, int32_t *top, int32_t *right, int32_t *bottom) __INTRODUCED_IN(28);
 
-void AMediaFormat_setDouble(AMediaFormat*, const char* name, double value);
-void AMediaFormat_setSize(AMediaFormat*, const char* name, size_t value);
+void AMediaFormat_setDouble(AMediaFormat*, const char* name, double value) __INTRODUCED_IN(28);
+void AMediaFormat_setSize(AMediaFormat*, const char* name, size_t value) __INTRODUCED_IN(28);
 void AMediaFormat_setRect(AMediaFormat*, const char* name,
-                          int32_t left, int32_t top, int32_t right, int32_t bottom);
+        int32_t left, int32_t top, int32_t right, int32_t bottom) __INTRODUCED_IN(28);
 #endif /* __ANDROID_API__ >= 28 */
 
 __END_DECLS
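
A small sketch of building a format by hand with the setters and keys declared above; these are the same key strings listed in libmediandk.map.txt below:

    #include <stdio.h>
    #include <media/NdkMediaFormat.h>

    // Sketch: build a video format with the setters and keys declared above.
    static AMediaFormat *makeVideoFormat(const char *mime, int32_t width, int32_t height) {
        AMediaFormat *fmt = AMediaFormat_new();
        AMediaFormat_setString(fmt, AMEDIAFORMAT_KEY_MIME, mime);   // the string is copied
        AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_WIDTH, width);
        AMediaFormat_setInt32(fmt, AMEDIAFORMAT_KEY_HEIGHT, height);

        // The returned string is owned by the format and stays valid until the next
        // toString() call or until the format is deleted.
        printf("%s\n", AMediaFormat_toString(fmt));
        return fmt;   // release with AMediaFormat_delete()
    }
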
diff --git a/media/ndk/include/media/NdkMediaMuxer.h b/media/ndk/include/media/NdkMediaMuxer.h
index dc9e0ac..7393867 100644
--- a/media/ndk/include/media/NdkMediaMuxer.h
+++ b/media/ndk/include/media/NdkMediaMuxer.h
@@ -58,12 +58,12 @@
 /**
  * Create new media muxer
  */
-AMediaMuxer* AMediaMuxer_new(int fd, OutputFormat format);
+AMediaMuxer* AMediaMuxer_new(int fd, OutputFormat format) __INTRODUCED_IN(21);
 
 /**
  * Delete a previously created media muxer
  */
-media_status_t AMediaMuxer_delete(AMediaMuxer*);
+media_status_t AMediaMuxer_delete(AMediaMuxer*) __INTRODUCED_IN(21);
 
 /**
  * Set and store the geodata (latitude and longitude) in the output file.
@@ -76,7 +76,8 @@
  * Latitude must be in the range [-90, 90].
  * Longitude must be in the range [-180, 180].
  */
-media_status_t AMediaMuxer_setLocation(AMediaMuxer*, float latitude, float longitude);
+media_status_t AMediaMuxer_setLocation(AMediaMuxer*,
+        float latitude, float longitude) __INTRODUCED_IN(21);
 
 /**
  * Sets the orientation hint for output video playback.
@@ -90,26 +91,26 @@
  * The angle is specified in degrees, clockwise.
  * The supported angles are 0, 90, 180, and 270 degrees.
  */
-media_status_t AMediaMuxer_setOrientationHint(AMediaMuxer*, int degrees);
+media_status_t AMediaMuxer_setOrientationHint(AMediaMuxer*, int degrees) __INTRODUCED_IN(21);
 
 /**
  * Adds a track with the specified format.
  * Returns the index of the new track or a negative value in case of failure,
  * which can be interpreted as a media_status_t.
  */
-ssize_t AMediaMuxer_addTrack(AMediaMuxer*, const AMediaFormat* format);
+ssize_t AMediaMuxer_addTrack(AMediaMuxer*, const AMediaFormat* format) __INTRODUCED_IN(21);
 
 /**
  * Start the muxer. Should be called after AMediaMuxer_addTrack and
  * before AMediaMuxer_writeSampleData.
  */
-media_status_t AMediaMuxer_start(AMediaMuxer*);
+media_status_t AMediaMuxer_start(AMediaMuxer*) __INTRODUCED_IN(21);
 
 /**
  * Stops the muxer.
  * Once the muxer stops, it cannot be restarted.
  */
-media_status_t AMediaMuxer_stop(AMediaMuxer*);
+media_status_t AMediaMuxer_stop(AMediaMuxer*) __INTRODUCED_IN(21);
 
 /**
  * Writes an encoded sample into the muxer.
@@ -119,7 +120,8 @@
  * by the encoder.)
  */
 media_status_t AMediaMuxer_writeSampleData(AMediaMuxer *muxer,
-        size_t trackIdx, const uint8_t *data, const AMediaCodecBufferInfo *info);
+        size_t trackIdx, const uint8_t *data,
+        const AMediaCodecBufferInfo *info) __INTRODUCED_IN(21);
 
 #endif /* __ANDROID_API__ >= 21 */
 
diff --git a/media/ndk/libmediandk.map.txt b/media/ndk/libmediandk.map.txt
index fb56694..d828d6a 100644
--- a/media/ndk/libmediandk.map.txt
+++ b/media/ndk/libmediandk.map.txt
@@ -32,55 +32,66 @@
     AMEDIAFORMAT_KEY_AAC_DRC_TARGET_REFERENCE_LEVEL; # var introduced=28
     AMEDIAFORMAT_KEY_AAC_ENCODED_TARGET_LEVEL; # var introduced=28
     AMEDIAFORMAT_KEY_AAC_MAX_OUTPUT_CHANNEL_COUNT; # var introduced=28
-    AMEDIAFORMAT_KEY_AAC_PROFILE; # var
+    AMEDIAFORMAT_KEY_AAC_PROFILE; # var introduced=21
     AMEDIAFORMAT_KEY_AAC_SBR_MODE; # var introduced=28
     AMEDIAFORMAT_KEY_AUDIO_SESSION_ID; # var introduced=28
     AMEDIAFORMAT_KEY_BITRATE_MODE; # var introduced=28
-    AMEDIAFORMAT_KEY_BIT_RATE; # var
+    AMEDIAFORMAT_KEY_BIT_RATE; # var introduced=21
     AMEDIAFORMAT_KEY_CAPTURE_RATE; # var introduced=28
-    AMEDIAFORMAT_KEY_CHANNEL_COUNT; # var
-    AMEDIAFORMAT_KEY_CHANNEL_MASK; # var
-    AMEDIAFORMAT_KEY_COLOR_FORMAT; # var
+    AMEDIAFORMAT_KEY_CHANNEL_COUNT; # var introduced=21
+    AMEDIAFORMAT_KEY_CHANNEL_MASK; # var introduced=21
+    AMEDIAFORMAT_KEY_COLOR_FORMAT; # var introduced=21
     AMEDIAFORMAT_KEY_COLOR_RANGE; # var introduced=28
     AMEDIAFORMAT_KEY_COLOR_STANDARD; # var introduced=28
     AMEDIAFORMAT_KEY_COLOR_TRANSFER; # var introduced=28
     AMEDIAFORMAT_KEY_COMPLEXITY; # var introduced=28
+    AMEDIAFORMAT_KEY_CSD; # var introduced=28
+    AMEDIAFORMAT_KEY_CSD_0; # var introduced=28
+    AMEDIAFORMAT_KEY_CSD_1; # var introduced=28
+    AMEDIAFORMAT_KEY_CSD_2; # var introduced=28
     AMEDIAFORMAT_KEY_DISPLAY_CROP; # var introduced=28
-    AMEDIAFORMAT_KEY_DURATION; # var
-    AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL; # var
-    AMEDIAFORMAT_KEY_FRAME_RATE; # var
+    AMEDIAFORMAT_KEY_DISPLAY_HEIGHT; # var introduced=28
+    AMEDIAFORMAT_KEY_DISPLAY_WIDTH; # var introduced=28
+    AMEDIAFORMAT_KEY_DURATION; # var introduced=21
+    AMEDIAFORMAT_KEY_FLAC_COMPRESSION_LEVEL; # var introduced=21
+    AMEDIAFORMAT_KEY_FRAME_RATE; # var introduced=21
     AMEDIAFORMAT_KEY_GRID_COLUMNS; # var introduced=28
     AMEDIAFORMAT_KEY_GRID_ROWS; # var introduced=28
     AMEDIAFORMAT_KEY_HDR_STATIC_INFO; # var introduced=28
-    AMEDIAFORMAT_KEY_HEIGHT; # var
+    AMEDIAFORMAT_KEY_HEIGHT; # var introduced=21
     AMEDIAFORMAT_KEY_INTRA_REFRESH_PERIOD; # var introduced=28
-    AMEDIAFORMAT_KEY_IS_ADTS; # var
-    AMEDIAFORMAT_KEY_IS_AUTOSELECT; # var
-    AMEDIAFORMAT_KEY_IS_DEFAULT; # var
-    AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE; # var
-    AMEDIAFORMAT_KEY_I_FRAME_INTERVAL; # var
-    AMEDIAFORMAT_KEY_LANGUAGE; # var
+    AMEDIAFORMAT_KEY_IS_ADTS; # var introduced=21
+    AMEDIAFORMAT_KEY_IS_AUTOSELECT; # var introduced=21
+    AMEDIAFORMAT_KEY_IS_DEFAULT; # var introduced=21
+    AMEDIAFORMAT_KEY_IS_FORCED_SUBTITLE; # var introduced=21
+    AMEDIAFORMAT_KEY_I_FRAME_INTERVAL; # var introduced=21
+    AMEDIAFORMAT_KEY_LANGUAGE; # var introduced=21
     AMEDIAFORMAT_KEY_LATENCY; # var introduced=28
     AMEDIAFORMAT_KEY_LEVEL; # var introduced=28
-    AMEDIAFORMAT_KEY_MAX_HEIGHT; # var
-    AMEDIAFORMAT_KEY_MAX_INPUT_SIZE; # var
-    AMEDIAFORMAT_KEY_MAX_WIDTH; # var
-    AMEDIAFORMAT_KEY_MIME; # var
+    AMEDIAFORMAT_KEY_MAX_HEIGHT; # var introduced=21
+    AMEDIAFORMAT_KEY_MAX_INPUT_SIZE; # var introduced=21
+    AMEDIAFORMAT_KEY_MAX_WIDTH; # var introduced=21
+    AMEDIAFORMAT_KEY_MIME; # var introduced=21
+    AMEDIAFORMAT_KEY_MPEG_USER_DATA; # var introduced=28
     AMEDIAFORMAT_KEY_OPERATING_RATE; # var introduced=28
     AMEDIAFORMAT_KEY_PCM_ENCODING; # var introduced=28
     AMEDIAFORMAT_KEY_PRIORITY; # var introduced=28
     AMEDIAFORMAT_KEY_PROFILE; # var introduced=28
-    AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP; # var
-    AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER; # var
+    AMEDIAFORMAT_KEY_PUSH_BLANK_BUFFERS_ON_STOP; # var introduced=21
+    AMEDIAFORMAT_KEY_REPEAT_PREVIOUS_FRAME_AFTER; # var introduced=21
     AMEDIAFORMAT_KEY_ROTATION; # var introduced=28
-    AMEDIAFORMAT_KEY_SAMPLE_RATE; # var
+    AMEDIAFORMAT_KEY_SAMPLE_RATE; # var introduced=21
+    AMEDIAFORMAT_KEY_SEI; # var introduced=28
     AMEDIAFORMAT_KEY_SLICE_HEIGHT; # var introduced=28
-    AMEDIAFORMAT_KEY_STRIDE; # var
+    AMEDIAFORMAT_KEY_STRIDE; # var introduced=21
+    AMEDIAFORMAT_KEY_TEMPORAL_LAYER_ID; # var introduced=28
     AMEDIAFORMAT_KEY_TEMPORAL_LAYERING; # var introduced=28
     AMEDIAFORMAT_KEY_TILE_HEIGHT; # var introduced=28
     AMEDIAFORMAT_KEY_TILE_WIDTH; # var introduced=28
+    AMEDIAFORMAT_KEY_TIME_US; # var introduced=28
+    AMEDIAFORMAT_KEY_TRACK_INDEX; # var introduced=28
     AMEDIAFORMAT_KEY_TRACK_ID; # var introduced=28
-    AMEDIAFORMAT_KEY_WIDTH; # var
+    AMEDIAFORMAT_KEY_WIDTH; # var introduced=21
     AMediaCodecActionCode_isRecoverable; # introduced=28
     AMediaCodecActionCode_isTransient; # introduced=28
     AMediaCodecCryptoInfo_delete;
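
The map file now records `introduced=` levels for the format-key variables as well as the functions. A minimal sketch, assuming the NDK header guards the 28-level keys with the same `__ANDROID_API__` checks used elsewhere in these headers, of how a client avoids referencing such a key when building for an older target:

```cpp
#include <media/NdkMediaFormat.h>

// Illustrative only: keys marked "introduced=28" in the map file are expected
// to be declared behind an __ANDROID_API__ guard, so older targets must not
// reference them unconditionally.
const char* csdKeyOrNull() {
#if __ANDROID_API__ >= 28
    return AMEDIAFORMAT_KEY_CSD_0;
#else
    return nullptr;  // building for an older API level: key not available
#endif
}
```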
diff --git a/services/audioflinger/Effects.cpp b/services/audioflinger/Effects.cpp
index 20c9d37..b3f5efd 100644
--- a/services/audioflinger/Effects.cpp
+++ b/services/audioflinger/Effects.cpp
@@ -248,7 +248,7 @@
     switch (mState) {
     case RESTART:
         reset_l();
-        // FALL THROUGH
+        FALLTHROUGH_INTENDED;
 
     case STARTING:
         // clear auxiliary effect input buffer for next accumulation
@@ -1157,7 +1157,8 @@
 {
     sp<ThreadBase> thread = mThread.promote();
     if (thread != 0 &&
-        (thread->type() == ThreadBase::OFFLOAD || thread->type() == ThreadBase::DIRECT)) {
+        (thread->type() == ThreadBase::OFFLOAD || thread->type() == ThreadBase::DIRECT) &&
+        !isNonOffloadableEnabled_l()) {
         PlaybackThread *t = (PlaybackThread *)thread.get();
         float vol_l = (float)left / (1 << 24);
         float vol_r = (float)right / (1 << 24);
@@ -2552,6 +2553,11 @@
 bool AudioFlinger::EffectChain::isNonOffloadableEnabled()
 {
     Mutex::Autolock _l(mLock);
+    return isNonOffloadableEnabled_l();
+}
+
+bool AudioFlinger::EffectChain::isNonOffloadableEnabled_l()
+{
     size_t size = mEffects.size();
     for (size_t i = 0; i < size; i++) {
         if (mEffects[i]->isEnabled() && !mEffects[i]->isOffloadable()) {
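
The new `isNonOffloadableEnabled_l()` follows the usual AudioFlinger convention: the public method takes `mLock` and forwards to an `_l` variant that assumes the lock is already held, so the volume path above can reuse the check without re-acquiring the mutex. A standalone sketch of the idiom, using `std::mutex` in place of the framework's `Mutex` (class and member names are illustrative):

```cpp
#include <mutex>
#include <vector>

// Sketch of the "_l suffix" locking convention: the public method acquires the
// lock, the _l variant assumes the caller already holds it.
class EffectChainSketch {
public:
    bool isAnyEnabled() {                        // safe to call from anywhere
        std::lock_guard<std::mutex> guard(mLock);
        return isAnyEnabled_l();
    }
    bool isAnyEnabled_l() const {                // caller must hold mLock
        for (bool enabled : mEnabled) {
            if (enabled) return true;
        }
        return false;
    }
private:
    mutable std::mutex mLock;
    std::vector<bool> mEnabled;
};
```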
diff --git a/services/audioflinger/Effects.h b/services/audioflinger/Effects.h
index e04ee8e..15a26ea 100644
--- a/services/audioflinger/Effects.h
+++ b/services/audioflinger/Effects.h
@@ -378,6 +378,7 @@
 
     // At least one non offloadable effect in the chain is enabled
     bool isNonOffloadableEnabled();
+    bool isNonOffloadableEnabled_l();
 
     void syncHalEffectsState();
 
diff --git a/services/audioflinger/Threads.cpp b/services/audioflinger/Threads.cpp
index 4ca50d7..c2320bc 100644
--- a/services/audioflinger/Threads.cpp
+++ b/services/audioflinger/Threads.cpp
@@ -4357,7 +4357,7 @@
                     isActive = false;
                     break;
                 }
-                // fall through
+                FALLTHROUGH_INTENDED;
             case TrackBase::STOPPING_2:
             case TrackBase::PAUSED:
             case TrackBase::STOPPED:
diff --git a/services/audioflinger/Tracks.cpp b/services/audioflinger/Tracks.cpp
index a7c4253..6d25fb9 100644
--- a/services/audioflinger/Tracks.cpp
+++ b/services/audioflinger/Tracks.cpp
@@ -842,7 +842,7 @@
 
             // Offloaded track was draining, we need to carry on draining when resumed
             mResumeToStopping = true;
-            // fall through...
+            FALLTHROUGH_INTENDED;
         case ACTIVE:
         case RESUMING:
             mState = PAUSING;
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
index 92332fb..1e0640c 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioInputDescriptor.cpp
@@ -313,14 +313,13 @@
 
 sp<AudioInputDescriptor> AudioInputCollection::getInputFromId(audio_port_handle_t id) const
 {
-    sp<AudioInputDescriptor> inputDesc = NULL;
     for (size_t i = 0; i < size(); i++) {
-        inputDesc = valueAt(i);
-        if (inputDesc->getId() == id) {
-            break;
+        const sp<AudioInputDescriptor> inputDescriptor = valueAt(i);
+        if (inputDescriptor->getId() == id) {
+            return inputDescriptor;
         }
     }
-    return inputDesc;
+    return NULL;
 }
 
 uint32_t AudioInputCollection::activeInputsCountOnDevices(audio_devices_t devices) const
diff --git a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
index 294a2a6..99b45c8 100644
--- a/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
+++ b/services/audiopolicy/common/managerdefinitions/src/AudioOutputDescriptor.cpp
@@ -681,14 +681,13 @@
 
 sp<SwAudioOutputDescriptor> SwAudioOutputCollection::getOutputFromId(audio_port_handle_t id) const
 {
-    sp<SwAudioOutputDescriptor> outputDesc = NULL;
     for (size_t i = 0; i < size(); i++) {
-        outputDesc = valueAt(i);
+        const sp<SwAudioOutputDescriptor> outputDesc = valueAt(i);
         if (outputDesc->getId() == id) {
-            break;
+            return outputDesc;
         }
     }
-    return outputDesc;
+    return NULL;
 }
 
 bool SwAudioOutputCollection::isAnyOutputActive(audio_stream_type_t streamToIgnore) const
diff --git a/services/audiopolicy/enginedefault/Android.mk b/services/audiopolicy/enginedefault/Android.mk
index cbbe306..837d5bb 100644
--- a/services/audiopolicy/enginedefault/Android.mk
+++ b/services/audiopolicy/enginedefault/Android.mk
@@ -34,6 +34,8 @@
 LOCAL_MODULE := libaudiopolicyenginedefault
 LOCAL_MODULE_TAGS := optional
 
+LOCAL_HEADER_LIBRARIES := libbase_headers
+
 LOCAL_STATIC_LIBRARIES := \
     libaudiopolicycomponents \
 
diff --git a/services/audiopolicy/enginedefault/src/Engine.cpp b/services/audiopolicy/enginedefault/src/Engine.cpp
index 267996c..3476419 100644
--- a/services/audiopolicy/enginedefault/src/Engine.cpp
+++ b/services/audiopolicy/enginedefault/src/Engine.cpp
@@ -25,6 +25,7 @@
 #endif
 
 #include "Engine.h"
+#include <android-base/macros.h>
 #include <AudioPolicyManagerObserver.h>
 #include <AudioPort.h>
 #include <IOProfile.h>
@@ -302,7 +303,7 @@
             break;
         }
         // when in call, DTMF and PHONE strategies follow the same rules
-        // FALL THROUGH
+        FALLTHROUGH_INTENDED;
 
     case STRATEGY_PHONE:
         // Force use of only devices on primary output if:
@@ -343,7 +344,7 @@
             device = availableOutputDevicesType & AUDIO_DEVICE_OUT_BLUETOOTH_SCO;
             if (device) break;
             // if SCO device is requested but no SCO device is available, fall back to default case
-            // FALL THROUGH
+            FALLTHROUGH_INTENDED;
 
         default:    // FORCE_NONE
             device = availableOutputDevicesType & AUDIO_DEVICE_OUT_HEARING_AID;
@@ -416,7 +417,7 @@
                     outputDeviceTypesToIgnore);
             break;
         }
-        // FALL THROUGH
+        FALLTHROUGH_INTENDED;
 
     case STRATEGY_ENFORCED_AUDIBLE:
         // strategy STRATEGY_ENFORCED_AUDIBLE uses same routing policy as STRATEGY_SONIFICATION
@@ -466,7 +467,7 @@
             }
         }
         // The second device used for sonification is the same as the device used by media strategy
-        // FALL THROUGH
+        FALLTHROUGH_INTENDED;
 
     case STRATEGY_ACCESSIBILITY:
         if (strategy == STRATEGY_ACCESSIBILITY) {
@@ -496,7 +497,7 @@
             }
         }
         // For other cases, STRATEGY_ACCESSIBILITY behaves like STRATEGY_MEDIA
-        // FALL THROUGH
+        FALLTHROUGH_INTENDED;
 
     // FIXME: STRATEGY_REROUTING follow STRATEGY_MEDIA for now
     case STRATEGY_REROUTING:
@@ -681,7 +682,7 @@
                 device = AUDIO_DEVICE_IN_BLUETOOTH_SCO_HEADSET;
                 break;
             }
-            // FALL THROUGH
+            FALLTHROUGH_INTENDED;
 
         default:    // FORCE_NONE
             if (availableDeviceTypes & AUDIO_DEVICE_IN_WIRED_HEADSET) {
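
The `// FALL THROUGH` comments are replaced with `FALLTHROUGH_INTENDED` from `<android-base/macros.h>` (added to the includes above), which expands to the compiler's fall-through attribute so deliberate fall-throughs are not flagged by `-Wimplicit-fallthrough`. A small illustrative switch; the case names are placeholders, not the engine's real strategies:

```cpp
#include <android-base/macros.h>

// Illustrative only: FALLTHROUGH_INTENDED marks a deliberate case fall-through.
int strategyPriority(int strategy) {
    switch (strategy) {
    case 0:               // hypothetical STRATEGY_DTMF-like case
        // follows the same rules as the next case
        FALLTHROUGH_INTENDED;
    case 1:               // hypothetical STRATEGY_PHONE-like case
        return 10;
    default:
        return 0;
    }
}
```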
diff --git a/services/audiopolicy/service/AudioPolicyEffects.cpp b/services/audiopolicy/service/AudioPolicyEffects.cpp
index c7dfe0f..4a1cab4 100644
--- a/services/audiopolicy/service/AudioPolicyEffects.cpp
+++ b/services/audiopolicy/service/AudioPolicyEffects.cpp
@@ -460,12 +460,13 @@
         len = strnlen(node->value, EFFECT_STRING_LEN_MAX);
         if (*curSize + len + 1 > *totSize) {
             *totSize = *curSize + len + 1;
-            *param = (char *)realloc(*param, *totSize);
-            if (*param == NULL) {
+            char *newParam = (char *)realloc(*param, *totSize);
+            if (newParam == NULL) {
                 len = 0;
                 ALOGE("%s realloc error for string len %zu", __func__, *totSize);
                 goto exit;
             }
+            *param = newParam;
         }
         strncpy(*param + *curSize, node->value, len);
         *curSize += len;
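
The fix above stores `realloc`'s result in a temporary before overwriting `*param`: on failure `realloc` returns NULL but leaves the original block allocated, so assigning directly would leak it and lose the existing contents. A self-contained sketch of the same idiom (names are illustrative):

```cpp
#include <cstdlib>
#include <cstring>

// Safe-realloc idiom: keep the old pointer until the resize is known to have
// succeeded, so a failure neither leaks nor discards the existing buffer.
bool appendString(char** buf, size_t* used, size_t* cap, const char* s) {
    size_t len = strlen(s);
    if (*used + len + 1 > *cap) {
        size_t newCap = *used + len + 1;
        char* grown = static_cast<char*>(realloc(*buf, newCap));
        if (grown == nullptr) {
            return false;          // *buf is still valid and still owned by the caller
        }
        *buf = grown;
        *cap = newCap;
    }
    memcpy(*buf + *used, s, len + 1);
    *used += len;
    return true;
}
```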
diff --git a/services/camera/libcameraservice/CameraFlashlight.cpp b/services/camera/libcameraservice/CameraFlashlight.cpp
index 471c77d..e629cdd 100644
--- a/services/camera/libcameraservice/CameraFlashlight.cpp
+++ b/services/camera/libcameraservice/CameraFlashlight.cpp
@@ -125,7 +125,7 @@
     status_t res;
 
     std::vector<String8> cameraIds;
-    std::vector<std::string> ids = mProviderManager->getAPI1CompatibleCameraDeviceIds();
+    std::vector<std::string> ids = mProviderManager->getCameraDeviceIds();
     int numberOfCameras = static_cast<int>(ids.size());
     cameraIds.resize(numberOfCameras);
     // No module, must be provider
@@ -217,7 +217,7 @@
 
         if (mOpenedCameraIds.size() == 0) {
             // notify torch unavailable for all cameras with a flash
-            std::vector<std::string> ids = mProviderManager->getAPI1CompatibleCameraDeviceIds();
+            std::vector<std::string> ids = mProviderManager->getCameraDeviceIds();
             int numCameras = static_cast<int>(ids.size());
             for (int i = 0; i < numCameras; i++) {
                 String8 id8(ids[i].c_str());
@@ -263,7 +263,7 @@
 
     if (isBackwardCompatibleMode(cameraId)) {
         // notify torch available for all cameras with a flash
-        std::vector<std::string> ids = mProviderManager->getAPI1CompatibleCameraDeviceIds();
+        std::vector<std::string> ids = mProviderManager->getCameraDeviceIds();
         int numCameras = static_cast<int>(ids.size());
         for (int i = 0; i < numCameras; i++) {
             String8 id8(ids[i].c_str());
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 0db5642..c85d00f 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -2261,8 +2261,8 @@
                 return NULL;
             }
         }
-        // Check if stream is being prepared
-        if (mInputStream->isPreparing()) {
+        // Check if stream prepare is blocking requests.
+        if (mInputStream->isBlockedByPrepare()) {
             CLOGE("Request references an input stream that's being prepared!");
             return NULL;
         }
@@ -2312,8 +2312,8 @@
                 return NULL;
             }
         }
-        // Check if stream is being prepared
-        if (stream->isPreparing()) {
+        // Check if stream prepare is blocking requests.
+        if (stream->isBlockedByPrepare()) {
             CLOGE("Request references an output stream that's being prepared!");
             return NULL;
         }
@@ -4854,6 +4854,15 @@
                 captureRequest->mOutputStreams.size());
         halRequest->output_buffers = outputBuffers->array();
         std::set<String8> requestedPhysicalCameras;
+
+        sp<Camera3Device> parent = mParent.promote();
+        if (parent == NULL) {
+            // Should not happen, and nowhere to send errors to, so just log it
+            CLOGE("RequestThread: Parent is gone");
+            return INVALID_OPERATION;
+        }
+        nsecs_t waitDuration = kBaseGetBufferWait + parent->getExpectedInFlightDuration();
+
         for (size_t j = 0; j < captureRequest->mOutputStreams.size(); j++) {
             sp<Camera3OutputStreamInterface> outputStream = captureRequest->mOutputStreams.editItemAt(j);
 
@@ -4862,7 +4871,8 @@
                 // Only try to prepare video stream on the first video request.
                 mPrepareVideoStream = false;
 
-                res = outputStream->startPrepare(Camera3StreamInterface::ALLOCATE_PIPELINE_MAX);
+                res = outputStream->startPrepare(Camera3StreamInterface::ALLOCATE_PIPELINE_MAX,
+                        false /*blockRequest*/);
                 while (res == NOT_ENOUGH_DATA) {
                     res = outputStream->prepareNextBuffer();
                 }
@@ -4874,6 +4884,7 @@
             }
 
             res = outputStream->getBuffer(&outputBuffers->editItemAt(j),
+                    waitDuration,
                     captureRequest->mOutputSurfaces[j]);
             if (res != OK) {
                 // Can't get output buffer from gralloc queue - this could be due to
@@ -4900,13 +4911,6 @@
         totalNumBuffers += halRequest->num_output_buffers;
 
         // Log request in the in-flight queue
-        sp<Camera3Device> parent = mParent.promote();
-        if (parent == NULL) {
-            // Should not happen, and nowhere to send errors to, so just log it
-            CLOGE("RequestThread: Parent is gone");
-            return INVALID_OPERATION;
-        }
-
         // If this request list is for constrained high speed recording (not
         // preview), and the current request is not the last one in the batch,
         // do not send callback to the app.
@@ -5533,7 +5537,7 @@
     Mutex::Autolock l(mLock);
     sp<NotificationListener> listener = mListener.promote();
 
-    res = stream->startPrepare(maxCount);
+    res = stream->startPrepare(maxCount, true /*blockRequest*/);
     if (res == OK) {
         // No preparation needed, fire listener right off
         ALOGV("%s: Stream %d already prepared", __FUNCTION__, stream->getId());
@@ -5621,7 +5625,7 @@
 
     auto it = mPendingStreams.begin();
     for (; it != mPendingStreams.end();) {
-        res = it->second->startPrepare(it->first);
+        res = it->second->startPrepare(it->first, true /*blockRequest*/);
         if (res == OK) {
             if (listener != NULL) {
                 listener->notifyPrepared(it->second->getId());
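
Promoting `mParent` is moved ahead of the output-buffer loop so the per-request wait budget (`kBaseGetBufferWait` plus the expected in-flight duration) can be computed before any `getBuffer()` call; the promote-and-check step stays mandatory because the request thread only holds a weak reference to the device. A sketch of that pattern using `std::weak_ptr` as a stand-in for `wp<>`; the device type and durations are illustrative:

```cpp
#include <cstdint>
#include <memory>

// Promote-then-check: the worker holds only a weak reference to its parent and
// must upgrade it before each use; if the parent is gone, the request aborts.
struct Device { int64_t expectedInFlightNs() const { return 50'000'000; } };

constexpr int64_t kBaseGetBufferWaitNs = 3'000'000'000;   // 3 s base wait (illustrative)

int64_t computeWaitBudget(const std::weak_ptr<Device>& parentRef) {
    std::shared_ptr<Device> parent = parentRef.lock();     // analogue of wp<>::promote()
    if (parent == nullptr) {
        return -1;                                          // parent is gone; caller bails out
    }
    return kBaseGetBufferWaitNs + parent->expectedInFlightNs();
}
```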
diff --git a/services/camera/libcameraservice/device3/Camera3Device.h b/services/camera/libcameraservice/device3/Camera3Device.h
index ef3cbc4..131d62f 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.h
+++ b/services/camera/libcameraservice/device3/Camera3Device.h
@@ -221,6 +221,7 @@
     static const size_t        kInFlightWarnLimitHighSpeed = 256; // batch size 32 * pipe depth 8
     static const nsecs_t       kDefaultExpectedDuration = 100000000; // 100 ms
     static const nsecs_t       kMinInflightDuration = 5000000000; // 5 s
+    static const nsecs_t       kBaseGetBufferWait = 3000000000; // 3 s
     // SCHED_FIFO priority for request submission thread in HFR mode
     static const int           kRequestThreadPriority = 1;
 
diff --git a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
index 2bb9ff7..fb3ce4c 100644
--- a/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3SharedOutputStream.cpp
@@ -60,9 +60,8 @@
         }
     }
 
-    android::PixelFormat format = isFormatOverridden() ? getOriginalFormat() : getFormat();
     res = mStreamSplitter->connect(initialSurfaces, usage, mUsage, camera3_stream::max_buffers,
-            getWidth(), getHeight(), format, &mConsumer);
+            getWidth(), getHeight(), getFormat(), &mConsumer);
     if (res != OK) {
         ALOGE("%s: Failed to connect to stream splitter: %s(%d)",
                 __FUNCTION__, strerror(-res), res);
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index 1105b75..6870350 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -61,6 +61,7 @@
     mOldUsage(0),
     mOldMaxBuffers(0),
     mPrepared(false),
+    mPrepareBlockRequest(true),
     mPreparedBufferIdx(0),
     mLastMaxCount(Camera3StreamInterface::ALLOCATE_PIPELINE_MAX),
     mBufferLimitLatency(kBufferLimitLatencyBinSize),
@@ -327,6 +328,7 @@
     // Reset prepared state, since buffer config has changed, and existing
     // allocations are no longer valid
     mPrepared = false;
+    mPrepareBlockRequest = true;
     mStreamUnpreparable = false;
 
     status_t res;
@@ -381,7 +383,7 @@
     return mStreamUnpreparable;
 }
 
-status_t Camera3Stream::startPrepare(int maxCount) {
+status_t Camera3Stream::startPrepare(int maxCount, bool blockRequest) {
     ATRACE_CALL();
 
     Mutex::Autolock l(mLock);
@@ -413,8 +415,6 @@
         return INVALID_OPERATION;
     }
 
-
-
     size_t pipelineMax = getBufferCountLocked();
     size_t clampedCount = (pipelineMax < static_cast<size_t>(maxCount)) ?
             pipelineMax : static_cast<size_t>(maxCount);
@@ -422,6 +422,7 @@
             pipelineMax : clampedCount;
 
     mPrepared = bufferCount <= mLastMaxCount;
+    mPrepareBlockRequest = blockRequest;
 
     if (mPrepared) return OK;
 
@@ -435,9 +436,9 @@
     return NOT_ENOUGH_DATA;
 }
 
-bool Camera3Stream::isPreparing() const {
+bool Camera3Stream::isBlockedByPrepare() const {
     Mutex::Autolock l(mLock);
-    return mState == STATE_PREPARING;
+    return mState == STATE_PREPARING && mPrepareBlockRequest;
 }
 
 bool Camera3Stream::isAbandoned() const {
@@ -569,6 +570,7 @@
 }
 
 status_t Camera3Stream::getBuffer(camera3_stream_buffer *buffer,
+        nsecs_t waitBufferTimeout,
         const std::vector<size_t>& surface_ids) {
     ATRACE_CALL();
     Mutex::Autolock l(mLock);
@@ -586,13 +588,16 @@
         ALOGV("%s: Already dequeued max output buffers (%d), wait for next returned one.",
                         __FUNCTION__, camera3_stream::max_buffers);
         nsecs_t waitStart = systemTime(SYSTEM_TIME_MONOTONIC);
-        res = mOutputBufferReturnedSignal.waitRelative(mLock, kWaitForBufferDuration);
+        if (waitBufferTimeout < kWaitForBufferDuration) {
+            waitBufferTimeout = kWaitForBufferDuration;
+        }
+        res = mOutputBufferReturnedSignal.waitRelative(mLock, waitBufferTimeout);
         nsecs_t waitEnd = systemTime(SYSTEM_TIME_MONOTONIC);
         mBufferLimitLatency.add(waitStart, waitEnd);
         if (res != OK) {
             if (res == TIMED_OUT) {
                 ALOGE("%s: wait for output buffer return timed out after %lldms (max_buffers %d)",
-                        __FUNCTION__, kWaitForBufferDuration / 1000000LL,
+                        __FUNCTION__, waitBufferTimeout / 1000000LL,
                         camera3_stream::max_buffers);
             }
             return res;
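
`getBuffer()` now accepts a caller-supplied `waitBufferTimeout` and raises it to at least the existing `kWaitForBufferDuration` before waiting on the buffer-returned condition, so callers can lengthen the wait but never shorten it below the previous default. A standalone sketch of that clamping, with `std::condition_variable` standing in for the framework's `Condition` and a placeholder default value:

```cpp
#include <algorithm>
#include <chrono>
#include <condition_variable>
#include <mutex>

// Sketch of the timeout-floor logic: the caller-provided timeout is honored
// only when it is longer than the historical default.
bool waitForBufferReturn(std::mutex& lock, std::condition_variable& returned,
                         bool& bufferAvailable, std::chrono::nanoseconds requestedTimeout) {
    constexpr auto kDefaultWait = std::chrono::seconds(3);   // placeholder for kWaitForBufferDuration
    auto timeout = std::max<std::chrono::nanoseconds>(requestedTimeout, kDefaultWait);

    std::unique_lock<std::mutex> l(lock);
    return returned.wait_for(l, timeout, [&] { return bufferAvailable; });
}
```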
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index a60cb56..9c0f4c0 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -232,6 +232,11 @@
      *
      * This call performs no allocation, so is quick to call.
      *
+     * blockRequest specifies whether prepare will block upcoming capture
+     * requests. This flag should only be set to false if the caller guarantees
+     * that the whole buffer preparation process completes before any capture
+     * request arrives.
+     *
      * Returns:
      *    OK if no more buffers need to be preallocated
      *    NOT_ENOUGH_DATA if calls to prepareNextBuffer are needed to finish
@@ -240,12 +245,12 @@
      *    INVALID_OPERATION if called when not in CONFIGURED state, or a
      *        valid buffer has already been returned to this stream.
      */
-    status_t         startPrepare(int maxCount);
+    status_t         startPrepare(int maxCount, bool blockRequest);
 
     /**
-     * Check if the stream is mid-preparing.
+     * Check if requests on this stream are blocked by an in-progress prepare call.
      */
-    bool             isPreparing() const;
+    bool             isBlockedByPrepare() const;
 
     /**
      * Continue stream buffer preparation by allocating the next
@@ -311,6 +316,7 @@
      *
      */
     status_t         getBuffer(camera3_stream_buffer *buffer,
+            nsecs_t waitBufferTimeout,
             const std::vector<size_t>& surface_ids = std::vector<size_t>());
 
     /**
@@ -533,6 +539,7 @@
     // has been called sufficient number of times, or stream configuration
     // had to register buffers with the HAL
     bool mPrepared;
+    bool mPrepareBlockRequest;
 
     Vector<camera3_stream_buffer_t> mPreparedBuffers;
     size_t mPreparedBufferIdx;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 9ed7184..3d45c89 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -160,6 +160,11 @@
      * PREPARING state. Otherwise, returns NOT_ENOUGH_DATA and transitions
      * to PREPARING.
      *
+     * blockRequest specifies whether prepare will block upcoming capture
+     * requests. This flag should only be set to false if the caller guarantees
+     * that the whole buffer preparation process completes before any capture
+     * request arrives.
+     *
      * Returns:
      *    OK if no more buffers need to be preallocated
      *    NOT_ENOUGH_DATA if calls to prepareNextBuffer are needed to finish
@@ -168,12 +173,12 @@
      *    INVALID_OPERATION if called when not in CONFIGURED state, or a
      *        valid buffer has already been returned to this stream.
      */
-    virtual status_t startPrepare(int maxCount) = 0;
+    virtual status_t startPrepare(int maxCount, bool blockRequest) = 0;
 
     /**
-     * Check if the stream is mid-preparing.
+     * Check if requests on this stream are blocked by an in-progress prepare call.
      */
-    virtual bool     isPreparing() const = 0;
+    virtual bool     isBlockedByPrepare() const = 0;
 
     /**
      * Continue stream buffer preparation by allocating the next
@@ -237,6 +242,7 @@
      *
      */
     virtual status_t getBuffer(camera3_stream_buffer *buffer,
+            nsecs_t waitBufferTimeout,
             const std::vector<size_t>& surface_ids = std::vector<size_t>()) = 0;
 
     /**
diff --git a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
index 59ac636..a0be608 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
+++ b/services/camera/libcameraservice/device3/Camera3StreamSplitter.cpp
@@ -182,12 +182,19 @@
         return BAD_VALUE;
     }
 
-  status_t res = native_window_set_buffers_dimensions(outputQueue.get(),
+    status_t res = native_window_set_buffers_dimensions(outputQueue.get(),
             mWidth, mHeight);
     if (res != NO_ERROR) {
         SP_LOGE("addOutput: failed to set buffer dimensions (%d)", res);
         return res;
     }
+    res = native_window_set_buffers_format(outputQueue.get(),
+            mFormat);
+    if (res != OK) {
+        ALOGE("%s: Unable to configure stream buffer format %#x for surfaceId %zu",
+                __FUNCTION__, mFormat, surfaceId);
+        return res;
+    }
 
     sp<IGraphicBufferProducer> gbp = outputQueue->getIGraphicBufferProducer();
     // Connect to the buffer producer
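
The splitter now sets the buffer format on each attached output in addition to the dimensions, so downstream consumers receive buffers in the stream's actual format rather than whatever the surface defaulted to. For reference, the public NDK analogue of the two internal calls configures dimensions and format in one step; the window pointer here is assumed to come from the caller:

```cpp
#include <android/native_window.h>

// Public-NDK sketch: configure a surface's buffer geometry (dimensions plus
// pixel format) before producing into it. Inside the framework this is done
// with the internal native_window_set_buffers_dimensions()/_format() pair.
int configureOutput(ANativeWindow* window, int width, int height, int format) {
    return ANativeWindow_setBuffersGeometry(window, width, height, format);
}
```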
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.cpp b/services/camera/libcameraservice/device3/DistortionMapper.cpp
index eef6658..4dafefd 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.cpp
+++ b/services/camera/libcameraservice/device3/DistortionMapper.cpp
@@ -49,7 +49,7 @@
 };
 
 // Only for capture result
-constexpr std::array<uint32_t, 2> DistortionMapper::kResultPointsToCorrect = {
+constexpr std::array<uint32_t, 2> DistortionMapper::kResultPointsToCorrectNoClamp = {
     ANDROID_STATISTICS_FACE_RECTANGLES, // Says rectangles, is really points
     ANDROID_STATISTICS_FACE_LANDMARKS,
 };
@@ -79,12 +79,21 @@
     array = deviceInfo.find(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE);
     if (array.count != 4) return BAD_VALUE;
 
-    mArrayWidth = array.data.i32[2];
-    mArrayHeight = array.data.i32[3];
+    float arrayX = static_cast<float>(array.data.i32[0]);
+    float arrayY = static_cast<float>(array.data.i32[1]);
+    mArrayWidth = static_cast<float>(array.data.i32[2]);
+    mArrayHeight = static_cast<float>(array.data.i32[3]);
 
     array = deviceInfo.find(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE);
-    mActiveWidth = array.data.i32[2];
-    mActiveHeight = array.data.i32[3];
+    if (array.count != 4) return BAD_VALUE;
+
+    float activeX = static_cast<float>(array.data.i32[0]);
+    float activeY = static_cast<float>(array.data.i32[1]);
+    mActiveWidth = static_cast<float>(array.data.i32[2]);
+    mActiveHeight = static_cast<float>(array.data.i32[3]);
+
+    mArrayDiffX = activeX - arrayX;
+    mArrayDiffY = activeY - arrayY;
 
     return updateCalibration(deviceInfo);
 }
@@ -111,22 +120,13 @@
                 if (weight == 0) {
                     continue;
                 }
-                res = mapCorrectedToRaw(e.data.i32 + j, 2);
+                res = mapCorrectedToRaw(e.data.i32 + j, 2, /*clamp*/true);
                 if (res != OK) return res;
-                for (size_t k = 0; k < 4; k+=2) {
-                    int32_t& x = e.data.i32[j + k];
-                    int32_t& y = e.data.i32[j + k + 1];
-                    // Clamp to within active array
-                    x = std::max(0, x);
-                    x = std::min(mActiveWidth - 1, x);
-                    y = std::max(0, y);
-                    y = std::min(mActiveHeight - 1, y);
-                }
             }
         }
         for (auto rect : kRequestRectsToCorrect) {
             e = request->find(rect);
-            res = mapCorrectedRectToRaw(e.data.i32, e.count / 4);
+            res = mapCorrectedRectToRaw(e.data.i32, e.count / 4, /*clamp*/true);
             if (res != OK) return res;
         }
     }
@@ -156,27 +156,18 @@
                 if (weight == 0) {
                     continue;
                 }
-                res = mapRawToCorrected(e.data.i32 + j, 2);
+                res = mapRawToCorrected(e.data.i32 + j, 2, /*clamp*/true);
                 if (res != OK) return res;
-                for (size_t k = 0; k < 4; k+=2) {
-                    int32_t& x = e.data.i32[j + k];
-                    int32_t& y = e.data.i32[j + k + 1];
-                    // Clamp to within active array
-                    x = std::max(0, x);
-                    x = std::min(mActiveWidth - 1, x);
-                    y = std::max(0, y);
-                    y = std::min(mActiveHeight - 1, y);
-                }
             }
         }
         for (auto rect : kResultRectsToCorrect) {
             e = result->find(rect);
-            res = mapRawRectToCorrected(e.data.i32, e.count / 4);
+            res = mapRawRectToCorrected(e.data.i32, e.count / 4, /*clamp*/true);
             if (res != OK) return res;
         }
-        for (auto pts : kResultPointsToCorrect) {
+        for (auto pts : kResultPointsToCorrectNoClamp) {
             e = result->find(pts);
-            res = mapRawToCorrected(e.data.i32, e.count / 2);
+            res = mapRawToCorrected(e.data.i32, e.count / 2, /*clamp*/false);
             if (res != OK) return res;
         }
     }
@@ -232,9 +223,12 @@
     return OK;
 }
 
-status_t DistortionMapper::mapRawToCorrected(int32_t *coordPairs, int coordCount) {
+status_t DistortionMapper::mapRawToCorrected(int32_t *coordPairs, int coordCount,
+        bool clamp, bool simple) {
     if (!mValidMapping) return INVALID_OPERATION;
 
+    if (simple) return mapRawToCorrectedSimple(coordPairs, coordCount, clamp);
+
     if (!mValidGrids) {
         status_t res = buildGrids();
         if (res != OK) return res;
@@ -275,6 +269,12 @@
         // Interpolate along left edge of corrected quad (which are axis-aligned) for y
         float corrY = corrQuad->coords[1] + v * (corrQuad->coords[7] - corrQuad->coords[1]);
 
+        // Clamp to within active array
+        if (clamp) {
+            corrX = std::min(mActiveWidth - 1, std::max(0.f, corrX));
+            corrY = std::min(mActiveHeight - 1, std::max(0.f, corrY));
+        }
+
         coordPairs[i] = static_cast<int32_t>(std::round(corrX));
         coordPairs[i + 1] = static_cast<int32_t>(std::round(corrY));
     }
@@ -282,7 +282,30 @@
     return OK;
 }
 
-status_t DistortionMapper::mapRawRectToCorrected(int32_t *rects, int rectCount) {
+status_t DistortionMapper::mapRawToCorrectedSimple(int32_t *coordPairs, int coordCount,
+        bool clamp) const {
+    if (!mValidMapping) return INVALID_OPERATION;
+
+    float scaleX = mActiveWidth / mArrayWidth;
+    float scaleY = mActiveHeight / mArrayHeight;
+    for (int i = 0; i < coordCount * 2; i += 2) {
+        float x = coordPairs[i];
+        float y = coordPairs[i + 1];
+        float corrX = x * scaleX;
+        float corrY = y * scaleY;
+        if (clamp) {
+            corrX = std::min(mActiveWidth - 1, std::max(0.f, corrX));
+            corrY = std::min(mActiveHeight - 1, std::max(0.f, corrY));
+        }
+        coordPairs[i] = static_cast<int32_t>(std::round(corrX));
+        coordPairs[i + 1] = static_cast<int32_t>(std::round(corrY));
+    }
+
+    return OK;
+}
+
+status_t DistortionMapper::mapRawRectToCorrected(int32_t *rects, int rectCount, bool clamp,
+        bool simple) {
     if (!mValidMapping) return INVALID_OPERATION;
     for (int i = 0; i < rectCount * 4; i += 4) {
         // Map from (l, t, width, height) to (l, t, r, b)
@@ -293,7 +316,7 @@
             rects[i + 1] + rects[i + 3]
         };
 
-        mapRawToCorrected(coords, 2);
+        mapRawToCorrected(coords, 2, clamp, simple);
 
         // Map back to (l, t, width, height)
         rects[i] = coords[0];
@@ -305,14 +328,24 @@
     return OK;
 }
 
+status_t DistortionMapper::mapCorrectedToRaw(int32_t *coordPairs, int coordCount, bool clamp,
+        bool simple) const {
+    return mapCorrectedToRawImpl(coordPairs, coordCount, clamp, simple);
+}
+
 template<typename T>
-status_t DistortionMapper::mapCorrectedToRaw(T *coordPairs, int coordCount) const {
+status_t DistortionMapper::mapCorrectedToRawImpl(T *coordPairs, int coordCount, bool clamp,
+        bool simple) const {
     if (!mValidMapping) return INVALID_OPERATION;
 
+    if (simple) return mapCorrectedToRawImplSimple(coordPairs, coordCount, clamp);
+
+    float activeCx = mCx - mArrayDiffX;
+    float activeCy = mCy - mArrayDiffY;
     for (int i = 0; i < coordCount * 2; i += 2) {
-        // Move to normalized space
-        float ywi = (coordPairs[i + 1] - mCy) * mInvFy;
-        float xwi = (coordPairs[i] - mCx - mS * ywi) * mInvFx;
+        // Move to normalized space from active array space
+        float ywi = (coordPairs[i + 1] - activeCy) * mInvFy;
+        float xwi = (coordPairs[i] - activeCx - mS * ywi) * mInvFx;
         // Apply distortion model to calculate raw image coordinates
         float rSq = xwi * xwi + ywi * ywi;
         float Fr = 1.f + (mK[0] * rSq) + (mK[1] * rSq * rSq) + (mK[2] * rSq * rSq * rSq);
@@ -321,6 +354,11 @@
         // Move back to image space
         float xr = mFx * xc + mS * yc + mCx;
         float yr = mFy * yc + mCy;
+        // Clamp to within pre-correction active array
+        if (clamp) {
+            xr = std::min(mArrayWidth - 1, std::max(0.f, xr));
+            yr = std::min(mArrayHeight - 1, std::max(0.f, yr));
+        }
 
         coordPairs[i] = static_cast<T>(std::round(xr));
         coordPairs[i + 1] = static_cast<T>(std::round(yr));
@@ -329,10 +367,32 @@
     return OK;
 }
 
-template status_t DistortionMapper::mapCorrectedToRaw(int32_t*, int) const;
-template status_t DistortionMapper::mapCorrectedToRaw(float*, int) const;
+template<typename T>
+status_t DistortionMapper::mapCorrectedToRawImplSimple(T *coordPairs, int coordCount,
+        bool clamp) const {
+    if (!mValidMapping) return INVALID_OPERATION;
 
-status_t DistortionMapper::mapCorrectedRectToRaw(int32_t *rects, int rectCount) const {
+    float scaleX = mArrayWidth / mActiveWidth;
+    float scaleY = mArrayHeight / mActiveHeight;
+    for (int i = 0; i < coordCount * 2; i += 2) {
+        float x = coordPairs[i];
+        float y = coordPairs[i + 1];
+        float rawX = x * scaleX;
+        float rawY = y * scaleY;
+        if (clamp) {
+            rawX = std::min(mArrayWidth - 1, std::max(0.f, rawX));
+            rawY = std::min(mArrayHeight - 1, std::max(0.f, rawY));
+        }
+        coordPairs[i] = static_cast<T>(std::round(rawX));
+        coordPairs[i + 1] = static_cast<T>(std::round(rawY));
+    }
+
+    return OK;
+}
+
+
+status_t DistortionMapper::mapCorrectedRectToRaw(int32_t *rects, int rectCount, bool clamp,
+        bool simple) const {
     if (!mValidMapping) return INVALID_OPERATION;
 
     for (int i = 0; i < rectCount * 4; i += 4) {
@@ -344,7 +404,7 @@
             rects[i + 1] + rects[i + 3]
         };
 
-        mapCorrectedToRaw(coords, 2);
+        mapCorrectedToRaw(coords, 2, clamp, simple);
 
         // Map back to (l, t, width, height)
         rects[i] = coords[0];
@@ -380,7 +440,8 @@
             };
             mDistortedGrid[index].src = &mCorrectedGrid[index];
             mDistortedGrid[index].coords = mCorrectedGrid[index].coords;
-            status_t res = mapCorrectedToRaw(mDistortedGrid[index].coords.data(), 4);
+            status_t res = mapCorrectedToRawImpl(mDistortedGrid[index].coords.data(), 4,
+                    /*clamp*/false, /*simple*/false);
             if (res != OK) return res;
         }
     }
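
The new simple path replaces the full polynomial distortion model with a plain linear scale between the active array and the pre-correction array, optionally clamped to the destination bounds. A standalone sketch of that math; with the test dimensions used below (active 1000x750, pre-correction 1020x770) a corrected corner point (999, 749) maps to roughly (1019, 769), which is what the SimpleTransform test asserts:

```cpp
#include <algorithm>
#include <cmath>
#include <cstdint>

// Sketch of the simple corrected->raw mapping: scale by the ratio of the
// pre-correction array size to the active array size, then optionally clamp.
void mapCorrectedToRawSimpleSketch(int32_t* xy, int count,
                                   float activeW, float activeH,
                                   float arrayW, float arrayH, bool clamp) {
    const float scaleX = arrayW / activeW;
    const float scaleY = arrayH / activeH;
    for (int i = 0; i < count * 2; i += 2) {
        float rawX = xy[i] * scaleX;
        float rawY = xy[i + 1] * scaleY;
        if (clamp) {
            rawX = std::min(arrayW - 1, std::max(0.f, rawX));
            rawY = std::min(arrayH - 1, std::max(0.f, rawY));
        }
        xy[i] = static_cast<int32_t>(std::round(rawX));
        xy[i + 1] = static_cast<int32_t>(std::round(rawY));
    }
}
// Example: (999, 749) with active 1000x750 and pre-correction 1020x770 gives
// (999 * 1020/1000, 749 * 770/750) ~= (1019, 769).
```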
diff --git a/services/camera/libcameraservice/device3/DistortionMapper.h b/services/camera/libcameraservice/device3/DistortionMapper.h
index 00cbd32..4c0a1a6 100644
--- a/services/camera/libcameraservice/device3/DistortionMapper.h
+++ b/services/camera/libcameraservice/device3/DistortionMapper.h
@@ -73,8 +73,11 @@
      *
      *   coordPairs: A pointer to an array of consecutive (x,y) points
      *   coordCount: Number of (x,y) pairs to transform
+     *   clamp: Whether to clamp the result to the bounds of the active array
+     *   simple: Whether to use a simple linear map instead of the full distortion correction
      */
-    status_t mapRawToCorrected(int32_t *coordPairs, int coordCount);
+    status_t mapRawToCorrected(int32_t *coordPairs, int coordCount, bool clamp,
+            bool simple = true);
 
     /**
      * Transform from distorted (original) to corrected (warped) coordinates.
@@ -82,8 +85,11 @@
      *
      *   rects: A pointer to an array of consecutive (x,y, w, h) rectangles
      *   rectCount: Number of rectangles to transform
+     *   clamp: Whether to clamp the result to the bounds of the active array
+     *   simple: Whether to use a simple linear map instead of the full distortion correction
      */
-    status_t mapRawRectToCorrected(int32_t *rects, int rectCount);
+    status_t mapRawRectToCorrected(int32_t *rects, int rectCount, bool clamp,
+            bool simple = true);
 
     /**
      * Transform from corrected (warped) to distorted (original) coordinates.
@@ -91,9 +97,11 @@
      *
      *   coordPairs: A pointer to an array of consecutive (x,y) points
      *   coordCount: Number of (x,y) pairs to transform
+     *   clamp: Whether to clamp the result to the bounds of the precorrection active array
+     *   simple: Whether to use a simple linear map instead of the full distortion correction
      */
-    template<typename T>
-    status_t mapCorrectedToRaw(T* coordPairs, int coordCount) const;
+    status_t mapCorrectedToRaw(int32_t* coordPairs, int coordCount, bool clamp,
+            bool simple = true) const;
 
     /**
      * Transform from corrected (warped) to distorted (original) coordinates.
@@ -101,8 +109,11 @@
      *
      *   rects: A pointer to an array of consecutive (x,y, w, h) rectangles
      *   rectCount: Number of rectangles to transform
+     *   clamp: Whether to clamp the result to the bounds of the precorrection active array
+     *   simple: Whether to use a simple linear map instead of the full distortion correction
      */
-    status_t mapCorrectedRectToRaw(int32_t *rects, int rectCount) const;
+    status_t mapCorrectedRectToRaw(int32_t *rects, int rectCount, bool clamp,
+            bool simple = true) const;
 
     struct GridQuad {
         // Source grid quad, or null
@@ -150,8 +161,18 @@
     // Only capture result
     static const std::array<uint32_t, 1> kResultRectsToCorrect;
 
-    // Only for capture results
-    static const std::array<uint32_t, 2> kResultPointsToCorrect;
+    // Only for capture results; don't clamp
+    static const std::array<uint32_t, 2> kResultPointsToCorrectNoClamp;
+
+    // Single implementation for various mapCorrectedToRaw methods
+    template<typename T>
+    status_t mapCorrectedToRawImpl(T* coordPairs, int coordCount, bool clamp, bool simple) const;
+
+    // Simple linear interpolation option
+    template<typename T>
+    status_t mapCorrectedToRawImplSimple(T* coordPairs, int coordCount, bool clamp) const;
+
+    status_t mapRawToCorrectedSimple(int32_t *coordPairs, int coordCount, bool clamp) const;
 
     // Utility to create reverse mapping grids
     status_t buildGrids();
@@ -168,9 +189,11 @@
     float mK[5];
 
     // pre-correction active array dimensions
-    int mArrayWidth, mArrayHeight;
+    float mArrayWidth, mArrayHeight;
     // active array dimensions
-    int mActiveWidth, mActiveHeight;
+    float mActiveWidth, mActiveHeight;
+    // corner offsets between pre-correction and active arrays
+    float mArrayDiffX, mArrayDiffY;
 
     std::vector<GridQuad> mCorrectedGrid;
     std::vector<GridQuad> mDistortedGrid;
diff --git a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
index b489931..2a689c6 100644
--- a/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
+++ b/services/camera/libcameraservice/tests/DistortionMapperTest.cpp
@@ -30,6 +30,7 @@
 
 
 int32_t testActiveArray[] = {100, 100, 1000, 750};
+int32_t testPreCorrActiveArray[] = {90, 90, 1020, 770};
 
 float testICal[] = { 1000.f, 1000.f, 500.f, 500.f, 0.f };
 
@@ -45,14 +46,19 @@
 };
 
 
-void setupTestMapper(DistortionMapper *m, float distortion[5]) {
+void setupTestMapper(DistortionMapper *m,
+        float distortion[5], float intrinsics[5],
+        int32_t activeArray[4], int32_t preCorrectionActiveArray[4]) {
     CameraMetadata deviceInfo;
 
     deviceInfo.update(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
-            testActiveArray, 4);
+            preCorrectionActiveArray, 4);
+
+    deviceInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
+            activeArray, 4);
 
     deviceInfo.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
-            testICal, 5);
+            intrinsics, 5);
 
     deviceInfo.update(ANDROID_LENS_DISTORTION,
             distortion, 5);
@@ -89,6 +95,9 @@
     ASSERT_FALSE(m.calibrationValid());
 
     deviceInfo.update(ANDROID_SENSOR_INFO_PRE_CORRECTION_ACTIVE_ARRAY_SIZE,
+            testPreCorrActiveArray, 4);
+
+    deviceInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
             testActiveArray, 4);
 
     deviceInfo.update(ANDROID_LENS_INTRINSIC_CALIBRATION,
@@ -118,17 +127,19 @@
     status_t res;
 
     DistortionMapper m;
-    setupTestMapper(&m, identityDistortion);
+    setupTestMapper(&m, identityDistortion, testICal,
+            /*activeArray*/ testActiveArray,
+            /*preCorrectionActiveArray*/ testActiveArray);
 
     auto coords = basicCoords;
-    res = m.mapCorrectedToRaw(coords.data(), 5);
+    res = m.mapCorrectedToRaw(coords.data(), 5,  /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < coords.size(); i++) {
         EXPECT_EQ(coords[i], basicCoords[i]);
     }
 
-    res = m.mapRawToCorrected(coords.data(), 5);
+    res = m.mapRawToCorrected(coords.data(), 5, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < coords.size(); i++) {
@@ -137,18 +148,18 @@
 
     std::array<int32_t, 8> rects = {
         0, 0, 100, 100,
-        testActiveArray[2] - 100, testActiveArray[3]-100, 100, 100
+        testActiveArray[2] - 101, testActiveArray[3] - 101, 100, 100
     };
 
     auto rectsOrig = rects;
-    res = m.mapCorrectedRectToRaw(rects.data(), 2);
+    res = m.mapCorrectedRectToRaw(rects.data(), 2, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < rects.size(); i++) {
         EXPECT_EQ(rects[i], rectsOrig[i]);
     }
 
-    res = m.mapRawRectToCorrected(rects.data(), 2);
+    res = m.mapRawRectToCorrected(rects.data(), 2, /*clamp*/true);
     ASSERT_EQ(res, OK);
 
     for (size_t i = 0; i < rects.size(); i++) {
@@ -156,23 +167,39 @@
     }
 }
 
-TEST(DistortionMapperTest, LargeTransform) {
+TEST(DistortionMapperTest, SimpleTransform) {
+    status_t res;
+
+    DistortionMapper m;
+    setupTestMapper(&m, identityDistortion, testICal,
+            /*activeArray*/ testActiveArray,
+            /*preCorrectionActiveArray*/ testPreCorrActiveArray);
+
+    auto coords = basicCoords;
+    res = m.mapCorrectedToRaw(coords.data(), 5,  /*clamp*/true, /*simple*/true);
+    ASSERT_EQ(res, OK);
+
+    ASSERT_EQ(coords[0], 0); ASSERT_EQ(coords[1], 0);
+    ASSERT_EQ(coords[2], testPreCorrActiveArray[2] - 1); ASSERT_EQ(coords[3], 0);
+    ASSERT_EQ(coords[4], testPreCorrActiveArray[2] - 1); ASSERT_EQ(coords[5], testPreCorrActiveArray[3] - 1);
+    ASSERT_EQ(coords[6], 0); ASSERT_EQ(coords[7], testPreCorrActiveArray[3] - 1);
+    ASSERT_EQ(coords[8], testPreCorrActiveArray[2] / 2); ASSERT_EQ(coords[9], testPreCorrActiveArray[3] / 2);
+}
+
+
+void RandomTransformTest(::testing::Test *test,
+        int32_t* activeArray, DistortionMapper &m, bool clamp, bool simple) {
     status_t res;
     constexpr int maxAllowedPixelError = 2; // Maximum per-pixel error allowed
     constexpr int bucketsPerPixel = 3; // Histogram granularity
 
     unsigned int seed = 1234; // Ensure repeatability for debugging
-    const size_t coordCount = 1e6; // Number of random test points
-
-    float bigDistortion[] = {0.1, -0.003, 0.004, 0.02, 0.01};
-
-    DistortionMapper m;
-    setupTestMapper(&m, bigDistortion);
+    const size_t coordCount = 1e5; // Number of random test points
 
     std::default_random_engine gen(seed);
 
-    std::uniform_int_distribution<int> x_dist(0, testActiveArray[2] - 1);
-    std::uniform_int_distribution<int> y_dist(0, testActiveArray[3] - 1);
+    std::uniform_int_distribution<int> x_dist(0, activeArray[2] - 1);
+    std::uniform_int_distribution<int> y_dist(0, activeArray[3] - 1);
 
     std::vector<int32_t> randCoords(coordCount * 2);
 
@@ -186,12 +213,12 @@
     auto origCoords = randCoords;
 
     base::Timer correctedToRawTimer;
-    res = m.mapCorrectedToRaw(randCoords.data(), randCoords.size() / 2);
+    res = m.mapCorrectedToRaw(randCoords.data(), randCoords.size() / 2, clamp, simple);
     auto correctedToRawDurationMs = correctedToRawTimer.duration();
     EXPECT_EQ(res, OK);
 
     base::Timer rawToCorrectedTimer;
-    res = m.mapRawToCorrected(randCoords.data(), randCoords.size() / 2);
+    res = m.mapRawToCorrected(randCoords.data(), randCoords.size() / 2, clamp, simple);
     auto rawToCorrectedDurationMs = rawToCorrectedTimer.duration();
     EXPECT_EQ(res, OK);
 
@@ -202,9 +229,9 @@
             (std::chrono::duration_cast<std::chrono::duration<double, std::micro>>(
                 rawToCorrectedDurationMs) / (randCoords.size() / 2) ).count();
 
-    RecordProperty("CorrectedToRawDurationPerCoordUs",
+    test->RecordProperty("CorrectedToRawDurationPerCoordUs",
             base::StringPrintf("%f", correctedToRawDurationPerCoordUs));
-    RecordProperty("RawToCorrectedDurationPerCoordUs",
+    test->RecordProperty("RawToCorrectedDurationPerCoordUs",
             base::StringPrintf("%f", rawToCorrectedDurationPerCoordUs));
 
     // Calculate mapping errors after round trip
@@ -239,17 +266,61 @@
     }
 
     float rmsError = std::sqrt(totalErrorSq / randCoords.size());
-    RecordProperty("RmsError", base::StringPrintf("%f", rmsError));
+    test->RecordProperty("RmsError", base::StringPrintf("%f", rmsError));
     for (size_t i = 0; i < histogram.size(); i++) {
         std::string label = base::StringPrintf("HistogramBin[%f,%f)",
                 (float)i/bucketsPerPixel, (float)(i + 1)/bucketsPerPixel);
-        RecordProperty(label, histogram[i]);
+        test->RecordProperty(label, histogram[i]);
     }
-    RecordProperty("HistogramOutOfRange", outOfHistogram);
+    test->RecordProperty("HistogramOutOfRange", outOfHistogram);
+}
+
+// Test a realistic distortion function with matching calibration values, enforcing
+// clamping.
+TEST(DistortionMapperTest, DISABLED_SmallTransform) {
+    int32_t activeArray[] = {0, 8, 3278, 2450};
+    int32_t preCorrectionActiveArray[] = {0, 0, 3280, 2464};
+
+    float distortion[] = {0.06875723, -0.13922249, 0.02818312, -0.00032781, -0.00025431};
+    float intrinsics[] = {1812.50000000, 1812.50000000, 1645.59533691, 1229.23229980, 0.00000000};
+
+    DistortionMapper m;
+    setupTestMapper(&m, distortion, intrinsics, activeArray, preCorrectionActiveArray);
+
+    RandomTransformTest(this, activeArray, m, /*clamp*/true, /*simple*/false);
+}
+
+// Test a realistic distortion function with matching calibration values, enforcing
+// clamping, but using the simple linear transform
+TEST(DistortionMapperTest, SmallSimpleTransform) {
+    int32_t activeArray[] = {0, 8, 3278, 2450};
+    int32_t preCorrectionActiveArray[] = {0, 0, 3280, 2464};
+
+    float distortion[] = {0.06875723, -0.13922249, 0.02818312, -0.00032781, -0.00025431};
+    float intrinsics[] = {1812.50000000, 1812.50000000, 1645.59533691, 1229.23229980, 0.00000000};
+
+    DistortionMapper m;
+    setupTestMapper(&m, distortion, intrinsics, activeArray, preCorrectionActiveArray);
+
+    RandomTransformTest(this, activeArray, m, /*clamp*/true, /*simple*/true);
+}
+
+// Test a very large distortion function; the regions aren't valid for such a big transform,
+// so disable clamping.  This test is just to verify round-trip math accuracy for big transforms
+TEST(DistortionMapperTest, LargeTransform) {
+    float bigDistortion[] = {0.1, -0.003, 0.004, 0.02, 0.01};
+
+    DistortionMapper m;
+    setupTestMapper(&m, bigDistortion, testICal,
+            /*activeArray*/testActiveArray,
+            /*preCorrectionActiveArray*/testPreCorrActiveArray);
+
+    RandomTransformTest(this, testActiveArray, m, /*clamp*/false, /*simple*/false);
 }
 
 // Compare against values calculated by OpenCV
 // undistortPoints() method, which is the same as mapRawToCorrected
+// Ignore clamping
 // See script DistortionMapperComp.py
 #include "DistortionMapperTest_OpenCvData.h"
 
@@ -262,11 +333,14 @@
     const int32_t maxSqError = 2;
 
     DistortionMapper m;
-    setupTestMapper(&m, bigDistortion);
+    setupTestMapper(&m, bigDistortion, testICal,
+            /*activeArray*/testActiveArray,
+            /*preCorrectionActiveArray*/testActiveArray);
 
     using namespace openCvData;
 
-    res = m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2);
+    res = m.mapRawToCorrected(rawCoords.data(), rawCoords.size() / 2, /*clamp*/false,
+            /*simple*/false);
 
     for (size_t i = 0; i < rawCoords.size(); i+=2) {
         int32_t dist = (rawCoords[i] - expCoords[i]) * (rawCoords[i] - expCoords[i]) +
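
The random round-trip body is factored into a free helper; since it still needs to record per-run metrics, the invoking test object is passed in and `RecordProperty` is called through it, which is how the new SmallSimpleTransform and LargeTransform tests reuse it. A minimal sketch of that shape; the helper and property names are illustrative:

```cpp
#include <gtest/gtest.h>

// Sketch: a shared helper that records per-run metrics receives the invoking
// test object and calls RecordProperty through it, as done above.
static void RunAndRecord(::testing::Test* test, int iterations) {
    int work = iterations * 2;                 // placeholder "measurement"
    test->RecordProperty("WorkDone", work);
}

TEST(HelperSketch, RecordsFromHelper) {
    RunAndRecord(this, 100);                   // 'this' is the generated Test subclass
    SUCCEED();
}
```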
diff --git a/services/mediacodec/seccomp_policy/mediacodec-x86.policy b/services/mediacodec/seccomp_policy/mediacodec-x86.policy
index bbbe552..821e69f 100644
--- a/services/mediacodec/seccomp_policy/mediacodec-x86.policy
+++ b/services/mediacodec/seccomp_policy/mediacodec-x86.policy
@@ -48,6 +48,8 @@
 set_tid_address: 1
 write: 1
 nanosleep: 1
+sched_setscheduler: 1
+uname: 1
 
 # Required by AddressSanitizer
 gettid: 1