Merge "MediaTranscodingService: Rework Service's AIDL."
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index 49783db..6fefa41 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -8380,12 +8380,16 @@
 // ACAMERA_SENSOR_INFO_TIMESTAMP_SOURCE
 typedef enum acamera_metadata_enum_acamera_sensor_info_timestamp_source {
     /**
-     * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in nanoseconds and monotonic,
-     * but can not be compared to timestamps from other subsystems
-     * (e.g. accelerometer, gyro etc.), or other instances of the same or different
-     * camera devices in the same system. Timestamps between streams and results for
-     * a single camera instance are comparable, and the timestamps for all buffers
-     * and the result metadata generated by a single capture are identical.</p>
+     * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in nanoseconds and monotonic, but they
+     * cannot be accurately compared to timestamps from other subsystems (e.g. accelerometer,
+     * gyro), or from other instances of the same or different camera devices in the same
+     * system. However, the timestamps are roughly in the same timebase as
+     * <a href="https://developer.android.com/reference/android/os/SystemClock.html#uptimeMillis">SystemClock#uptimeMillis</a>.  The accuracy is sufficient for tasks
+     * like A/V synchronization for video recording, and the timestamps can be used directly
+     * together with timestamps from the audio subsystem for that task.</p>
+     * <p>Timestamps between streams and results for a single camera instance are comparable,
+     * and the timestamps for all buffers and the result metadata generated by a single
+     * capture are identical.</p>
      *
      * @see ACAMERA_SENSOR_TIMESTAMP
      */
@@ -8395,6 +8399,14 @@
      * <p>Timestamps from ACAMERA_SENSOR_TIMESTAMP are in the same timebase as
      * <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a>,
      * and they can be compared to other timestamps using that base.</p>
+     * <p>When buffers from a REALTIME device are passed directly to a video encoder from the
+     * camera, automatic compensation is done to account for differing timebases of the
+     * audio and camera subsystems.  If the application is receiving buffers and then later
+     * sending them to a video encoder or other application where they are compared with
+     * audio subsystem timestamps or similar, this compensation is not present.  In those
+     * cases, applications need to adjust the timestamps themselves.  Since <a href="https://developer.android.com/reference/android/os/SystemClock.html#elapsedRealtimeNanos">SystemClock#elapsedRealtimeNanos</a> and <a href="https://developer.android.com/reference/android/os/SystemClock.html#uptimeMillis">SystemClock#uptimeMillis</a> only diverge while the device is asleep, an
+     * offset between the two sources can be measured once per active session and applied
+     * to timestamps for sufficient accuracy for A/V sync.</p>
      *
      * @see ACAMERA_SENSOR_TIMESTAMP
      */
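
The REALTIME note added above tells applications to measure the elapsedRealtimeNanos/uptimeMillis
offset once per active session and apply it themselves. A minimal native sketch of that
measurement, assuming CLOCK_BOOTTIME and CLOCK_MONOTONIC are the native counterparts of those two
clocks (the usual mapping on Android, stated here as an assumption, not as part of this change):

    #include <stdint.h>
    #include <time.h>

    static int64_t nowNs(clockid_t clockId) {
        struct timespec ts;
        clock_gettime(clockId, &ts);
        return (int64_t)ts.tv_sec * 1000000000LL + ts.tv_nsec;
    }

    /* Measure once per active session; subtract the result from REALTIME
     * ACAMERA_SENSOR_TIMESTAMP values before comparing them with uptime-based
     * (audio) timestamps. */
    static int64_t bootToUptimeOffsetNs(void) {
        return nowNs(CLOCK_BOOTTIME) - nowNs(CLOCK_MONOTONIC);
    }
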
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index b534f8a..c66dea2 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -84,14 +84,13 @@
 using android::sp;
 using android::status_t;
 
-using android::DISPLAY_ORIENTATION_0;
-using android::DISPLAY_ORIENTATION_180;
-using android::DISPLAY_ORIENTATION_90;
 using android::INVALID_OPERATION;
 using android::NAME_NOT_FOUND;
 using android::NO_ERROR;
 using android::UNKNOWN_ERROR;
 
+namespace ui = android::ui;
+
 static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
 static const uint32_t kMaxBitRate = 200 * 1000000;  // 200Mbps
 static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
@@ -328,7 +327,7 @@
     }
 
     t.setDisplayProjection(dpy,
-            gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
+            gRotate ? ui::ROTATION_90 : ui::ROTATION_0,
             layerStackRect, displayRect);
     return NO_ERROR;
 }
@@ -414,7 +413,7 @@
  */
 static status_t runEncoder(const sp<MediaCodec>& encoder,
         AMediaMuxer *muxer, FILE* rawFp, const sp<IBinder>& display,
-        const sp<IBinder>& virtualDpy, uint8_t orientation) {
+        const sp<IBinder>& virtualDpy, ui::Rotation orientation) {
     static int kTimeout = 250000;   // be responsive on signal
     status_t err;
     ssize_t trackIdx = -1;
@@ -484,7 +483,7 @@
                     if (err != NO_ERROR) {
                         ALOGW("getDisplayInfo(main) failed: %d", err);
                     } else if (orientation != displayInfo.orientation) {
-                        ALOGD("orientation changed, now %d", displayInfo.orientation);
+                        ALOGD("orientation changed, now %s", toCString(displayInfo.orientation));
                         SurfaceComposerClient::Transaction t;
                         setDisplayProjection(t, virtualDpy, displayInfo);
                         t.apply();
@@ -691,9 +690,9 @@
     }
 
     if (gVerbose) {
-        printf("Display is %dx%d @%.2ffps (orientation=%u), layerStack=%u\n",
+        printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
                 displayInfo.viewportW, displayInfo.viewportH, displayInfo.fps,
-                displayInfo.orientation, displayInfo.layerStack);
+                toCString(displayInfo.orientation), displayInfo.layerStack);
         fflush(stdout);
     }
 
diff --git a/cmds/stagefright/Android.mk b/cmds/stagefright/Android.mk
index defc94f..7b447d3 100644
--- a/cmds/stagefright/Android.mk
+++ b/cmds/stagefright/Android.mk
@@ -23,6 +23,7 @@
 
 LOCAL_MODULE_TAGS := optional
 
+LOCAL_SYSTEM_EXT_MODULE:= true
 LOCAL_MODULE:= stagefright
 
 include $(BUILD_EXECUTABLE)
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index bb910ad..56813c4 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -501,7 +501,7 @@
 status_t C2SoftAvcDec::initDecoder() {
     if (OK != createDecoder()) return UNKNOWN_ERROR;
     mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
-    mStride = ALIGN64(mWidth);
+    mStride = ALIGN128(mWidth);
     mSignalledError = false;
     resetPlugin();
     (void) setNumCores();
@@ -909,7 +909,7 @@
         if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                setParams(ALIGN64(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+                setParams(ALIGN128(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
             }
             if (s_decode_op.u4_pic_wd != mWidth || s_decode_op.u4_pic_ht != mHeight) {
                 mWidth = s_decode_op.u4_pic_wd;
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 389ea61..6db4387 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -497,7 +497,7 @@
 status_t C2SoftHevcDec::initDecoder() {
     if (OK != createDecoder()) return UNKNOWN_ERROR;
     mNumCores = MIN(getCpuCoreCount(), MAX_NUM_CORES);
-    mStride = ALIGN64(mWidth);
+    mStride = ALIGN128(mWidth);
     mSignalledError = false;
     resetPlugin();
     (void) setNumCores();
@@ -904,7 +904,7 @@
         if (0 < s_decode_op.u4_pic_wd && 0 < s_decode_op.u4_pic_ht) {
             if (mHeaderDecoded == false) {
                 mHeaderDecoded = true;
-                setParams(ALIGN64(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
+                setParams(ALIGN128(s_decode_op.u4_pic_wd), IVD_DECODE_FRAME);
             }
             if (s_decode_op.u4_pic_wd != mWidth ||  s_decode_op.u4_pic_ht != mHeight) {
                 mWidth = s_decode_op.u4_pic_wd;
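
Both the AVC and HEVC decoders above move the stride from 64- to 128-byte alignment. The actual
ALIGN64/ALIGN128 definitions live in the component sources and are not shown in this diff; the
following is only the conventional shape such helpers take, included as an assumption for readers
of the hunks above:

    /* Assumed definitions, for illustration only; the real macros are defined
     * in the decoder sources. Each rounds x up to the next multiple of the
     * alignment, e.g. ALIGN128(1921) == 2048 while ALIGN128(1920) == 1920. */
    #define ALIGN64(x)  (((x) + 63)  & ~63)
    #define ALIGN128(x) (((x) + 127) & ~127)
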
diff --git a/media/libmediametrics/include/MediaMetricsItem.h b/media/libmediametrics/include/MediaMetricsItem.h
index 536c0e1..6141b36 100644
--- a/media/libmediametrics/include/MediaMetricsItem.h
+++ b/media/libmediametrics/include/MediaMetricsItem.h
@@ -35,6 +35,49 @@
 
 namespace android {
 
+/*
+ * MediaMetrics Keys and Properties for Audio.
+ *
+ * C/C++ friendly constants that ensure:
+ * 1) A compilation error on misspelling.
+ * 2) Consistent behavior and documentation.
+ *
+ * TODO: Move to a separate header file.
+ */
+
+// Taxonomy of audio keys
+
+// Key Prefixes are used for MediaMetrics Item Keys and end with a ".".
+// A value must be appended to a prefix to form a complete key.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO "audio."
+
+// The AudioRecord key appends the "trackId" to the prefix.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_RECORD AMEDIAMETRICS_KEY_PREFIX_AUDIO "record."
+
+// The AudioThread key appends the "threadId" to the prefix.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD AMEDIAMETRICS_KEY_PREFIX_AUDIO "thread."
+
+// The AudioTrack key appends the "trackId" to the prefix.
+#define AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK  AMEDIAMETRICS_KEY_PREFIX_AUDIO "track."
+
+// Keys are strings used for MediaMetrics Item Keys
+#define AMEDIAMETRICS_KEY_AUDIO_FLINGER       AMEDIAMETRICS_KEY_PREFIX_AUDIO "flinger"
+#define AMEDIAMETRICS_KEY_AUDIO_POLICY        AMEDIAMETRICS_KEY_PREFIX_AUDIO "policy"
+
+// Props are properties allowed for MediaMetrics Items.
+#define AMEDIAMETRICS_PROP_EVENT          "event"          // string value (often func name)
+#define AMEDIAMETRICS_PROP_LATENCYMS      "latencyMs"      // double value
+#define AMEDIAMETRICS_PROP_OUTPUTDEVICES  "outputDevices"  // string value
+#define AMEDIAMETRICS_PROP_STARTUPMS      "startupMs"      // double value
+#define AMEDIAMETRICS_PROP_THREADID       "threadId"       // int32 value (io handle)
+
+// Values are strings accepted for a given property.
+
+// An event is a general description, which often is a function name.
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR       "ctor"
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_DTOR       "dtor"
+#define AMEDIAMETRICS_PROP_EVENT_VALUE_UNDERRUN   "underrun" // from Thread
+
 class IMediaMetricsService;
 class Parcel;
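
The comment block above defines the taxonomy: a prefix ending in "." plus an instance id forms an
item key, and the AMEDIAMETRICS_PROP_* strings name the properties an item may carry. A small
sketch of forming such a key from the new constants (audioTrackKey is a hypothetical helper, not
part of this change; the include path is assumed):

    #include <cstdint>
    #include <string>
    #include "MediaMetricsItem.h"  // the header modified above; include path assumed

    // Hypothetical helper: prefix + id -> item key, e.g. 32 -> "audio.track.32".
    static std::string audioTrackKey(int32_t trackId) {
        return std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK) + std::to_string(trackId);
    }
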
 
diff --git a/media/tests/benchmark/src/native/encoder/Encoder.cpp b/media/tests/benchmark/src/native/encoder/Encoder.cpp
index f119cf3..8dc7cc6 100644
--- a/media/tests/benchmark/src/native/encoder/Encoder.cpp
+++ b/media/tests/benchmark/src/native/encoder/Encoder.cpp
@@ -159,6 +159,7 @@
         AMediaFormat_delete(mFormat);
         mFormat = nullptr;
     }
+    if (!mCodec) return;
     AMediaCodec_stop(mCodec);
     AMediaCodec_delete(mCodec);
     int64_t eTime = mStats->getCurTime();
diff --git a/services/mediametrics/AnalyticsState.h b/services/mediametrics/AnalyticsState.h
index 4711bb6..290ed21 100644
--- a/services/mediametrics/AnalyticsState.h
+++ b/services/mediametrics/AnalyticsState.h
@@ -56,6 +56,24 @@
     }
 
     /**
+     * Returns the TimeMachine.
+     *
+     * The TimeMachine object is internally locked, so access is safe and well-defined,
+     * but concurrent writes from other threads may change the results after the call returns.
+     */
+    TimeMachine& timeMachine() { return mTimeMachine; }
+    const TimeMachine& timeMachine() const { return mTimeMachine; }
+
+    /**
+     * Returns the TransactionLog.
+     *
+     * The TransactionLog object is internally locked, so access is safe and well-defined,
+     * but concurrent writes from other threads may change the results after the call returns.
+     */
+    TransactionLog& transactionLog() { return mTransactionLog; }
+    const TransactionLog& transactionLog() const { return mTransactionLog; }
+
+    /**
      * Returns a pair consisting of the dump string, and the number of lines in the string.
      *
      * The number of lines in the returned pair is used as an optimization
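
These accessors are what the new AudioAnalytics actions (next file) use to read back properties
recorded earlier. A sketch of that call pattern, with the TimeMachine::get() call copied from
AudioAnalytics.cpp; the member context (mAnalyticsState holding an AnalyticsState, as in that
file) is assumed rather than shown:

    // Sketch only: last recorded "outputDevices" string for a thread key.
    std::string outputDevicesFor(const std::string& threadKey) const {
        std::string outputDevices;
        mAnalyticsState->timeMachine().get(
                threadKey, AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevices);
        return outputDevices;  // empty if nothing has been recorded yet
    }
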
diff --git a/services/mediametrics/AudioAnalytics.cpp b/services/mediametrics/AudioAnalytics.cpp
index f7ee051..126e501 100644
--- a/services/mediametrics/AudioAnalytics.cpp
+++ b/services/mediametrics/AudioAnalytics.cpp
@@ -32,16 +32,60 @@
-    // This triggers on an item of "audio.flinger"
-    // with a property "event" set to "AudioFlinger" (the constructor).
+    // This triggers on an item of "audio.flinger"
+    // with a property "event" set to "ctor" (the AudioFlinger constructor).
     mActions.addAction(
-        "audio.flinger.event",
-        std::string("AudioFlinger"),
+        AMEDIAMETRICS_KEY_AUDIO_FLINGER "." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_CTOR),
         std::make_shared<AnalyticsActions::Function>(
-            [this](const std::shared_ptr<const android::mediametrics::Item> &){
-                ALOGW("Audioflinger() constructor event detected");
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+                ALOGW("(key=%s) AudioFlinger constructor event detected", item->getKey().c_str());
                 mPreviousAnalyticsState.set(std::make_shared<AnalyticsState>(
                         *mAnalyticsState.get()));
                 // Note: get returns shared_ptr temp, whose lifetime is extended
                 // to end of full expression.
                 mAnalyticsState->clear();  // TODO: filter the analytics state.
+                // Perhaps report this.
+            }));
+
+    // Check underruns
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD "*." AMEDIAMETRICS_PROP_EVENT,
+        std::string(AMEDIAMETRICS_PROP_EVENT_VALUE_UNDERRUN),
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+                std::string threadId = item->getKey().substr(
+                        sizeof(AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD) - 1);
+                std::string outputDevices;
+                mAnalyticsState->timeMachine().get(
+                        item->getKey(), AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevices);
+                ALOGD("(key=%s) Thread underrun event detected on io handle:%s device:%s",
+                        item->getKey().c_str(), threadId.c_str(), outputDevices.c_str());
+                if (outputDevices.find("AUDIO_DEVICE_OUT_BLUETOOTH") != std::string::npos) {
+                    // report this for Bluetooth
+                }
+            }));
+
+    // Check latencies, playback and startup
+    mActions.addAction(
+        AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK "*." AMEDIAMETRICS_PROP_LATENCYMS,
+        std::monostate{},  // accept any value
+        std::make_shared<AnalyticsActions::Function>(
+            [this](const std::shared_ptr<const android::mediametrics::Item> &item){
+                double latencyMs{};
+                double startupMs{};
+                if (!item->get(AMEDIAMETRICS_PROP_LATENCYMS, &latencyMs)
+                        || !item->get(AMEDIAMETRICS_PROP_STARTUPMS, &startupMs)) return;
+
+                std::string trackId = item->getKey().substr(
+                        sizeof(AMEDIAMETRICS_KEY_PREFIX_AUDIO_TRACK) - 1);
+                std::string thread = getThreadFromTrack(item->getKey());
+                std::string outputDevices;
+                mAnalyticsState->timeMachine().get(
+                        thread, AMEDIAMETRICS_PROP_OUTPUTDEVICES, &outputDevices);
+                ALOGD("(key=%s) Track latencyMs:%lf startupMs:%lf detected on port:%s device:%s",
+                        item->getKey().c_str(), latencyMs, startupMs,
+                        trackId.c_str(), outputDevices.c_str());
+                if (outputDevices.find("AUDIO_DEVICE_OUT_BLUETOOTH") != std::string::npos) {
+                    // report this for Bluetooth
+                }
             }));
 }
 
@@ -53,7 +97,7 @@
 status_t AudioAnalytics::submit(
         const std::shared_ptr<const mediametrics::Item>& item, bool isTrusted)
 {
-    if (!startsWith(item->getKey(), "audio.")) return BAD_VALUE;
+    if (!startsWith(item->getKey(), AMEDIAMETRICS_KEY_PREFIX_AUDIO)) return BAD_VALUE;
     status_t status = mAnalyticsState->submit(item, isTrusted);
     if (status != NO_ERROR) return status;  // may not be permitted.
 
@@ -94,4 +138,16 @@
     }
 }
 
+// HELPER METHODS
+
+std::string AudioAnalytics::getThreadFromTrack(const std::string& track) const
+{
+    int32_t threadId_int32{};
+    if (mAnalyticsState->timeMachine().get(
+            track, AMEDIAMETRICS_PROP_THREADID, &threadId_int32) != NO_ERROR) {
+        return {};
+    }
+    return std::string(AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD) + std::to_string(threadId_int32);
+}
+
 } // namespace android
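
The lambdas above and getThreadFromTrack() both recover an instance id by skipping a key prefix
with key.substr(sizeof(PREFIX) - 1). A self-contained sketch of that idiom (the local macro stands
in for the constant defined in MediaMetricsItem.h):

    #include <string>

    // Stand-in for AMEDIAMETRICS_KEY_PREFIX_AUDIO_THREAD, so the example compiles alone.
    #define KEY_PREFIX_AUDIO_THREAD "audio.thread."

    // sizeof() on a string-literal macro counts the trailing NUL, so "- 1" yields the
    // prefix length; substr() then returns the id, e.g. "audio.thread.10" -> "10".
    // Assumes the key starts with the prefix, as guaranteed by the matched action key.
    static std::string threadIdFromKey(const std::string& key) {
        return key.substr(sizeof(KEY_PREFIX_AUDIO_THREAD) - 1);
    }
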
diff --git a/services/mediametrics/AudioAnalytics.h b/services/mediametrics/AudioAnalytics.h
index be885ec..4a42e22 100644
--- a/services/mediametrics/AudioAnalytics.h
+++ b/services/mediametrics/AudioAnalytics.h
@@ -72,6 +72,14 @@
      */
     void checkActions(const std::shared_ptr<const mediametrics::Item>& item);
 
+    // HELPER METHODS
+    /**
+     * Returns the audio thread key associated with an audio track key,
+     * e.g. "audio.track.32" -> "audio.thread.10" if the associated
+     * threadId for the audio track is 10.
+     */
+    std::string getThreadFromTrack(const std::string& track) const;
+
     // Actions is individually locked
     AnalyticsActions mActions;