Merge "Dynamic audio policy should reject non pcm"
diff --git a/camera/ndk/impl/ACameraMetadata.cpp b/camera/ndk/impl/ACameraMetadata.cpp
index c661233..de40990 100644
--- a/camera/ndk/impl/ACameraMetadata.cpp
+++ b/camera/ndk/impl/ACameraMetadata.cpp
@@ -36,6 +36,8 @@
filterDurations(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS);
filterDurations(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS);
filterDurations(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS);
+ filterDurations(ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS);
+ filterDurations(ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS);
}
// TODO: filter request/result keys
}
@@ -104,7 +106,8 @@
for (size_t i = 0; i < entry.count; ++i) {
if (ids[i] == '\0') {
if (start != i) {
- mStaticPhysicalCameraIds.push_back((const char*)ids+start);
+ mStaticPhysicalCameraIdValues.push_back(String8((const char *)ids+start));
+ mStaticPhysicalCameraIds.push_back(mStaticPhysicalCameraIdValues.back().string());
}
start = i+1;
}
@@ -173,6 +176,16 @@
filteredDurations.push_back(duration);
}
break;
+ case ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS:
+ case ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS:
+ if (format == HAL_PIXEL_FORMAT_BLOB) {
+ format = AIMAGE_FORMAT_HEIC;
+ filteredDurations.push_back(format);
+ filteredDurations.push_back(width);
+ filteredDurations.push_back(height);
+ filteredDurations.push_back(duration);
+ }
+ break;
default:
// Should not reach here
ALOGE("%s: Unkown tag 0x%x", __FUNCTION__, tag);
@@ -246,6 +259,31 @@
filteredDepthStreamConfigs.push_back(isInput);
}
mData.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS, filteredDepthStreamConfigs);
+
+ entry = mData.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS);
+ Vector<int32_t> filteredHeicStreamConfigs;
+ filteredHeicStreamConfigs.setCapacity(entry.count);
+
+ for (size_t i=0; i < entry.count; i += STREAM_CONFIGURATION_SIZE) {
+ int32_t format = entry.data.i32[i + STREAM_FORMAT_OFFSET];
+ int32_t width = entry.data.i32[i + STREAM_WIDTH_OFFSET];
+ int32_t height = entry.data.i32[i + STREAM_HEIGHT_OFFSET];
+ int32_t isInput = entry.data.i32[i + STREAM_IS_INPUT_OFFSET];
+ if (isInput == ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_INPUT) {
+ // Hide input streams
+ continue;
+ }
+ // Translate HAL formats to NDK format
+ if (format == HAL_PIXEL_FORMAT_BLOB) {
+ format = AIMAGE_FORMAT_HEIC;
+ }
+
+ filteredHeicStreamConfigs.push_back(format);
+ filteredHeicStreamConfigs.push_back(width);
+ filteredHeicStreamConfigs.push_back(height);
+ filteredHeicStreamConfigs.push_back(isInput);
+ }
+ mData.update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS, filteredHeicStreamConfigs);
}
bool
@@ -484,6 +522,8 @@
ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
ANDROID_INFO_SUPPORTED_BUFFER_MANAGEMENT_VERSION,
ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
+ ANDROID_HEIC_INFO_SUPPORTED,
+ ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT,
});
/*~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~@~
diff --git a/camera/ndk/impl/ACameraMetadata.h b/camera/ndk/impl/ACameraMetadata.h
index 7049c4b..3d895cb 100644
--- a/camera/ndk/impl/ACameraMetadata.h
+++ b/camera/ndk/impl/ACameraMetadata.h
@@ -117,6 +117,7 @@
static std::unordered_set<uint32_t> sSystemTags;
std::vector<const char*> mStaticPhysicalCameraIds;
+ std::vector<String8> mStaticPhysicalCameraIdValues;
};
#endif // _ACAMERA_METADATA_H
diff --git a/camera/ndk/include/camera/NdkCameraMetadataTags.h b/camera/ndk/include/camera/NdkCameraMetadataTags.h
index c1efa5f..8c19e1d 100644
--- a/camera/ndk/include/camera/NdkCameraMetadataTags.h
+++ b/camera/ndk/include/camera/NdkCameraMetadataTags.h
@@ -71,6 +71,8 @@
ACAMERA_DEPTH,
ACAMERA_LOGICAL_MULTI_CAMERA,
ACAMERA_DISTORTION_CORRECTION,
+ ACAMERA_HEIC,
+ ACAMERA_HEIC_INFO,
ACAMERA_SECTION_COUNT,
ACAMERA_VENDOR = 0x8000
@@ -112,6 +114,8 @@
ACAMERA_DISTORTION_CORRECTION_START
= ACAMERA_DISTORTION_CORRECTION
<< 16,
+ ACAMERA_HEIC_START = ACAMERA_HEIC << 16,
+ ACAMERA_HEIC_INFO_START = ACAMERA_HEIC_INFO << 16,
ACAMERA_VENDOR_START = ACAMERA_VENDOR << 16
} acamera_metadata_section_start_t;
@@ -1912,6 +1916,7 @@
* <li>ACaptureRequest</li>
* </ul></p>
*
+ * <p>This tag is also used for HEIC image capture.</p>
*/
ACAMERA_JPEG_GPS_COORDINATES = // double[3]
ACAMERA_JPEG_START,
@@ -1927,6 +1932,7 @@
* <li>ACaptureRequest</li>
* </ul></p>
*
+ * <p>This tag is also used for HEIC image capture.</p>
*/
ACAMERA_JPEG_GPS_PROCESSING_METHOD = // byte
ACAMERA_JPEG_START + 1,
@@ -1942,6 +1948,7 @@
* <li>ACaptureRequest</li>
* </ul></p>
*
+ * <p>This tag is also used for HEIC image capture.</p>
*/
ACAMERA_JPEG_GPS_TIMESTAMP = // int64
ACAMERA_JPEG_START + 2,
@@ -1986,6 +1993,10 @@
* </code></pre>
* <p>For EXTERNAL cameras the sensor orientation will always be set to 0 and the facing will
* also be set to EXTERNAL. The above code is not relevant in such case.</p>
+ * <p>This tag is also used to describe the orientation of the HEIC image capture, in which
+ * case the rotation is reflected by
+ * <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>, and not by
+ * rotating the image data itself.</p>
*
* @see ACAMERA_SENSOR_ORIENTATION
*/
@@ -2003,7 +2014,8 @@
* <li>ACaptureRequest</li>
* </ul></p>
*
- * <p>85-95 is typical usage range.</p>
+ * <p>85-95 is typical usage range. This tag is also used to describe the quality
+ * of the HEIC image capture.</p>
*/
ACAMERA_JPEG_QUALITY = // byte
ACAMERA_JPEG_START + 4,
@@ -2019,6 +2031,7 @@
* <li>ACaptureRequest</li>
* </ul></p>
*
+ * <p>This tag is also used to describe the quality of the HEIC image capture.</p>
*/
ACAMERA_JPEG_THUMBNAIL_QUALITY = // byte
ACAMERA_JPEG_START + 5,
@@ -2055,6 +2068,10 @@
* orientation is requested. LEGACY device will always report unrotated thumbnail
* size.</li>
* </ul>
+ * <p>The tag is also used as thumbnail size for HEIC image format capture, in which case
+ * the thumbnail rotation is reflected by
+ * <a href="https://developer.android.com/reference/android/media/ExifInterface.html#TAG_ORIENTATION">EXIF orientation flag</a>, and not by
+ * rotating the thumbnail data itself.</p>
*
* @see ACAMERA_JPEG_ORIENTATION
*/
@@ -2088,6 +2105,7 @@
* and vice versa.</li>
* <li>All non-<code>(0, 0)</code> sizes will have non-zero widths and heights.</li>
* </ul>
+ * <p>This list is also used as supported thumbnail sizes for HEIC image format capture.</p>
*
* @see ACAMERA_SCALER_AVAILABLE_STREAM_CONFIGURATIONS
*/
@@ -5757,6 +5775,80 @@
ACAMERA_DISTORTION_CORRECTION_START + 1,
ACAMERA_DISTORTION_CORRECTION_END,
+ /**
+ * <p>The available HEIC (ISO/IEC 23008-12) stream
+ * configurations that this camera device supports
+ * (i.e. format, width, height, output/input stream).</p>
+ *
+ * <p>Type: int32[n*4] (acamera_metadata_enum_android_heic_available_heic_stream_configurations_t)</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>The configurations are listed as <code>(format, width, height, input?)</code> tuples.</p>
+ * <p>If the camera device supports HEIC image format, it will support identical set of stream
+ * combinations involving HEIC image format, compared to the combinations involving JPEG
+ * image format as required by the device's hardware level and capabilities.</p>
+ * <p>All the static, control, and dynamic metadata tags related to JPEG apply to HEIC formats.
+ * Configuring JPEG and HEIC streams at the same time is not supported.</p>
+ * <p>All the configuration tuples <code>(format, width, height, input?)</code> will contain
+ * AIMAGE_FORMAT_HEIC format as OUTPUT only.</p>
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS = // int32[n*4] (acamera_metadata_enum_android_heic_available_heic_stream_configurations_t)
+ ACAMERA_HEIC_START,
+ /**
+ * <p>This lists the minimum frame duration for each
+ * format/size combination for HEIC output formats.</p>
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>This should correspond to the frame duration when only that
+ * stream is active, with all processing (typically in android.*.mode)
+ * set to either OFF or FAST.</p>
+ * <p>When multiple streams are used in a request, the minimum frame
+ * duration will be max(individual stream min durations).</p>
+ * <p>See ACAMERA_SENSOR_FRAME_DURATION and
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for more details about
+ * calculating the max frame rate.</p>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+ * @see ACAMERA_SENSOR_FRAME_DURATION
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS = // int64[4*n]
+ ACAMERA_HEIC_START + 1,
+ /**
+ * <p>This lists the maximum stall duration for each
+ * output format/size combination for HEIC streams.</p>
+ *
+ * <p>Type: int64[4*n]</p>
+ *
+ * <p>This tag may appear in:
+ * <ul>
+ * <li>ACameraMetadata from ACameraManager_getCameraCharacteristics</li>
+ * </ul></p>
+ *
+ * <p>A stall duration is how much extra time would get added
+ * to the normal minimum frame duration for a repeating request
+ * that has streams with non-zero stall.</p>
+ * <p>This functions similarly to
+ * ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS for HEIC
+ * streams.</p>
+ * <p>All HEIC output stream formats may have a nonzero stall
+ * duration.</p>
+ *
+ * @see ACAMERA_SCALER_AVAILABLE_STALL_DURATIONS
+ */
+ ACAMERA_HEIC_AVAILABLE_HEIC_STALL_DURATIONS = // int64[4*n]
+ ACAMERA_HEIC_START + 2,
+ ACAMERA_HEIC_END,
+
} acamera_metadata_tag_t;
/**
@@ -7608,6 +7700,13 @@
*/
ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_MONOCHROME = 12,
+ /**
+ * <p>The camera device is capable of writing image data into a region of memory
+ * inaccessible to Android userspace or the Android kernel, and only accessible to
+ * trusted execution environments (TEE).</p>
+ */
+ ACAMERA_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA = 13,
+
} acamera_metadata_enum_android_request_available_capabilities_t;
@@ -8366,6 +8465,16 @@
} acamera_metadata_enum_android_distortion_correction_mode_t;
+// ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS
+typedef enum acamera_metadata_enum_acamera_heic_available_heic_stream_configurations {
+ ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_OUTPUT = 0,
+
+ ACAMERA_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS_INPUT = 1,
+
+} acamera_metadata_enum_android_heic_available_heic_stream_configurations_t;
+
+
+
#endif /* __ANDROID_API__ >= 24 */
__END_DECLS
diff --git a/cmds/screenrecord/screenrecord.cpp b/cmds/screenrecord/screenrecord.cpp
index 7803ccc..c361690 100644
--- a/cmds/screenrecord/screenrecord.cpp
+++ b/cmds/screenrecord/screenrecord.cpp
@@ -86,6 +86,7 @@
using android::INFO_FORMAT_CHANGED;
using android::INFO_OUTPUT_BUFFERS_CHANGED;
using android::INVALID_OPERATION;
+using android::NAME_NOT_FOUND;
using android::NO_ERROR;
using android::UNKNOWN_ERROR;
@@ -585,8 +586,12 @@
self->startThreadPool();
// Get main display parameters.
- sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
- ISurfaceComposer::eDisplayIdMain);
+ const sp<IBinder> mainDpy = SurfaceComposerClient::getInternalDisplayToken();
+ if (mainDpy == nullptr) {
+ fprintf(stderr, "ERROR: no display\n");
+ return NAME_NOT_FOUND;
+ }
+
DisplayInfo mainDpyInfo;
err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
if (err != NO_ERROR) {
diff --git a/cmds/stagefright/codec.cpp b/cmds/stagefright/codec.cpp
index a463ec5..e5a4337 100644
--- a/cmds/stagefright/codec.cpp
+++ b/cmds/stagefright/codec.cpp
@@ -411,10 +411,12 @@
composerClient = new SurfaceComposerClient;
CHECK_EQ(composerClient->initCheck(), (status_t)OK);
- sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
- ISurfaceComposer::eDisplayIdMain));
+ const sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+ CHECK(display != nullptr);
+
DisplayInfo info;
- SurfaceComposerClient::getDisplayInfo(display, &info);
+ CHECK_EQ(SurfaceComposerClient::getDisplayInfo(display, &info), NO_ERROR);
+
ssize_t displayWidth = info.w;
ssize_t displayHeight = info.h;
diff --git a/cmds/stagefright/mediafilter.cpp b/cmds/stagefright/mediafilter.cpp
index f0ee0e1..2cf6955 100644
--- a/cmds/stagefright/mediafilter.cpp
+++ b/cmds/stagefright/mediafilter.cpp
@@ -748,10 +748,12 @@
composerClient = new SurfaceComposerClient;
CHECK_EQ((status_t)OK, composerClient->initCheck());
- android::sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
- ISurfaceComposer::eDisplayIdMain));
+ const android::sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+ CHECK(display != nullptr);
+
DisplayInfo info;
- SurfaceComposerClient::getDisplayInfo(display, &info);
+ CHECK_EQ(SurfaceComposerClient::getDisplayInfo(display, &info), NO_ERROR);
+
ssize_t displayWidth = info.w;
ssize_t displayHeight = info.h;
diff --git a/cmds/stagefright/stream.cpp b/cmds/stagefright/stream.cpp
index b2f39dc..35bdbc0 100644
--- a/cmds/stagefright/stream.cpp
+++ b/cmds/stagefright/stream.cpp
@@ -318,10 +318,12 @@
sp<SurfaceComposerClient> composerClient = new SurfaceComposerClient;
CHECK_EQ(composerClient->initCheck(), (status_t)OK);
- sp<IBinder> display(SurfaceComposerClient::getBuiltInDisplay(
- ISurfaceComposer::eDisplayIdMain));
+ const sp<IBinder> display = SurfaceComposerClient::getInternalDisplayToken();
+ CHECK(display != nullptr);
+
DisplayInfo info;
- SurfaceComposerClient::getDisplayInfo(display, &info);
+ CHECK_EQ(SurfaceComposerClient::getDisplayInfo(display, &info), NO_ERROR);
+
ssize_t displayWidth = info.w;
ssize_t displayHeight = info.h;
diff --git a/drm/mediacas/plugins/clearkey/ClearKeyCasPlugin.cpp b/drm/mediacas/plugins/clearkey/ClearKeyCasPlugin.cpp
index 27bd631..bf35224 100644
--- a/drm/mediacas/plugins/clearkey/ClearKeyCasPlugin.cpp
+++ b/drm/mediacas/plugins/clearkey/ClearKeyCasPlugin.cpp
@@ -97,13 +97,13 @@
///////////////////////////////////////////////////////////////////////////////
ClearKeyCasPlugin::ClearKeyCasPlugin(
void *appData, CasPluginCallback callback)
- : mCallback(callback), mAppData(appData) {
+ : mCallback(callback), mCallbackExt(NULL), mAppData(appData) {
ALOGV("CTOR");
}
ClearKeyCasPlugin::ClearKeyCasPlugin(
void *appData, CasPluginCallbackExt callback)
- : mCallbackExt(callback), mAppData(appData) {
+ : mCallback(NULL), mCallbackExt(callback), mAppData(appData) {
ALOGV("CTOR");
}
diff --git a/media/codec2/components/aac/C2SoftAacDec.cpp b/media/codec2/components/aac/C2SoftAacDec.cpp
index c7c8442..04dda8f 100644
--- a/media/codec2/components/aac/C2SoftAacDec.cpp
+++ b/media/codec2/components/aac/C2SoftAacDec.cpp
@@ -52,33 +52,26 @@
namespace android {
-class C2SoftAacDec::IntfImpl : public C2InterfaceHelper {
+constexpr char COMPONENT_NAME[] = "c2.android.aac.decoder";
+
+class C2SoftAacDec::IntfImpl : public SimpleInterface<void>::BaseParams {
public:
explicit IntfImpl(const std::shared_ptr<C2ReflectorHelper> &helper)
- : C2InterfaceHelper(helper) {
-
- setDerivedInstance(this);
+ : SimpleInterface<void>::BaseParams(
+ helper,
+ COMPONENT_NAME,
+ C2Component::KIND_DECODER,
+ C2Component::DOMAIN_AUDIO,
+ MEDIA_MIMETYPE_AUDIO_AAC) {
+ noPrivateBuffers();
+ noInputReferences();
+ noOutputReferences();
+ noInputLatency();
+ noTimeStretch();
addParameter(
- DefineParam(mInputFormat, C2_NAME_INPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::input(0u, C2FormatCompressed))
- .build());
-
- addParameter(
- DefineParam(mOutputFormat, C2_NAME_OUTPUT_STREAM_FORMAT_SETTING)
- .withConstValue(new C2StreamFormatConfig::output(0u, C2FormatAudio))
- .build());
-
- addParameter(
- DefineParam(mInputMediaType, C2_NAME_INPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::input>(
- MEDIA_MIMETYPE_AUDIO_AAC))
- .build());
-
- addParameter(
- DefineParam(mOutputMediaType, C2_NAME_OUTPUT_PORT_MIME_SETTING)
- .withConstValue(AllocSharedString<C2PortMimeConfig::output>(
- MEDIA_MIMETYPE_AUDIO_RAW))
+ DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+ .withConstValue(new C2PortActualDelayTuning::output(2u))
.build());
addParameter(
@@ -231,8 +224,6 @@
// TODO Add : C2StreamAacSbrModeTuning
};
-constexpr char COMPONENT_NAME[] = "c2.android.aac.decoder";
-
C2SoftAacDec::C2SoftAacDec(
const char *name,
c2_node_id_t id,
diff --git a/media/codec2/components/avc/C2SoftAvcDec.cpp b/media/codec2/components/avc/C2SoftAvcDec.cpp
index 3e62744..86cd3d8 100644
--- a/media/codec2/components/avc/C2SoftAvcDec.cpp
+++ b/media/codec2/components/avc/C2SoftAvcDec.cpp
@@ -51,6 +51,12 @@
noInputLatency();
noTimeStretch();
+ // TODO: Proper support for reorder depth.
+ addParameter(
+ DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+ .withConstValue(new C2PortActualDelayTuning::output(8u))
+ .build());
+
// TODO: output latency and reordering
addParameter(
@@ -877,6 +883,8 @@
} else if (!hasPicture) {
fillEmptyWork(work);
}
+
+ work->input.buffers.clear();
}
c2_status_t C2SoftAvcDec::drainInternal(
diff --git a/media/codec2/components/base/Android.bp b/media/codec2/components/base/Android.bp
index d02f541..78a444b 100644
--- a/media/codec2/components/base/Android.bp
+++ b/media/codec2/components/base/Android.bp
@@ -74,9 +74,6 @@
"signed-integer-overflow",
],
cfi: true,
- diag: {
- cfi: true,
- },
},
}
@@ -90,9 +87,6 @@
"signed-integer-overflow",
],
cfi: true,
- diag: {
- cfi: true,
- },
},
}
@@ -128,9 +122,6 @@
"signed-integer-overflow",
],
cfi: true,
- diag: {
- cfi: true,
- },
},
ldflags: ["-Wl,-Bsymbolic"],
diff --git a/media/codec2/components/hevc/C2SoftHevcDec.cpp b/media/codec2/components/hevc/C2SoftHevcDec.cpp
index 99892ce..f0d7d88 100644
--- a/media/codec2/components/hevc/C2SoftHevcDec.cpp
+++ b/media/codec2/components/hevc/C2SoftHevcDec.cpp
@@ -51,7 +51,11 @@
noInputLatency();
noTimeStretch();
- // TODO: output latency and reordering
+ // TODO: Proper support for reorder depth.
+ addParameter(
+ DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+ .withConstValue(new C2PortActualDelayTuning::output(8u))
+ .build());
addParameter(
DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
diff --git a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
index 901f5ed..0b89cff 100644
--- a/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
+++ b/media/codec2/components/mpeg4_h263/C2SoftMpeg4Dec.cpp
@@ -60,7 +60,11 @@
noInputLatency();
noTimeStretch();
- // TODO: output latency and reordering
+ // TODO: Proper support for reorder depth.
+ addParameter(
+ DefineParam(mActualOutputDelay, C2_PARAMKEY_OUTPUT_DELAY)
+ .withConstValue(new C2PortActualDelayTuning::output(1u))
+ .build());
addParameter(
DefineParam(mAttrib, C2_PARAMKEY_COMPONENT_ATTRIBUTES)
diff --git a/media/codec2/core/include/C2Config.h b/media/codec2/core/include/C2Config.h
index 0357115..fb6edb6 100644
--- a/media/codec2/core/include/C2Config.h
+++ b/media/codec2/core/include/C2Config.h
@@ -597,6 +597,9 @@
LEVEL_AVC_5, ///< AVC (H.264) Level 5
LEVEL_AVC_5_1, ///< AVC (H.264) Level 5.1
LEVEL_AVC_5_2, ///< AVC (H.264) Level 5.2
+ LEVEL_AVC_6, ///< AVC (H.264) Level 6
+ LEVEL_AVC_6_1, ///< AVC (H.264) Level 6.1
+ LEVEL_AVC_6_2, ///< AVC (H.264) Level 6.2
// HEVC (H.265) tiers and levels
LEVEL_HEVC_MAIN_1 = _C2_PL_HEVC_BASE, ///< HEVC (H.265) Main Tier Level 1
diff --git a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/types.h b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/types.h
index c38e674..b9f3aa8 100644
--- a/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/types.h
+++ b/media/codec2/hidl/1.0/utils/include/codec2/hidl/1.0/types.h
@@ -120,11 +120,9 @@
IComponentStore::ComponentTraits* d,
const C2Component::Traits& s);
-// ComponentTraits -> C2Component::Traits, std::unique_ptr<std::vector<std::string>>
-// Note: The output d is only valid as long as aliasesBuffer remains alive.
+// ComponentTraits -> C2Component::Traits
bool objcpy(
C2Component::Traits* d,
- std::unique_ptr<std::vector<std::string>>* aliasesBuffer,
const IComponentStore::ComponentTraits& s);
// C2StructDescriptor -> StructDescriptor
diff --git a/media/codec2/hidl/1.0/utils/types.cpp b/media/codec2/hidl/1.0/utils/types.cpp
index caed839..02cdc23 100644
--- a/media/codec2/hidl/1.0/utils/types.cpp
+++ b/media/codec2/hidl/1.0/utils/types.cpp
@@ -351,7 +351,6 @@
// ComponentTraits -> C2Component::Traits, std::unique_ptr<std::vector<std::string>>
bool objcpy(
C2Component::Traits* d,
- std::unique_ptr<std::vector<std::string>>* aliasesBuffer,
const IComponentStore::ComponentTraits& s) {
d->name = s.name.c_str();
@@ -394,15 +393,9 @@
d->rank = static_cast<C2Component::rank_t>(s.rank);
d->mediaType = s.mediaType.c_str();
-
- // aliasesBuffer must not be resized after this.
- *aliasesBuffer = std::make_unique<std::vector<std::string>>(
- s.aliases.size());
- (*aliasesBuffer)->resize(s.aliases.size());
- std::vector<C2StringLiteral> dAliases(s.aliases.size());
+ d->aliases.resize(s.aliases.size());
for (size_t i = 0; i < s.aliases.size(); ++i) {
- (**aliasesBuffer)[i] = s.aliases[i].c_str();
- d->aliases[i] = (**aliasesBuffer)[i].c_str();
+ d->aliases[i] = s.aliases[i];
}
return true;
}
diff --git a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
index d1557cb..fca2902 100644
--- a/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
+++ b/media/codec2/hidl/1.0/vts/functional/common/media_c2_hidl_test_common.h
@@ -55,12 +55,10 @@
: callBack(fn) {}
virtual void onWorkDone(
const std::weak_ptr<android::Codec2Client::Component>& comp,
- std::list<std::unique_ptr<C2Work>>& workItems,
- size_t numDiscardedInputBuffers) override {
+ std::list<std::unique_ptr<C2Work>>& workItems) override {
/* TODO */
ALOGD("onWorkDone called");
(void)comp;
- (void)numDiscardedInputBuffers;
if (callBack) callBack(workItems);
}
@@ -89,9 +87,10 @@
}
virtual void onInputBufferDone(
- const std::shared_ptr<C2Buffer>& buffer) override {
+ uint64_t frameIndex, size_t arrayIndex) override {
/* TODO */
- (void)buffer;
+ (void)frameIndex;
+ (void)arrayIndex;
}
virtual void onFrameRendered(
diff --git a/media/codec2/hidl/client/client.cpp b/media/codec2/hidl/client/client.cpp
index 5b52fcd..3808be5 100644
--- a/media/codec2/hidl/client/client.cpp
+++ b/media/codec2/hidl/client/client.cpp
@@ -344,17 +344,13 @@
return Void();
}
// release input buffers potentially held by the component from queue
- size_t numDiscardedInputBuffers = 0;
std::shared_ptr<Codec2Client::Component> strongComponent =
component.lock();
if (strongComponent) {
- numDiscardedInputBuffers =
- strongComponent->handleOnWorkDone(workItems);
+ strongComponent->handleOnWorkDone(workItems);
}
if (std::shared_ptr<Codec2Client::Listener> listener = base.lock()) {
- listener->onWorkDone(component,
- workItems,
- numDiscardedInputBuffers);
+ listener->onWorkDone(component, workItems);
} else {
LOG(DEBUG) << "onWorkDone -- listener died.";
}
@@ -418,26 +414,15 @@
LOG(DEBUG) << "onInputBuffersReleased -- listener died.";
return Void();
}
- std::shared_ptr<Codec2Client::Component> strongComponent =
- component.lock();
- if (!strongComponent) {
- LOG(DEBUG) << "onInputBuffersReleased -- component died.";
- return Void();
- }
for (const InputBuffer& inputBuffer : inputBuffers) {
- std::shared_ptr<C2Buffer> buffer =
- strongComponent->freeInputBuffer(
- inputBuffer.frameIndex,
- inputBuffer.arrayIndex);
LOG(VERBOSE) << "onInputBuffersReleased --"
" received death notification of"
" input buffer:"
" frameIndex = " << inputBuffer.frameIndex
<< ", bufferIndex = " << inputBuffer.arrayIndex
<< ".";
- if (buffer) {
- listener->onInputBufferDone(buffer);
- }
+ listener->onInputBufferDone(
+ inputBuffer.frameIndex, inputBuffer.arrayIndex);
}
return Void();
}
@@ -579,9 +564,8 @@
return;
}
mTraitsList.resize(t.size());
- mAliasesBuffer.resize(t.size());
for (size_t i = 0; i < t.size(); ++i) {
- if (!objcpy(&mTraitsList[i], &mAliasesBuffer[i], t[i])) {
+ if (!objcpy(&mTraitsList[i], t[i])) {
LOG(ERROR) << "listComponents -- corrupted output.";
return;
}
@@ -918,43 +902,8 @@
return static_cast<c2_status_t>(static_cast<Status>(transResult));
}
-size_t Codec2Client::Component::handleOnWorkDone(
+void Codec2Client::Component::handleOnWorkDone(
const std::list<std::unique_ptr<C2Work>> &workItems) {
- // Input buffers' lifetime management
- std::vector<uint64_t> inputDone;
- for (const std::unique_ptr<C2Work> &work : workItems) {
- if (work) {
- if (work->worklets.empty()
- || !work->worklets.back()
- || (work->worklets.back()->output.flags &
- C2FrameData::FLAG_INCOMPLETE) == 0) {
- // input is complete
- inputDone.emplace_back(work->input.ordinal.frameIndex.peeku());
- }
- }
- }
-
- size_t numDiscardedInputBuffers = 0;
- {
- std::lock_guard<std::mutex> lock(mInputBuffersMutex);
- for (uint64_t inputIndex : inputDone) {
- auto it = mInputBuffers.find(inputIndex);
- if (it == mInputBuffers.end()) {
- LOG(VERBOSE) << "onWorkDone -- returned consumed/unknown "
- "input frame: index = "
- << inputIndex << ".";
- } else {
- LOG(VERBOSE) << "onWorkDone -- processed input frame: "
- << inputIndex
- << " (containing " << it->second.size()
- << " buffers).";
- mInputBuffers.erase(it);
- mInputBufferCount.erase(inputIndex);
- ++numDiscardedInputBuffers;
- }
- }
- }
-
// Output bufferqueue-based blocks' lifetime management
mOutputBufferQueueMutex.lock();
sp<IGraphicBufferProducer> igbp = mOutputIgbp;
@@ -965,72 +914,10 @@
if (igbp) {
holdBufferQueueBlocks(workItems, igbp, bqId, generation);
}
- return numDiscardedInputBuffers;
-}
-
-std::shared_ptr<C2Buffer> Codec2Client::Component::freeInputBuffer(
- uint64_t frameIndex,
- size_t bufferIndex) {
- std::shared_ptr<C2Buffer> buffer;
- std::lock_guard<std::mutex> lock(mInputBuffersMutex);
- auto it = mInputBuffers.find(frameIndex);
- if (it == mInputBuffers.end()) {
- LOG(INFO) << "freeInputBuffer -- Unrecognized input frame index "
- << frameIndex << ".";
- return nullptr;
- }
- if (bufferIndex >= it->second.size()) {
- LOG(INFO) << "freeInputBuffer -- Input buffer number " << bufferIndex
- << " is not valid in input with frame index " << frameIndex
- << ".";
- return nullptr;
- }
- buffer = it->second[bufferIndex];
- if (!buffer) {
- LOG(INFO) << "freeInputBuffer -- Input buffer number " << bufferIndex
- << " in input with frame index " << frameIndex
- << " has already been freed.";
- return nullptr;
- }
- it->second[bufferIndex] = nullptr;
- if (--mInputBufferCount[frameIndex] == 0) {
- mInputBuffers.erase(it);
- mInputBufferCount.erase(frameIndex);
- }
- return buffer;
}
c2_status_t Codec2Client::Component::queue(
std::list<std::unique_ptr<C2Work>>* const items) {
- // remember input buffers queued to hold reference to them
- {
- std::lock_guard<std::mutex> lock(mInputBuffersMutex);
- for (const std::unique_ptr<C2Work> &work : *items) {
- if (!work) {
- continue;
- }
- if (work->input.buffers.size() == 0) {
- continue;
- }
-
- uint64_t inputIndex = work->input.ordinal.frameIndex.peeku();
- auto res = mInputBuffers.emplace(inputIndex, work->input.buffers);
- if (!res.second) {
- // TODO: append? - for now we are replacing
- res.first->second = work->input.buffers;
- LOG(INFO) << "queue -- duplicate input frame index: "
- << inputIndex
- << ". Discarding the old input frame...";
- }
- mInputBufferCount[inputIndex] = work->input.buffers.size();
- LOG(VERBOSE) << "queue -- queuing input frame: "
- << "index = " << inputIndex
- << ", number of buffers = "
- << work->input.buffers.size()
- << ".";
- }
- }
-
WorkBundle workBundle;
if (!objcpy(&workBundle, *items, &mBufferPoolSender)) {
LOG(ERROR) << "queue -- bad input.";
@@ -1088,24 +975,6 @@
}
}
- // Input buffers' lifetime management
- for (uint64_t flushedIndex : flushedIndices) {
- std::lock_guard<std::mutex> lock(mInputBuffersMutex);
- auto it = mInputBuffers.find(flushedIndex);
- if (it == mInputBuffers.end()) {
- LOG(VERBOSE) << "flush -- returned consumed/unknown input frame: "
- "index = " << flushedIndex << ".";
- } else {
- LOG(VERBOSE) << "flush -- returned unprocessed input frame: "
- "index = " << flushedIndex
- << ", number of buffers = "
- << mInputBufferCount[flushedIndex]
- << ".";
- mInputBuffers.erase(it);
- mInputBufferCount.erase(flushedIndex);
- }
- }
-
// Output bufferqueue-based blocks' lifetime management
mOutputBufferQueueMutex.lock();
sp<IGraphicBufferProducer> igbp = mOutputIgbp;
@@ -1160,10 +1029,6 @@
if (status != C2_OK) {
LOG(DEBUG) << "stop -- call failed: " << status << ".";
}
- mInputBuffersMutex.lock();
- mInputBuffers.clear();
- mInputBufferCount.clear();
- mInputBuffersMutex.unlock();
return status;
}
@@ -1178,10 +1043,6 @@
if (status != C2_OK) {
LOG(DEBUG) << "reset -- call failed: " << status << ".";
}
- mInputBuffersMutex.lock();
- mInputBuffers.clear();
- mInputBufferCount.clear();
- mInputBuffersMutex.unlock();
return status;
}
@@ -1196,10 +1057,6 @@
if (status != C2_OK) {
LOG(DEBUG) << "release -- call failed: " << status << ".";
}
- mInputBuffersMutex.lock();
- mInputBuffers.clear();
- mInputBufferCount.clear();
- mInputBuffersMutex.unlock();
return status;
}
diff --git a/media/codec2/hidl/client/include/codec2/hidl/client.h b/media/codec2/hidl/client/include/codec2/hidl/client.h
index f320ef3..478ce6e 100644
--- a/media/codec2/hidl/client/include/codec2/hidl/client.h
+++ b/media/codec2/hidl/client/include/codec2/hidl/client.h
@@ -232,8 +232,6 @@
mutable bool mListed;
std::string mServiceName;
mutable std::vector<C2Component::Traits> mTraitsList;
- mutable std::vector<std::unique_ptr<std::vector<std::string>>>
- mAliasesBuffer;
sp<::android::hardware::media::bufferpool::V2_0::IClientManager>
mHostPoolManager;
@@ -252,16 +250,9 @@
struct Codec2Client::Listener {
// This is called when the component produces some output.
- //
- // numDiscardedInputBuffers is the number of input buffers contained in
- // workItems that have just become unused. Note that workItems may contain
- // more input buffers than numDiscardedInputBuffers because buffers that
- // have been previously reported by onInputBufferDone() are not counted
- // towards numDiscardedInputBuffers, but may still show up in workItems.
virtual void onWorkDone(
const std::weak_ptr<Component>& comp,
- std::list<std::unique_ptr<C2Work>>& workItems,
- size_t numDiscardedInputBuffers) = 0;
+ std::list<std::unique_ptr<C2Work>>& workItems) = 0;
// This is called when the component goes into a tripped state.
virtual void onTripped(
@@ -283,7 +274,7 @@
// Input buffers that have been returned by onWorkDone() or flush() will not
// trigger a call to this function.
virtual void onInputBufferDone(
- const std::shared_ptr<C2Buffer>& buffer) = 0;
+ uint64_t frameIndex, size_t arrayIndex) = 0;
// This is called when the component becomes aware of a frame being
// rendered.
@@ -385,24 +376,6 @@
protected:
sp<Base> mBase;
- // Mutex for mInputBuffers and mInputBufferCount.
- mutable std::mutex mInputBuffersMutex;
-
- // Map: frameIndex -> vector of bufferIndices
- //
- // mInputBuffers[frameIndex][bufferIndex] may be null if the buffer in that
- // slot has been freed.
- mutable std::map<uint64_t, std::vector<std::shared_ptr<C2Buffer>>>
- mInputBuffers;
-
- // Map: frameIndex -> number of bufferIndices that have not been freed
- //
- // mInputBufferCount[frameIndex] keeps track of the number of non-null
- // elements in mInputBuffers[frameIndex]. When mInputBufferCount[frameIndex]
- // decreases to 0, frameIndex can be removed from both mInputBuffers and
- // mInputBufferCount.
- mutable std::map<uint64_t, size_t> mInputBufferCount;
-
::android::hardware::media::c2::V1_0::utils::DefaultBufferPoolSender
mBufferPoolSender;
@@ -419,10 +392,7 @@
friend struct Codec2Client;
struct HidlListener;
- // Return the number of input buffers that should be discarded.
- size_t handleOnWorkDone(const std::list<std::unique_ptr<C2Work>> &workItems);
- // Remove an input buffer from mInputBuffers and return it.
- std::shared_ptr<C2Buffer> freeInputBuffer(uint64_t frameIndex, size_t bufferIndex);
+ void handleOnWorkDone(const std::list<std::unique_ptr<C2Work>> &workItems);
};
diff --git a/media/codec2/sfplugin/Android.bp b/media/codec2/sfplugin/Android.bp
index 2870d39..a212651 100644
--- a/media/codec2/sfplugin/Android.bp
+++ b/media/codec2/sfplugin/Android.bp
@@ -8,6 +8,7 @@
"CCodecConfig.cpp",
"Codec2Buffer.cpp",
"Codec2InfoBuilder.cpp",
+ "PipelineWatcher.cpp",
"ReflectedParamUpdater.cpp",
"SkipCutBuffer.cpp",
],
diff --git a/media/codec2/sfplugin/CCodec.cpp b/media/codec2/sfplugin/CCodec.cpp
index 10263de..bc22045 100644
--- a/media/codec2/sfplugin/CCodec.cpp
+++ b/media/codec2/sfplugin/CCodec.cpp
@@ -448,14 +448,13 @@
virtual void onWorkDone(
const std::weak_ptr<Codec2Client::Component>& component,
- std::list<std::unique_ptr<C2Work>>& workItems,
- size_t numDiscardedInputBuffers) override {
+ std::list<std::unique_ptr<C2Work>>& workItems) override {
(void)component;
sp<CCodec> codec(mCodec.promote());
if (!codec) {
return;
}
- codec->onWorkDone(workItems, numDiscardedInputBuffers);
+ codec->onWorkDone(workItems);
}
virtual void onTripped(
@@ -504,10 +503,10 @@
}
virtual void onInputBufferDone(
- const std::shared_ptr<C2Buffer>& buffer) override {
+ uint64_t frameIndex, size_t arrayIndex) override {
sp<CCodec> codec(mCodec.promote());
if (codec) {
- codec->onInputBufferDone(buffer);
+ codec->onInputBufferDone(frameIndex, arrayIndex);
}
}
@@ -531,10 +530,6 @@
{RenderedFrameInfo(mediaTimeUs, renderTimeNs)});
}
- void onWorkQueued(bool eos) override {
- mCodec->onWorkQueued(eos);
- }
-
void onOutputBuffersChanged() override {
mCodec->mCallback->onOutputBuffersChanged();
}
@@ -546,8 +541,7 @@
// CCodec
CCodec::CCodec()
- : mChannel(new CCodecBufferChannel(std::make_shared<CCodecCallbackImpl>(this))),
- mQueuedWorkCount(0) {
+ : mChannel(new CCodecBufferChannel(std::make_shared<CCodecCallbackImpl>(this))) {
}
CCodec::~CCodec() {
@@ -943,6 +937,47 @@
(new AMessage(kWhatCreateInputSurface, this))->post();
}
+sp<PersistentSurface> CCodec::CreateOmxInputSurface() {
+ using namespace android::hardware::media::omx::V1_0;
+ using namespace android::hardware::media::omx::V1_0::utils;
+ using namespace android::hardware::graphics::bufferqueue::V1_0::utils;
+ typedef android::hardware::media::omx::V1_0::Status OmxStatus;
+ android::sp<IOmx> omx = IOmx::getService();
+ typedef android::hardware::graphics::bufferqueue::V1_0::
+ IGraphicBufferProducer HGraphicBufferProducer;
+ typedef android::hardware::media::omx::V1_0::
+ IGraphicBufferSource HGraphicBufferSource;
+ OmxStatus s;
+ android::sp<HGraphicBufferProducer> gbp;
+ android::sp<HGraphicBufferSource> gbs;
+ android::Return<void> transStatus = omx->createInputSurface(
+ [&s, &gbp, &gbs](
+ OmxStatus status,
+ const android::sp<HGraphicBufferProducer>& producer,
+ const android::sp<HGraphicBufferSource>& source) {
+ s = status;
+ gbp = producer;
+ gbs = source;
+ });
+ if (transStatus.isOk() && s == OmxStatus::OK) {
+ return new PersistentSurface(
+ new H2BGraphicBufferProducer(gbp),
+ sp<::android::IGraphicBufferSource>(new LWGraphicBufferSource(gbs)));
+ }
+
+ return nullptr;
+}
+
+sp<PersistentSurface> CCodec::CreateCompatibleInputSurface() {
+ sp<PersistentSurface> surface(CreateInputSurface());
+
+ if (surface == nullptr) {
+ surface = CreateOmxInputSurface();
+ }
+
+ return surface;
+}
+
void CCodec::createInputSurface() {
status_t err;
sp<IGraphicBufferProducer> bufferProducer;
@@ -955,7 +990,7 @@
outputFormat = config->mOutputFormat;
}
- std::shared_ptr<PersistentSurface> persistentSurface(CreateInputSurface());
+ sp<PersistentSurface> persistentSurface = CreateCompatibleInputSurface();
if (persistentSurface->getHidlTarget()) {
sp<IInputSurface> hidlInputSurface = IInputSurface::castFrom(
@@ -1343,7 +1378,6 @@
}
mChannel->flush(flushedWork);
- subQueuedWorkCount(flushedWork.size());
{
Mutexed<State>::Locked state(mState);
@@ -1465,28 +1499,16 @@
config->setParameters(comp, params, C2_MAY_BLOCK);
}
-void CCodec::onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems,
- size_t numDiscardedInputBuffers) {
+void CCodec::onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems) {
if (!workItems.empty()) {
- {
- Mutexed<std::list<size_t>>::Locked numDiscardedInputBuffersQueue(
- mNumDiscardedInputBuffersQueue);
- numDiscardedInputBuffersQueue->insert(
- numDiscardedInputBuffersQueue->end(),
- workItems.size() - 1, 0);
- numDiscardedInputBuffersQueue->emplace_back(
- numDiscardedInputBuffers);
- }
- {
- Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
- queue->splice(queue->end(), workItems);
- }
+ Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
+ queue->splice(queue->end(), workItems);
}
(new AMessage(kWhatWorkDone, this))->post();
}
-void CCodec::onInputBufferDone(const std::shared_ptr<C2Buffer>& buffer) {
- mChannel->onInputBufferDone(buffer);
+void CCodec::onInputBufferDone(uint64_t frameIndex, size_t arrayIndex) {
+ mChannel->onInputBufferDone(frameIndex, arrayIndex);
}
void CCodec::onMessageReceived(const sp<AMessage> &msg) {
@@ -1512,7 +1534,6 @@
case kWhatStart: {
// C2Component::start() should return within 500ms.
setDeadline(now, 550ms, "start");
- mQueuedWorkCount = 0;
start();
break;
}
@@ -1520,10 +1541,6 @@
// C2Component::stop() should return within 500ms.
setDeadline(now, 550ms, "stop");
stop();
-
- mQueuedWorkCount = 0;
- Mutexed<NamedTimePoint>::Locked deadline(mQueueDeadline);
- deadline->set(TimePoint::max(), "none");
break;
}
case kWhatFlush: {
@@ -1549,7 +1566,6 @@
}
case kWhatWorkDone: {
std::unique_ptr<C2Work> work;
- size_t numDiscardedInputBuffers;
bool shouldPost = false;
{
Mutexed<std::list<std::unique_ptr<C2Work>>>::Locked queue(mWorkDoneQueue);
@@ -1560,24 +1576,10 @@
queue->pop_front();
shouldPost = !queue->empty();
}
- {
- Mutexed<std::list<size_t>>::Locked numDiscardedInputBuffersQueue(
- mNumDiscardedInputBuffersQueue);
- if (numDiscardedInputBuffersQueue->empty()) {
- numDiscardedInputBuffers = 0;
- } else {
- numDiscardedInputBuffers = numDiscardedInputBuffersQueue->front();
- numDiscardedInputBuffersQueue->pop_front();
- }
- }
if (shouldPost) {
(new AMessage(kWhatWorkDone, this))->post();
}
- if (work->worklets.empty()
- || !(work->worklets.front()->output.flags & C2FrameData::FLAG_INCOMPLETE)) {
- subQueuedWorkCount(1);
- }
// handle configuration changes in work done
Mutexed<Config>::Locked config(mConfig);
bool changed = false;
@@ -1641,8 +1643,7 @@
}
mChannel->onWorkDone(
std::move(work), changed ? config->mOutputFormat : nullptr,
- initData.hasChanged() ? initData.update().get() : nullptr,
- numDiscardedInputBuffers);
+ initData.hasChanged() ? initData.update().get() : nullptr);
break;
}
case kWhatWatch: {
@@ -1669,17 +1670,26 @@
void CCodec::initiateReleaseIfStuck() {
std::string name;
bool pendingDeadline = false;
- for (Mutexed<NamedTimePoint> *deadlinePtr : { &mDeadline, &mQueueDeadline, &mEosDeadline }) {
- Mutexed<NamedTimePoint>::Locked deadline(*deadlinePtr);
+ {
+ Mutexed<NamedTimePoint>::Locked deadline(mDeadline);
if (deadline->get() < std::chrono::steady_clock::now()) {
name = deadline->getName();
- break;
}
if (deadline->get() != TimePoint::max()) {
pendingDeadline = true;
}
}
if (name.empty()) {
+ constexpr std::chrono::steady_clock::duration kWorkDurationThreshold = 3s;
+ std::chrono::steady_clock::duration elapsed = mChannel->elapsed();
+ if (elapsed >= kWorkDurationThreshold) {
+ name = "queue";
+ }
+ if (elapsed > 0s) {
+ pendingDeadline = true;
+ }
+ }
+ if (name.empty()) {
// We're not stuck.
if (pendingDeadline) {
// If we are not stuck yet but still has deadline coming up,
@@ -1694,79 +1704,23 @@
mCallback->onError(UNKNOWN_ERROR, ACTION_CODE_FATAL);
}
-void CCodec::onWorkQueued(bool eos) {
- ALOGV("queued work count +1 from %d", mQueuedWorkCount.load());
- int32_t count = ++mQueuedWorkCount;
- if (eos) {
- CCodecWatchdog::getInstance()->watch(this);
- Mutexed<NamedTimePoint>::Locked deadline(mEosDeadline);
- deadline->set(std::chrono::steady_clock::now() + 3s, "eos");
- }
- // TODO: query and use input/pipeline/output delay combined
- if (count >= 4) {
- CCodecWatchdog::getInstance()->watch(this);
- Mutexed<NamedTimePoint>::Locked deadline(mQueueDeadline);
- deadline->set(std::chrono::steady_clock::now() + 3s, "queue");
- }
-}
-
-void CCodec::subQueuedWorkCount(uint32_t count) {
- ALOGV("queued work count -%u from %d", count, mQueuedWorkCount.load());
- int32_t currentCount = (mQueuedWorkCount -= count);
- if (currentCount == 0) {
- Mutexed<NamedTimePoint>::Locked deadline(mEosDeadline);
- deadline->set(TimePoint::max(), "none");
- }
- Mutexed<NamedTimePoint>::Locked deadline(mQueueDeadline);
- deadline->set(TimePoint::max(), "none");
-}
-
} // namespace android
extern "C" android::CodecBase *CreateCodec() {
return new android::CCodec;
}
+// Create Codec 2.0 input surface
extern "C" android::PersistentSurface *CreateInputSurface() {
// Attempt to create a Codec2's input surface.
std::shared_ptr<android::Codec2Client::InputSurface> inputSurface =
android::Codec2Client::CreateInputSurface();
- if (inputSurface) {
- return new android::PersistentSurface(
- inputSurface->getGraphicBufferProducer(),
- static_cast<android::sp<android::hidl::base::V1_0::IBase>>(
- inputSurface->getHalInterface()));
+ if (!inputSurface) {
+ return nullptr;
}
-
- // Fall back to OMX.
- using namespace android::hardware::media::omx::V1_0;
- using namespace android::hardware::media::omx::V1_0::utils;
- using namespace android::hardware::graphics::bufferqueue::V1_0::utils;
- typedef android::hardware::media::omx::V1_0::Status OmxStatus;
- android::sp<IOmx> omx = IOmx::getService();
- typedef android::hardware::graphics::bufferqueue::V1_0::
- IGraphicBufferProducer HGraphicBufferProducer;
- typedef android::hardware::media::omx::V1_0::
- IGraphicBufferSource HGraphicBufferSource;
- OmxStatus s;
- android::sp<HGraphicBufferProducer> gbp;
- android::sp<HGraphicBufferSource> gbs;
- android::Return<void> transStatus = omx->createInputSurface(
- [&s, &gbp, &gbs](
- OmxStatus status,
- const android::sp<HGraphicBufferProducer>& producer,
- const android::sp<HGraphicBufferSource>& source) {
- s = status;
- gbp = producer;
- gbs = source;
- });
- if (transStatus.isOk() && s == OmxStatus::OK) {
- return new android::PersistentSurface(
- new H2BGraphicBufferProducer(gbp),
- sp<::android::IGraphicBufferSource>(
- new LWGraphicBufferSource(gbs)));
- }
-
- return nullptr;
+ return new android::PersistentSurface(
+ inputSurface->getGraphicBufferProducer(),
+ static_cast<android::sp<android::hidl::base::V1_0::IBase>>(
+ inputSurface->getHalInterface()));
}
diff --git a/media/codec2/sfplugin/CCodec.h b/media/codec2/sfplugin/CCodec.h
index 78b009e..ba5f5f3 100644
--- a/media/codec2/sfplugin/CCodec.h
+++ b/media/codec2/sfplugin/CCodec.h
@@ -66,9 +66,8 @@
virtual void signalRequestIDRFrame() override;
void initiateReleaseIfStuck();
- void onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems,
- size_t numDiscardedInputBuffers);
- void onInputBufferDone(const std::shared_ptr<C2Buffer>& buffer);
+ void onWorkDone(std::list<std::unique_ptr<C2Work>> &workItems);
+ void onInputBufferDone(uint64_t frameIndex, size_t arrayIndex);
protected:
virtual ~CCodec();
@@ -76,7 +75,7 @@
virtual void onMessageReceived(const sp<AMessage> &msg) override;
private:
- typedef std::chrono::time_point<std::chrono::steady_clock> TimePoint;
+ typedef std::chrono::steady_clock::time_point TimePoint;
status_t tryAndReportOnError(std::function<status_t()> job);
@@ -90,6 +89,16 @@
void flush();
void release(bool sendCallback);
+ /**
+ * Creates an input surface for the current device configuration compatible with CCodec.
+ * This could be backed by the C2 HAL or the OMX HAL.
+ */
+ static sp<PersistentSurface> CreateCompatibleInputSurface();
+
+ /// Creates an input surface to the OMX HAL
+ static sp<PersistentSurface> CreateOmxInputSurface();
+
+ /// handle a create input surface call
void createInputSurface();
void setInputSurface(const sp<PersistentSurface> &surface);
status_t setupInputSurface(const std::shared_ptr<InputSurfaceWrapper> &surface);
@@ -100,9 +109,6 @@
const std::chrono::milliseconds &timeout,
const char *name);
- void onWorkQueued(bool eos);
- void subQueuedWorkCount(uint32_t count);
-
enum {
kWhatAllocate,
kWhatConfigure,
@@ -167,13 +173,9 @@
struct ClientListener;
Mutexed<NamedTimePoint> mDeadline;
- std::atomic_int32_t mQueuedWorkCount;
- Mutexed<NamedTimePoint> mQueueDeadline;
- Mutexed<NamedTimePoint> mEosDeadline;
typedef CCodecConfig Config;
Mutexed<Config> mConfig;
Mutexed<std::list<std::unique_ptr<C2Work>>> mWorkDoneQueue;
- Mutexed<std::list<size_t>> mNumDiscardedInputBuffersQueue;
friend class CCodecCallbackImpl;
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.cpp b/media/codec2/sfplugin/CCodecBufferChannel.cpp
index 8e6a3f8..6842fa5 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.cpp
+++ b/media/codec2/sfplugin/CCodecBufferChannel.cpp
@@ -94,6 +94,11 @@
*/
virtual void getArray(Vector<sp<MediaCodecBuffer>> *) const {}
+ /**
+ * Return number of buffers the client owns.
+ */
+ virtual size_t numClientBuffers() const = 0;
+
protected:
std::string mComponentName; ///< name of component for debugging
std::string mChannelName; ///< name of channel for debugging
@@ -508,6 +513,14 @@
mBuffers.clear();
}
+ size_t numClientBuffers() const {
+ return std::count_if(
+ mBuffers.begin(), mBuffers.end(),
+ [](const Entry &entry) {
+ return (entry.clientBuffer != nullptr);
+ });
+ }
+
private:
friend class BuffersArrayImpl;
@@ -693,6 +706,14 @@
}
}
+ size_t numClientBuffers() const {
+ return std::count_if(
+ mBuffers.begin(), mBuffers.end(),
+ [](const Entry &entry) {
+ return entry.ownedByClient;
+ });
+ }
+
private:
std::string mImplName; ///< name for debugging
const char *mName; ///< C-string version of name
@@ -756,6 +777,10 @@
mImpl.flush();
}
+ size_t numClientBuffers() const final {
+ return mImpl.numClientBuffers();
+ }
+
private:
BuffersArrayImpl mImpl;
};
@@ -823,6 +848,10 @@
return std::move(array);
}
+ size_t numClientBuffers() const final {
+ return mImpl.numClientBuffers();
+ }
+
virtual sp<Codec2Buffer> alloc(size_t size) {
C2MemoryUsage usage = { C2MemoryUsage::CPU_READ, C2MemoryUsage::CPU_WRITE };
std::shared_ptr<C2LinearBlock> block;
@@ -967,6 +996,10 @@
return std::move(array);
}
+ size_t numClientBuffers() const final {
+ return mImpl.numClientBuffers();
+ }
+
private:
FlexBuffersImpl mImpl;
std::shared_ptr<C2AllocatorStore> mStore;
@@ -1030,6 +1063,10 @@
return std::move(array);
}
+ size_t numClientBuffers() const final {
+ return mImpl.numClientBuffers();
+ }
+
private:
FlexBuffersImpl mImpl;
std::shared_ptr<LocalBufferPool> mLocalBufferPool;
@@ -1065,6 +1102,10 @@
void getArray(Vector<sp<MediaCodecBuffer>> *array) const final {
array->clear();
}
+
+ size_t numClientBuffers() const final {
+ return 0u;
+ }
};
class OutputBuffersArray : public CCodecBufferChannel::OutputBuffers {
@@ -1185,6 +1226,10 @@
mImpl.realloc(alloc);
}
+ size_t numClientBuffers() const final {
+ return mImpl.numClientBuffers();
+ }
+
private:
BuffersArrayImpl mImpl;
};
@@ -1246,6 +1291,10 @@
return std::move(array);
}
+ size_t numClientBuffers() const final {
+ return mImpl.numClientBuffers();
+ }
+
/**
* Return an appropriate Codec2Buffer object for the type of buffers.
*
@@ -1422,90 +1471,6 @@
count->value = -1;
}
-// CCodecBufferChannel::PipelineCapacity
-
-CCodecBufferChannel::PipelineCapacity::PipelineCapacity()
- : input(0), component(0),
- mName("<UNKNOWN COMPONENT>") {
-}
-
-void CCodecBufferChannel::PipelineCapacity::initialize(
- int newInput,
- int newComponent,
- const char* newName,
- const char* callerTag) {
- input.store(newInput, std::memory_order_relaxed);
- component.store(newComponent, std::memory_order_relaxed);
- mName = newName;
- ALOGV("[%s] %s -- PipelineCapacity::initialize(): "
- "pipeline availability initialized ==> "
- "input = %d, component = %d",
- mName, callerTag ? callerTag : "*",
- newInput, newComponent);
-}
-
-bool CCodecBufferChannel::PipelineCapacity::allocate(const char* callerTag) {
- int prevInput = input.fetch_sub(1, std::memory_order_relaxed);
- int prevComponent = component.fetch_sub(1, std::memory_order_relaxed);
- if (prevInput > 0 && prevComponent > 0) {
- ALOGV("[%s] %s -- PipelineCapacity::allocate() returns true: "
- "pipeline availability -1 all ==> "
- "input = %d, component = %d",
- mName, callerTag ? callerTag : "*",
- prevInput - 1,
- prevComponent - 1);
- return true;
- }
- input.fetch_add(1, std::memory_order_relaxed);
- component.fetch_add(1, std::memory_order_relaxed);
- ALOGV("[%s] %s -- PipelineCapacity::allocate() returns false: "
- "pipeline availability unchanged ==> "
- "input = %d, component = %d",
- mName, callerTag ? callerTag : "*",
- prevInput,
- prevComponent);
- return false;
-}
-
-void CCodecBufferChannel::PipelineCapacity::free(const char* callerTag) {
- int prevInput = input.fetch_add(1, std::memory_order_relaxed);
- int prevComponent = component.fetch_add(1, std::memory_order_relaxed);
- ALOGV("[%s] %s -- PipelineCapacity::free(): "
- "pipeline availability +1 all ==> "
- "input = %d, component = %d",
- mName, callerTag ? callerTag : "*",
- prevInput + 1,
- prevComponent + 1);
-}
-
-int CCodecBufferChannel::PipelineCapacity::freeInputSlots(
- size_t numDiscardedInputBuffers,
- const char* callerTag) {
- int prevInput = input.fetch_add(numDiscardedInputBuffers,
- std::memory_order_relaxed);
- ALOGV("[%s] %s -- PipelineCapacity::freeInputSlots(%zu): "
- "pipeline availability +%zu input ==> "
- "input = %d, component = %d",
- mName, callerTag ? callerTag : "*",
- numDiscardedInputBuffers,
- numDiscardedInputBuffers,
- prevInput + static_cast<int>(numDiscardedInputBuffers),
- component.load(std::memory_order_relaxed));
- return prevInput + static_cast<int>(numDiscardedInputBuffers);
-}
-
-int CCodecBufferChannel::PipelineCapacity::freeComponentSlot(
- const char* callerTag) {
- int prevComponent = component.fetch_add(1, std::memory_order_relaxed);
- ALOGV("[%s] %s -- PipelineCapacity::freeComponentSlot(): "
- "pipeline availability +1 component ==> "
- "input = %d, component = %d",
- mName, callerTag ? callerTag : "*",
- input.load(std::memory_order_relaxed),
- prevComponent + 1);
- return prevComponent + 1;
-}
-
// CCodecBufferChannel::ReorderStash
CCodecBufferChannel::ReorderStash::ReorderStash() {
@@ -1595,7 +1560,6 @@
mFrameIndex(0u),
mFirstValidFrameIndex(0u),
mMetaMode(MODE_NONE),
- mAvailablePipelineCapacity(),
mInputMetEos(false) {
Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
buffers->reset(new DummyInputBuffers(""));
@@ -1658,6 +1622,9 @@
work->input.ordinal.customOrdinal = timeUs;
work->input.buffers.clear();
+ uint64_t queuedFrameIndex = work->input.ordinal.frameIndex.peeku();
+ std::vector<std::shared_ptr<C2Buffer>> queuedBuffers;
+
if (buffer->size() > 0u) {
Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
std::shared_ptr<C2Buffer> c2buffer;
@@ -1665,11 +1632,9 @@
return -ENOENT;
}
work->input.buffers.push_back(c2buffer);
- } else {
- mAvailablePipelineCapacity.freeInputSlots(1, "queueInputBufferInternal");
- if (eos) {
- flags |= C2FrameData::FLAG_END_OF_STREAM;
- }
+ queuedBuffers.push_back(c2buffer);
+ } else if (eos) {
+ flags |= C2FrameData::FLAG_END_OF_STREAM;
}
work->input.flags = (C2FrameData::flags_t)flags;
// TODO: fill info's
@@ -1680,10 +1645,16 @@
std::list<std::unique_ptr<C2Work>> items;
items.push_back(std::move(work));
+ mPipelineWatcher.lock()->onWorkQueued(
+ queuedFrameIndex,
+ std::move(queuedBuffers),
+ PipelineWatcher::Clock::now());
c2_status_t err = mComponent->queue(&items);
+ if (err != C2_OK) {
+ mPipelineWatcher.lock()->onWorkDone(queuedFrameIndex);
+ }
if (err == C2_OK && eos && buffer->size() > 0u) {
- mCCodecCallback->onWorkQueued(false);
work.reset(new C2Work);
work->input.ordinal.timestamp = timeUs;
work->input.ordinal.frameIndex = mFrameIndex++;
@@ -1693,13 +1664,22 @@
work->input.flags = C2FrameData::FLAG_END_OF_STREAM;
work->worklets.emplace_back(new C2Worklet);
+ queuedFrameIndex = work->input.ordinal.frameIndex.peeku();
+ queuedBuffers.clear();
+
items.clear();
items.push_back(std::move(work));
+
+ mPipelineWatcher.lock()->onWorkQueued(
+ queuedFrameIndex,
+ std::move(queuedBuffers),
+ PipelineWatcher::Clock::now());
err = mComponent->queue(&items);
+ if (err != C2_OK) {
+ mPipelineWatcher.lock()->onWorkDone(queuedFrameIndex);
+ }
}
if (err == C2_OK) {
- mCCodecCallback->onWorkQueued(eos);
-
Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
bool released = (*buffers)->releaseBuffer(buffer, nullptr, true);
ALOGV("[%s] queueInputBuffer: buffer %sreleased", mName, released ? "" : "not ");
@@ -1844,16 +1824,26 @@
}
void CCodecBufferChannel::feedInputBufferIfAvailableInternal() {
- while (!mInputMetEos &&
- !mReorderStash.lock()->hasPending() &&
- mAvailablePipelineCapacity.allocate("feedInputBufferIfAvailable")) {
+ if (mInputMetEos ||
+ mReorderStash.lock()->hasPending() ||
+ mPipelineWatcher.lock()->pipelineFull()) {
+ return;
+ } else {
+ Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
+ if ((*buffers)->numClientBuffers() >= mNumOutputSlots) {
+ return;
+ }
+ }
+ for (size_t i = 0; i < mNumInputSlots; ++i) {
sp<MediaCodecBuffer> inBuffer;
size_t index;
{
Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
+ if ((*buffers)->numClientBuffers() >= mNumInputSlots) {
+ return;
+ }
if (!(*buffers)->requestNewBuffer(&index, &inBuffer)) {
ALOGV("[%s] no new buffer available", mName);
- mAvailablePipelineCapacity.free("feedInputBufferIfAvailable");
break;
}
}
@@ -2032,15 +2022,12 @@
{
Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
if (*buffers && (*buffers)->releaseBuffer(buffer, nullptr, true)) {
- buffers.unlock();
released = true;
- mAvailablePipelineCapacity.freeInputSlots(1, "discardBuffer");
}
}
{
Mutexed<std::unique_ptr<OutputBuffers>>::Locked buffers(mOutputBuffers);
if (*buffers && (*buffers)->releaseBuffer(buffer, nullptr)) {
- buffers.unlock();
released = true;
}
}
@@ -2408,10 +2395,14 @@
// about buffers from the previous generation do not interfere with the
// newly initialized pipeline capacity.
- mAvailablePipelineCapacity.initialize(
- mNumInputSlots,
- mNumInputSlots + mNumOutputSlots,
- mName);
+ {
+ Mutexed<PipelineWatcher>::Locked watcher(mPipelineWatcher);
+ watcher->inputDelay(inputDelay ? inputDelay.value : 0)
+ .pipelineDelay(pipelineDelay ? pipelineDelay.value : 0)
+ .outputDelay(outputDelay ? outputDelay.value : 0)
+ .smoothnessFactor(kSmoothnessFactor);
+ watcher->flush();
+ }
mInputMetEos = false;
mSync.start();
@@ -2472,21 +2463,16 @@
buffer->meta()->setInt64("timeUs", 0);
post = false;
}
- if (mAvailablePipelineCapacity.allocate("requestInitialInputBuffers")) {
- if (post) {
- mCallback->onInputBufferAvailable(index, buffer);
- } else {
- toBeQueued.emplace_back(buffer);
- }
+ if (post) {
+ mCallback->onInputBufferAvailable(index, buffer);
} else {
- ALOGD("[%s] pipeline is full while requesting %zu-th input buffer",
- mName, i);
+ toBeQueued.emplace_back(buffer);
}
}
}
for (const sp<MediaCodecBuffer> &buffer : toBeQueued) {
if (queueInputBufferInternal(buffer) != OK) {
- mAvailablePipelineCapacity.freeComponentSlot("requestInitialInputBuffers");
+ ALOGV("[%s] Error while queueing initial buffers", mName);
}
}
return OK;
@@ -2532,28 +2518,25 @@
(*buffers)->flush(flushedWork);
}
mReorderStash.lock()->flush();
+ mPipelineWatcher.lock()->flush();
}
void CCodecBufferChannel::onWorkDone(
std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
- const C2StreamInitDataInfo::output *initData,
- size_t numDiscardedInputBuffers) {
+ const C2StreamInitDataInfo::output *initData) {
if (handleWork(std::move(work), outputFormat, initData)) {
- mAvailablePipelineCapacity.freeInputSlots(numDiscardedInputBuffers,
- "onWorkDone");
feedInputBufferIfAvailable();
}
}
void CCodecBufferChannel::onInputBufferDone(
- const std::shared_ptr<C2Buffer>& buffer) {
+ uint64_t frameIndex, size_t arrayIndex) {
+ std::shared_ptr<C2Buffer> buffer =
+ mPipelineWatcher.lock()->onInputBufferReleased(frameIndex, arrayIndex);
bool newInputSlotAvailable;
{
Mutexed<std::unique_ptr<InputBuffers>>::Locked buffers(mInputBuffers);
newInputSlotAvailable = (*buffers)->expireComponentBuffer(buffer);
- if (newInputSlotAvailable) {
- mAvailablePipelineCapacity.freeInputSlots(1, "onInputBufferDone");
- }
}
if (newInputSlotAvailable) {
feedInputBufferIfAvailable();
@@ -2573,7 +2556,7 @@
if (work->worklets.size() != 1u
|| !work->worklets.front()
|| !(work->worklets.front()->output.flags & C2FrameData::FLAG_INCOMPLETE)) {
- mAvailablePipelineCapacity.freeComponentSlot("handleWork");
+ mPipelineWatcher.lock()->onWorkDone(work->input.ordinal.frameIndex.peeku());
}
if (work->result == C2_NOT_FOUND) {
@@ -2832,6 +2815,10 @@
return OK;
}
+PipelineWatcher::Clock::duration CCodecBufferChannel::elapsed() {
+ return mPipelineWatcher.lock()->elapsed(PipelineWatcher::Clock::now());
+}
+
void CCodecBufferChannel::setMetaMode(MetaMode mode) {
mMetaMode = mode;
}
diff --git a/media/codec2/sfplugin/CCodecBufferChannel.h b/media/codec2/sfplugin/CCodecBufferChannel.h
index ebc1491..9dccab8 100644
--- a/media/codec2/sfplugin/CCodecBufferChannel.h
+++ b/media/codec2/sfplugin/CCodecBufferChannel.h
@@ -34,6 +34,7 @@
#include <media/ICrypto.h>
#include "InputSurfaceWrapper.h"
+#include "PipelineWatcher.h"
namespace android {
@@ -44,7 +45,6 @@
virtual ~CCodecCallback() = default;
virtual void onError(status_t err, enum ActionCode actionCode) = 0;
virtual void onOutputFramesRendered(int64_t mediaTimeUs, nsecs_t renderTimeNs) = 0;
- virtual void onWorkQueued(bool eos) = 0;
virtual void onOutputBuffersChanged() = 0;
};
@@ -128,22 +128,21 @@
* @param workItems finished work item.
* @param outputFormat new output format if it has changed, otherwise nullptr
* @param initData new init data (CSD) if it has changed, otherwise nullptr
- * @param numDiscardedInputBuffers the number of input buffers that are
- * returned for the first time (not previously returned by
- * onInputBufferDone()).
*/
void onWorkDone(
std::unique_ptr<C2Work> work, const sp<AMessage> &outputFormat,
- const C2StreamInitDataInfo::output *initData,
- size_t numDiscardedInputBuffers);
+ const C2StreamInitDataInfo::output *initData);
/**
* Make an input buffer available for the client as it is no longer needed
* by the codec.
*
- * @param buffer The buffer that becomes unused.
+ * @param frameIndex The index of input work
+ * @param arrayIndex The index of buffer in the input work buffers.
*/
- void onInputBufferDone(const std::shared_ptr<C2Buffer>& buffer);
+ void onInputBufferDone(uint64_t frameIndex, size_t arrayIndex);
+
+ PipelineWatcher::Clock::duration elapsed();
enum MetaMode {
MODE_NONE,
@@ -266,79 +265,7 @@
MetaMode mMetaMode;
- // PipelineCapacity is used in the input buffer gating logic.
- //
- // There are three criteria that need to be met before
- // onInputBufferAvailable() is called:
- // 1. The number of input buffers that have been received by
- // CCodecBufferChannel but not returned via onWorkDone() or
- // onInputBufferDone() does not exceed a certain limit. (Let us call this
- // number the "input" capacity.)
- // 2. The number of work items that have been received by
- // CCodecBufferChannel whose outputs have not been returned from the
- // component (by calling onWorkDone()) does not exceed a certain limit.
- // (Let us call this the "component" capacity.)
- //
- // These three criteria guarantee that a new input buffer that arrives from
- // the invocation of onInputBufferAvailable() will not
- // 1. overload CCodecBufferChannel's input buffers;
- // 2. overload the component; or
- //
- struct PipelineCapacity {
- // The number of available input capacity.
- std::atomic_int input;
- // The number of available component capacity.
- std::atomic_int component;
-
- PipelineCapacity();
- // Set the values of #input and #component.
- void initialize(int newInput, int newComponent,
- const char* newName = "<UNKNOWN COMPONENT>",
- const char* callerTag = nullptr);
-
- // Return true and decrease #input and #component by one if
- // they are all greater than zero; return false otherwise.
- //
- // callerTag is used for logging only.
- //
- // allocate() is called by CCodecBufferChannel to check whether it can
- // receive another input buffer. If the return value is true,
- // onInputBufferAvailable() and onOutputBufferAvailable() can be called
- // afterwards.
- bool allocate(const char* callerTag = nullptr);
-
- // Increase #input and #component by one.
- //
- // callerTag is used for logging only.
- //
- // free() is called by CCodecBufferChannel after allocate() returns true
- // but onInputBufferAvailable() cannot be called for any reasons. It
- // essentially undoes an allocate() call.
- void free(const char* callerTag = nullptr);
-
- // Increase #input by @p numDiscardedInputBuffers.
- //
- // callerTag is used for logging only.
- //
- // freeInputSlots() is called by CCodecBufferChannel when onWorkDone()
- // or onInputBufferDone() is called. @p numDiscardedInputBuffers is
- // provided in onWorkDone(), and is 1 in onInputBufferDone().
- int freeInputSlots(size_t numDiscardedInputBuffers,
- const char* callerTag = nullptr);
-
- // Increase #component by one and return the updated value.
- //
- // callerTag is used for logging only.
- //
- // freeComponentSlot() is called by CCodecBufferChannel when
- // onWorkDone() is called.
- int freeComponentSlot(const char* callerTag = nullptr);
-
- private:
- // Component name. Used for logging.
- const char* mName;
- };
- PipelineCapacity mAvailablePipelineCapacity;
+ Mutexed<PipelineWatcher> mPipelineWatcher;
class ReorderStash {
public:
diff --git a/media/codec2/sfplugin/Codec2InfoBuilder.cpp b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
index 5f0dd0b..ead0a9b 100644
--- a/media/codec2/sfplugin/Codec2InfoBuilder.cpp
+++ b/media/codec2/sfplugin/Codec2InfoBuilder.cpp
@@ -68,262 +68,146 @@
s.compare(s.size() - suffixLen, suffixLen, suffix) == 0;
}
-// Constants from ACodec
-constexpr OMX_U32 kPortIndexInput = 0;
-constexpr OMX_U32 kPortIndexOutput = 1;
-constexpr OMX_U32 kMaxIndicesToCheck = 32;
+void addSupportedProfileLevels(
+ std::shared_ptr<Codec2Client::Interface> intf,
+ MediaCodecInfo::CapabilitiesWriter *caps,
+ const Traits& trait, const std::string &mediaType) {
+ std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
+ C2Mapper::GetProfileLevelMapper(trait.mediaType);
+ // if we don't know the media type, pass through all values unmapped
-status_t queryOmxCapabilities(
- const char* name, const char* mediaType, bool isEncoder,
- MediaCodecInfo::CapabilitiesWriter* caps) {
-
- const char *role = GetComponentRole(isEncoder, mediaType);
- if (role == nullptr) {
- return BAD_VALUE;
- }
-
- using namespace ::android::hardware::media::omx::V1_0;
- using ::android::hardware::Return;
- using ::android::hardware::Void;
- using ::android::hardware::hidl_vec;
- using ::android::hardware::media::omx::V1_0::utils::LWOmxNode;
-
- sp<IOmx> omx = IOmx::getService();
- if (!omx) {
- ALOGW("Could not obtain IOmx service.");
- return NO_INIT;
- }
-
- struct Observer : IOmxObserver {
- virtual Return<void> onMessages(const hidl_vec<Message>&) override {
- return Void();
- }
+ // TODO: we cannot find levels that are local 'maxima' without knowing the coding
+ // e.g. H.263 level 45 and level 30 could be two values for highest level as
+ // they don't include one another. For now we use the last supported value.
+ bool encoder = trait.kind == C2Component::KIND_ENCODER;
+ C2StreamProfileLevelInfo pl(encoder /* output */, 0u);
+ std::vector<C2FieldSupportedValuesQuery> profileQuery = {
+ C2FieldSupportedValuesQuery::Possible(C2ParamField(&pl, &pl.profile))
};
- sp<Observer> observer = new Observer();
- Status status;
- sp<IOmxNode> tOmxNode;
- Return<void> transStatus = omx->allocateNode(
- name, observer,
- [&status, &tOmxNode](Status s, const sp<IOmxNode>& n) {
- status = s;
- tOmxNode = n;
- });
- if (!transStatus.isOk()) {
- ALOGW("IOmx::allocateNode -- transaction failed.");
- return NO_INIT;
- }
- if (status != Status::OK) {
- ALOGW("IOmx::allocateNode -- error returned: %d.",
- static_cast<int>(status));
- return NO_INIT;
+ c2_status_t err = intf->querySupportedValues(profileQuery, C2_DONT_BLOCK);
+ ALOGV("query supported profiles -> %s | %s", asString(err), asString(profileQuery[0].status));
+ if (err != C2_OK || profileQuery[0].status != C2_OK) {
+ return;
}
- sp<LWOmxNode> omxNode = new LWOmxNode(tOmxNode);
-
- status_t err = SetComponentRole(omxNode, role);
- if (err != OK) {
- omxNode->freeNode();
- ALOGW("Failed to SetComponentRole: component = %s, role = %s.",
- name, role);
- return err;
+ // we only handle enumerated values
+ if (profileQuery[0].values.type != C2FieldSupportedValues::VALUES) {
+ return;
}
- bool isVideo = hasPrefix(mediaType, "video/") == 0;
- bool isImage = hasPrefix(mediaType, "image/") == 0;
+ // determine if codec supports HDR
+ bool supportsHdr = false;
+ bool supportsHdr10Plus = false;
- if (isVideo || isImage) {
- OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
- InitOMXParams(¶m);
- param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
-
- for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
- param.nProfileIndex = index;
- status_t err = omxNode->getParameter(
- OMX_IndexParamVideoProfileLevelQuerySupported,
- ¶m, sizeof(param));
- if (err != OK) {
+ std::vector<std::shared_ptr<C2ParamDescriptor>> paramDescs;
+ c2_status_t err1 = intf->querySupportedParams(¶mDescs);
+ if (err1 == C2_OK) {
+ for (const std::shared_ptr<C2ParamDescriptor> &desc : paramDescs) {
+ switch ((uint32_t)desc->index()) {
+ case C2StreamHdr10PlusInfo::output::PARAM_TYPE:
+ supportsHdr10Plus = true;
+ break;
+ case C2StreamHdrStaticInfo::output::PARAM_TYPE:
+ supportsHdr = true;
+ break;
+ default:
break;
}
- caps->addProfileLevel(param.eProfile, param.eLevel);
-
- // AVC components may not list the constrained profiles explicitly, but
- // decoders that support a profile also support its constrained version.
- // Encoders must explicitly support constrained profiles.
- if (!isEncoder && strcasecmp(mediaType, MEDIA_MIMETYPE_VIDEO_AVC) == 0) {
- if (param.eProfile == OMX_VIDEO_AVCProfileHigh) {
- caps->addProfileLevel(OMX_VIDEO_AVCProfileConstrainedHigh, param.eLevel);
- } else if (param.eProfile == OMX_VIDEO_AVCProfileBaseline) {
- caps->addProfileLevel(OMX_VIDEO_AVCProfileConstrainedBaseline, param.eLevel);
- }
- }
-
- if (index == kMaxIndicesToCheck) {
- ALOGW("[%s] stopping checking profiles after %u: %x/%x",
- name, index,
- param.eProfile, param.eLevel);
- }
- }
-
- // Color format query
- // return colors in the order reported by the OMX component
- // prefix "flexible" standard ones with the flexible equivalent
- OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
- InitOMXParams(&portFormat);
- portFormat.nPortIndex = isEncoder ? kPortIndexInput : kPortIndexOutput;
- for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
- portFormat.nIndex = index;
- status_t err = omxNode->getParameter(
- OMX_IndexParamVideoPortFormat,
- &portFormat, sizeof(portFormat));
- if (err != OK) {
- break;
- }
-
- OMX_U32 flexibleEquivalent;
- if (IsFlexibleColorFormat(
- omxNode, portFormat.eColorFormat, false /* usingNativeWindow */,
- &flexibleEquivalent)) {
- caps->addColorFormat(flexibleEquivalent);
- }
- caps->addColorFormat(portFormat.eColorFormat);
-
- if (index == kMaxIndicesToCheck) {
- ALOGW("[%s] stopping checking formats after %u: %s(%x)",
- name, index,
- asString(portFormat.eColorFormat), portFormat.eColorFormat);
- }
- }
- } else if (strcasecmp(mediaType, MEDIA_MIMETYPE_AUDIO_AAC) == 0) {
- // More audio codecs if they have profiles.
- OMX_AUDIO_PARAM_ANDROID_PROFILETYPE param;
- InitOMXParams(¶m);
- param.nPortIndex = isEncoder ? kPortIndexOutput : kPortIndexInput;
- for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
- param.nProfileIndex = index;
- status_t err = omxNode->getParameter(
- (OMX_INDEXTYPE)OMX_IndexParamAudioProfileQuerySupported,
- ¶m, sizeof(param));
- if (err != OK) {
- break;
- }
- // For audio, level is ignored.
- caps->addProfileLevel(param.eProfile, 0 /* level */);
-
- if (index == kMaxIndicesToCheck) {
- ALOGW("[%s] stopping checking profiles after %u: %x",
- name, index,
- param.eProfile);
- }
- }
-
- // NOTE: Without Android extensions, OMX does not provide a way to query
- // AAC profile support
- if (param.nProfileIndex == 0) {
- ALOGW("component %s doesn't support profile query.", name);
}
}
- if (isVideo && !isEncoder) {
- native_handle_t *sidebandHandle = nullptr;
- if (omxNode->configureVideoTunnelMode(
- kPortIndexOutput, OMX_TRUE, 0, &sidebandHandle) == OK) {
- // tunneled playback includes adaptive playback
- } else {
- // tunneled playback is not supported
- caps->removeDetail(MediaCodecInfo::Capabilities::FEATURE_TUNNELED_PLAYBACK);
- if (omxNode->setPortMode(
- kPortIndexOutput, IOMX::kPortModeDynamicANWBuffer) == OK ||
- omxNode->prepareForAdaptivePlayback(
- kPortIndexOutput, OMX_TRUE,
- 1280 /* width */, 720 /* height */) != OK) {
- // adaptive playback is not supported
- caps->removeDetail(MediaCodecInfo::Capabilities::FEATURE_ADAPTIVE_PLAYBACK);
- }
- }
- }
+ // For VP9, the static info is always propagated by framework.
+ supportsHdr |= (mediaType == MIMETYPE_VIDEO_VP9);
- if (isVideo && isEncoder) {
- OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
- InitOMXParams(¶ms);
- params.nPortIndex = kPortIndexOutput;
-
- OMX_VIDEO_PARAM_INTRAREFRESHTYPE fallbackParams;
- InitOMXParams(&fallbackParams);
- fallbackParams.nPortIndex = kPortIndexOutput;
- fallbackParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
-
- if (omxNode->getConfig(
- (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh,
- ¶ms, sizeof(params)) != OK &&
- omxNode->getParameter(
- OMX_IndexParamVideoIntraRefresh, &fallbackParams,
- sizeof(fallbackParams)) != OK) {
- // intra refresh is not supported
- caps->removeDetail(MediaCodecInfo::Capabilities::FEATURE_INTRA_REFRESH);
- }
- }
-
- omxNode->freeNode();
- return OK;
-}
-
-void buildOmxInfo(const MediaCodecsXmlParser& parser,
- MediaCodecListWriter* writer) {
- uint32_t omxRank = ::android::base::GetUintProperty(
- "debug.stagefright.omx_default_rank", uint32_t(0x100));
- for (const MediaCodecsXmlParser::Codec& codec : parser.getCodecMap()) {
- const std::string &name = codec.first;
- if (!hasPrefix(codec.first, "OMX.")) {
+ for (C2Value::Primitive profile : profileQuery[0].values.values) {
+ pl.profile = (C2Config::profile_t)profile.ref<uint32_t>();
+ std::vector<std::unique_ptr<C2SettingResult>> failures;
+ err = intf->config({&pl}, C2_DONT_BLOCK, &failures);
+ ALOGV("set profile to %u -> %s", pl.profile, asString(err));
+ std::vector<C2FieldSupportedValuesQuery> levelQuery = {
+ C2FieldSupportedValuesQuery::Current(C2ParamField(&pl, &pl.level))
+ };
+ err = intf->querySupportedValues(levelQuery, C2_DONT_BLOCK);
+ ALOGV("query supported levels -> %s | %s", asString(err), asString(levelQuery[0].status));
+ if (err != C2_OK || levelQuery[0].status != C2_OK
+ || levelQuery[0].values.type != C2FieldSupportedValues::VALUES
+ || levelQuery[0].values.values.size() == 0) {
continue;
}
- const MediaCodecsXmlParser::CodecProperties &properties = codec.second;
- bool encoder = properties.isEncoder;
- std::unique_ptr<MediaCodecInfoWriter> info =
- writer->addMediaCodecInfo();
- info->setName(name.c_str());
- info->setOwner("default");
- typename std::underlying_type<MediaCodecInfo::Attributes>::type attrs = 0;
- if (encoder) {
- attrs |= MediaCodecInfo::kFlagIsEncoder;
- }
- // NOTE: we don't support software-only codecs in OMX
- if (!hasPrefix(name, "OMX.google.")) {
- attrs |= MediaCodecInfo::kFlagIsVendor;
- if (properties.quirkSet.find("attribute::software-codec")
- == properties.quirkSet.end()) {
- attrs |= MediaCodecInfo::kFlagIsHardwareAccelerated;
- }
- }
- info->setAttributes(attrs);
- info->setRank(omxRank);
- // OMX components don't have aliases
- for (const MediaCodecsXmlParser::Type &type : properties.typeMap) {
- const std::string &mediaType = type.first;
- std::unique_ptr<MediaCodecInfo::CapabilitiesWriter> caps =
- info->addMediaType(mediaType.c_str());
- const MediaCodecsXmlParser::AttributeMap &attrMap = type.second;
- for (const MediaCodecsXmlParser::Attribute& attr : attrMap) {
- const std::string &key = attr.first;
- const std::string &value = attr.second;
- if (hasPrefix(key, "feature-") &&
- !hasPrefix(key, "feature-bitrate-modes")) {
- caps->addDetail(key.c_str(), hasPrefix(value, "1") ? 1 : 0);
- } else {
- caps->addDetail(key.c_str(), value.c_str());
+
+ C2Value::Primitive level = levelQuery[0].values.values.back();
+ pl.level = (C2Config::level_t)level.ref<uint32_t>();
+ ALOGV("supporting level: %u", pl.level);
+ int32_t sdkProfile, sdkLevel;
+ if (mapper && mapper->mapProfile(pl.profile, &sdkProfile)
+ && mapper->mapLevel(pl.level, &sdkLevel)) {
+ caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
+ // also list HDR profiles if component supports HDR
+ if (supportsHdr) {
+ auto hdrMapper = C2Mapper::GetHdrProfileLevelMapper(trait.mediaType);
+ if (hdrMapper && hdrMapper->mapProfile(pl.profile, &sdkProfile)) {
+ caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
+ }
+ if (supportsHdr10Plus) {
+ hdrMapper = C2Mapper::GetHdrProfileLevelMapper(
+ trait.mediaType, true /*isHdr10Plus*/);
+ if (hdrMapper && hdrMapper->mapProfile(pl.profile, &sdkProfile)) {
+ caps->addProfileLevel((uint32_t)sdkProfile, (uint32_t)sdkLevel);
+ }
}
}
- status_t err = queryOmxCapabilities(
- name.c_str(),
- mediaType.c_str(),
- encoder,
- caps.get());
- if (err != OK) {
- ALOGI("Failed to query capabilities for %s (media type: %s). Error: %d",
- name.c_str(),
- mediaType.c_str(),
- static_cast<int>(err));
+ } else if (!mapper) {
+ caps->addProfileLevel(pl.profile, pl.level);
+ }
+
+ // for H.263 also advertise the second highest level if the
+ // codec supports level 45, as level 45 only covers level 10
+ // TODO: move this to some form of a setting so it does not
+ // have to be here
+ if (mediaType == MIMETYPE_VIDEO_H263) {
+ C2Config::level_t nextLevel = C2Config::LEVEL_UNUSED;
+ for (C2Value::Primitive v : levelQuery[0].values.values) {
+ C2Config::level_t level = (C2Config::level_t)v.ref<uint32_t>();
+ if (level < C2Config::LEVEL_H263_45 && level > nextLevel) {
+ nextLevel = level;
+ }
}
+ if (nextLevel != C2Config::LEVEL_UNUSED
+ && nextLevel != pl.level
+ && mapper
+ && mapper->mapProfile(pl.profile, &sdkProfile)
+ && mapper->mapLevel(nextLevel, &sdkLevel)) {
+ caps->addProfileLevel(
+ (uint32_t)sdkProfile, (uint32_t)sdkLevel);
+ }
+ }
+ }
+}
+
+void addSupportedColorFormats(
+ std::shared_ptr<Codec2Client::Interface> intf,
+ MediaCodecInfo::CapabilitiesWriter *caps,
+ const Traits& trait, const std::string &mediaType) {
+ (void)intf;
+
+ // TODO: get this from intf() as well, but how do we map them to
+ // MediaCodec color formats?
+ bool encoder = trait.kind == C2Component::KIND_ENCODER;
+ if (mediaType.find("video") != std::string::npos) {
+ // vendor video codecs prefer opaque format
+ if (trait.name.find("android") == std::string::npos) {
+ caps->addColorFormat(COLOR_FormatSurface);
+ }
+ caps->addColorFormat(COLOR_FormatYUV420Flexible);
+ caps->addColorFormat(COLOR_FormatYUV420Planar);
+ caps->addColorFormat(COLOR_FormatYUV420SemiPlanar);
+ caps->addColorFormat(COLOR_FormatYUV420PackedPlanar);
+ caps->addColorFormat(COLOR_FormatYUV420PackedSemiPlanar);
+ // framework video encoders must support surface format, though it is unclear
+ // that they will be able to map it if it is opaque
+ if (encoder && trait.name.find("android") != std::string::npos) {
+ caps->addColorFormat(COLOR_FormatSurface);
}
}
}
@@ -335,7 +219,7 @@
// properly. (Assume "full" behavior eventually.)
//
// debug.stagefright.ccodec supports 5 values.
- // 0 - Only OMX components are available.
+ // 0 - No Codec 2.0 components are available.
// 1 - Audio decoders and encoders with prefix "c2.android." are available
// and ranked first.
// All other components with prefix "c2.android." are available with
@@ -366,306 +250,156 @@
MediaCodecsXmlParser parser(
MediaCodecsXmlParser::defaultSearchDirs,
- option == 0 ? "media_codecs.xml" :
- "media_codecs_c2.xml",
- option == 0 ? "media_codecs_performance.xml" :
- "media_codecs_performance_c2.xml");
+ "media_codecs_c2.xml",
+ "media_codecs_performance_c2.xml");
if (parser.getParsingStatus() != OK) {
ALOGD("XML parser no good");
return OK;
}
- bool surfaceTest(Codec2Client::CreateInputSurface());
- if (option == 0 || (option != 4 && !surfaceTest)) {
- buildOmxInfo(parser, writer);
- }
-
for (const Traits& trait : traits) {
C2Component::rank_t rank = trait.rank;
- std::shared_ptr<Codec2Client::Interface> intf =
- Codec2Client::CreateInterfaceByName(trait.name.c_str());
- if (!intf || parser.getCodecMap().count(intf->getName()) == 0) {
- ALOGD("%s not found in xml", trait.name.c_str());
- continue;
- }
- std::string canonName = intf->getName();
-
- // TODO: Remove this block once all codecs are enabled by default.
- switch (option) {
- case 0:
- continue;
- case 1:
- if (hasPrefix(canonName, "c2.vda.")) {
- break;
+ // Interface must be accessible for us to list the component, and there also
+ // must be an XML entry for the codec. Codec aliases listed in the traits
+ // allow additional XML entries to be specified for each alias. These will
+ // be listed as separate codecs. If no XML entry is specified for an alias,
+ // those will be treated as an additional alias specified in the XML entry
+ // for the interface name.
+ std::vector<std::string> nameAndAliases = trait.aliases;
+ nameAndAliases.insert(nameAndAliases.begin(), trait.name);
+ for (const std::string &nameOrAlias : nameAndAliases) {
+ bool isAlias = trait.name != nameOrAlias;
+ std::shared_ptr<Codec2Client::Interface> intf =
+ Codec2Client::CreateInterfaceByName(nameOrAlias.c_str());
+ if (!intf) {
+ ALOGD("could not create interface for %s'%s'",
+ isAlias ? "alias " : "",
+ nameOrAlias.c_str());
+ continue;
}
- if (hasPrefix(canonName, "c2.android.")) {
- if (trait.domain == C2Component::DOMAIN_AUDIO) {
+ if (parser.getCodecMap().count(nameOrAlias) == 0) {
+ if (isAlias) {
+ std::unique_ptr<MediaCodecInfoWriter> baseCodecInfo =
+ writer->findMediaCodecInfo(trait.name.c_str());
+ if (!baseCodecInfo) {
+ ALOGD("alias '%s' not found in xml but canonical codec info '%s' missing",
+ nameOrAlias.c_str(),
+ trait.name.c_str());
+ } else {
+ ALOGD("alias '%s' not found in xml; use an XML <Alias> tag for this",
+ nameOrAlias.c_str());
+ // merge alias into existing codec
+ baseCodecInfo->addAlias(nameOrAlias.c_str());
+ }
+ } else {
+ ALOGD("component '%s' not found in xml", trait.name.c_str());
+ }
+ continue;
+ }
+ std::string canonName = trait.name;
+
+ // TODO: Remove this block once all codecs are enabled by default.
+ switch (option) {
+ case 0:
+ continue;
+ case 1:
+ if (hasPrefix(canonName, "c2.vda.")) {
+ break;
+ }
+ if (hasPrefix(canonName, "c2.android.")) {
+ if (trait.domain == C2Component::DOMAIN_AUDIO) {
+ rank = 1;
+ break;
+ }
+ break;
+ }
+ if (hasSuffix(canonName, ".avc.decoder") ||
+ hasSuffix(canonName, ".avc.encoder")) {
+ rank = std::numeric_limits<decltype(rank)>::max();
+ break;
+ }
+ continue;
+ case 2:
+ if (hasPrefix(canonName, "c2.vda.")) {
+ break;
+ }
+ if (hasPrefix(canonName, "c2.android.")) {
rank = 1;
break;
}
+ if (hasSuffix(canonName, ".avc.decoder") ||
+ hasSuffix(canonName, ".avc.encoder")) {
+ rank = std::numeric_limits<decltype(rank)>::max();
+ break;
+ }
+ continue;
+ case 3:
+ if (hasPrefix(canonName, "c2.android.")) {
+ rank = 1;
+ }
break;
}
- if (hasSuffix(canonName, ".avc.decoder") ||
- hasSuffix(canonName, ".avc.encoder")) {
- rank = std::numeric_limits<decltype(rank)>::max();
- break;
- }
- continue;
- case 2:
- if (hasPrefix(canonName, "c2.vda.")) {
- break;
- }
- if (hasPrefix(canonName, "c2.android.")) {
- rank = 1;
- break;
- }
- if (hasSuffix(canonName, ".avc.decoder") ||
- hasSuffix(canonName, ".avc.encoder")) {
- rank = std::numeric_limits<decltype(rank)>::max();
- break;
- }
- continue;
- case 3:
- if (hasPrefix(canonName, "c2.android.")) {
- rank = 1;
- }
- break;
- }
- ALOGV("canonName = %s", canonName.c_str());
- std::unique_ptr<MediaCodecInfoWriter> codecInfo = writer->addMediaCodecInfo();
- codecInfo->setName(trait.name.c_str());
- codecInfo->setOwner(("codec2::" + trait.owner).c_str());
- const MediaCodecsXmlParser::CodecProperties &codec = parser.getCodecMap().at(canonName);
+ ALOGV("adding codec entry for '%s'", nameOrAlias.c_str());
+ std::unique_ptr<MediaCodecInfoWriter> codecInfo = writer->addMediaCodecInfo();
+ codecInfo->setName(nameOrAlias.c_str());
+ codecInfo->setOwner(("codec2::" + trait.owner).c_str());
+ const MediaCodecsXmlParser::CodecProperties &codec =
+ parser.getCodecMap().at(nameOrAlias);
- bool encoder = trait.kind == C2Component::KIND_ENCODER;
- typename std::underlying_type<MediaCodecInfo::Attributes>::type attrs = 0;
+ bool encoder = trait.kind == C2Component::KIND_ENCODER;
+ typename std::underlying_type<MediaCodecInfo::Attributes>::type attrs = 0;
- if (encoder) {
- attrs |= MediaCodecInfo::kFlagIsEncoder;
- }
- if (trait.owner == "software") {
- attrs |= MediaCodecInfo::kFlagIsSoftwareOnly;
- } else {
- attrs |= MediaCodecInfo::kFlagIsVendor;
- if (trait.owner == "vendor-software") {
+ if (encoder) {
+ attrs |= MediaCodecInfo::kFlagIsEncoder;
+ }
+ if (trait.owner == "software") {
attrs |= MediaCodecInfo::kFlagIsSoftwareOnly;
- } else if (codec.quirkSet.find("attribute::software-codec") == codec.quirkSet.end()) {
- attrs |= MediaCodecInfo::kFlagIsHardwareAccelerated;
- }
- }
- codecInfo->setAttributes(attrs);
- codecInfo->setRank(rank);
-
- for (const std::string &alias : codec.aliases) {
- codecInfo->addAlias(alias.c_str());
- }
-
- for (auto typeIt = codec.typeMap.begin(); typeIt != codec.typeMap.end(); ++typeIt) {
- const std::string &mediaType = typeIt->first;
- const MediaCodecsXmlParser::AttributeMap &attrMap = typeIt->second;
- std::unique_ptr<MediaCodecInfo::CapabilitiesWriter> caps =
- codecInfo->addMediaType(mediaType.c_str());
- for (auto attrIt = attrMap.begin(); attrIt != attrMap.end(); ++attrIt) {
- std::string key, value;
- std::tie(key, value) = *attrIt;
- if (key.find("feature-") == 0 && key.find("feature-bitrate-modes") != 0) {
- caps->addDetail(key.c_str(), std::stoi(value));
- } else {
- caps->addDetail(key.c_str(), value.c_str());
+ } else {
+ attrs |= MediaCodecInfo::kFlagIsVendor;
+ if (trait.owner == "vendor-software") {
+ attrs |= MediaCodecInfo::kFlagIsSoftwareOnly;
+ } else if (codec.quirkSet.find("attribute::software-codec")
+ == codec.quirkSet.end()) {
+ attrs |= MediaCodecInfo::kFlagIsHardwareAccelerated;
}
}
-
- bool gotProfileLevels = false;
- if (intf) {
- std::shared_ptr<C2Mapper::ProfileLevelMapper> mapper =
- C2Mapper::GetProfileLevelMapper(trait.mediaType);
- // if we don't know the media type, pass through all values unmapped
-
- // TODO: we cannot find levels that are local 'maxima' without knowing the coding
- // e.g. H.263 level 45 and level 30 could be two values for highest level as
- // they don't include one another. For now we use the last supported value.
- C2StreamProfileLevelInfo pl(encoder /* output */, 0u);
- std::vector<C2FieldSupportedValuesQuery> profileQuery = {
- C2FieldSupportedValuesQuery::Possible(C2ParamField(&pl, &pl.profile))
- };
-
- c2_status_t err = intf->querySupportedValues(profileQuery, C2_DONT_BLOCK);
- ALOGV("query supported profiles -> %s | %s",
- asString(err), asString(profileQuery[0].status));
- if (err == C2_OK && profileQuery[0].status == C2_OK) {
- if (profileQuery[0].values.type == C2FieldSupportedValues::VALUES) {
- std::vector<std::shared_ptr<C2ParamDescriptor>> paramDescs;
- c2_status_t err1 = intf->querySupportedParams(¶mDescs);
- bool isHdr = false, isHdr10Plus = false;
- if (err1 == C2_OK) {
- for (const std::shared_ptr<C2ParamDescriptor> &desc : paramDescs) {
- if ((uint32_t)desc->index() ==
- C2StreamHdr10PlusInfo::output::PARAM_TYPE) {
- isHdr10Plus = true;
- } else if ((uint32_t)desc->index() ==
- C2StreamHdrStaticInfo::output::PARAM_TYPE) {
- isHdr = true;
- }
- }
- }
- // For VP9, the static info is always propagated by framework.
- isHdr |= (mediaType == MIMETYPE_VIDEO_VP9);
-
- for (C2Value::Primitive profile : profileQuery[0].values.values) {
- pl.profile = (C2Config::profile_t)profile.ref<uint32_t>();
- std::vector<std::unique_ptr<C2SettingResult>> failures;
- err = intf->config({&pl}, C2_DONT_BLOCK, &failures);
- ALOGV("set profile to %u -> %s", pl.profile, asString(err));
- std::vector<C2FieldSupportedValuesQuery> levelQuery = {
- C2FieldSupportedValuesQuery::Current(C2ParamField(&pl, &pl.level))
- };
- err = intf->querySupportedValues(levelQuery, C2_DONT_BLOCK);
- ALOGV("query supported levels -> %s | %s",
- asString(err), asString(levelQuery[0].status));
- if (err == C2_OK && levelQuery[0].status == C2_OK) {
- if (levelQuery[0].values.type == C2FieldSupportedValues::VALUES
- && levelQuery[0].values.values.size() > 0) {
- C2Value::Primitive level = levelQuery[0].values.values.back();
- pl.level = (C2Config::level_t)level.ref<uint32_t>();
- ALOGV("supporting level: %u", pl.level);
- int32_t sdkProfile, sdkLevel;
- if (mapper && mapper->mapProfile(pl.profile, &sdkProfile)
- && mapper->mapLevel(pl.level, &sdkLevel)) {
- caps->addProfileLevel(
- (uint32_t)sdkProfile, (uint32_t)sdkLevel);
- gotProfileLevels = true;
- if (isHdr) {
- auto hdrMapper = C2Mapper::GetHdrProfileLevelMapper(
- trait.mediaType);
- if (hdrMapper && hdrMapper->mapProfile(
- pl.profile, &sdkProfile)) {
- caps->addProfileLevel(
- (uint32_t)sdkProfile,
- (uint32_t)sdkLevel);
- }
- if (isHdr10Plus) {
- hdrMapper = C2Mapper::GetHdrProfileLevelMapper(
- trait.mediaType, true /*isHdr10Plus*/);
- if (hdrMapper && hdrMapper->mapProfile(
- pl.profile, &sdkProfile)) {
- caps->addProfileLevel(
- (uint32_t)sdkProfile,
- (uint32_t)sdkLevel);
- }
- }
- }
- } else if (!mapper) {
- caps->addProfileLevel(pl.profile, pl.level);
- gotProfileLevels = true;
- }
-
- // for H.263 also advertise the second highest level if the
- // codec supports level 45, as level 45 only covers level 10
- // TODO: move this to some form of a setting so it does not
- // have to be here
- if (mediaType == MIMETYPE_VIDEO_H263) {
- C2Config::level_t nextLevel = C2Config::LEVEL_UNUSED;
- for (C2Value::Primitive v : levelQuery[0].values.values) {
- C2Config::level_t level =
- (C2Config::level_t)v.ref<uint32_t>();
- if (level < C2Config::LEVEL_H263_45
- && level > nextLevel) {
- nextLevel = level;
- }
- }
- if (nextLevel != C2Config::LEVEL_UNUSED
- && nextLevel != pl.level
- && mapper
- && mapper->mapProfile(pl.profile, &sdkProfile)
- && mapper->mapLevel(nextLevel, &sdkLevel)) {
- caps->addProfileLevel(
- (uint32_t)sdkProfile, (uint32_t)sdkLevel);
- }
- }
- }
- }
- }
- }
+ codecInfo->setAttributes(attrs);
+ if (!codec.rank.empty()) {
+ uint32_t xmlRank;
+ char dummy;
+ if (sscanf(codec.rank.c_str(), "%u%c", &xmlRank, &dummy) == 1) {
+ rank = xmlRank;
}
}
+ codecInfo->setRank(rank);
- if (!gotProfileLevels) {
- if (mediaType == MIMETYPE_VIDEO_VP9) {
- if (encoder) {
- caps->addProfileLevel(VP9Profile0, VP9Level41);
- } else {
- caps->addProfileLevel(VP9Profile0, VP9Level5);
- caps->addProfileLevel(VP9Profile2, VP9Level5);
- caps->addProfileLevel(VP9Profile2HDR, VP9Level5);
- }
- } else if (mediaType == MIMETYPE_VIDEO_AV1 && !encoder) {
- caps->addProfileLevel(AV1Profile0, AV1Level2);
- caps->addProfileLevel(AV1Profile0, AV1Level21);
- caps->addProfileLevel(AV1Profile1, AV1Level22);
- caps->addProfileLevel(AV1Profile1, AV1Level3);
- caps->addProfileLevel(AV1Profile2, AV1Level31);
- caps->addProfileLevel(AV1Profile2, AV1Level32);
- } else if (mediaType == MIMETYPE_VIDEO_HEVC && !encoder) {
- caps->addProfileLevel(HEVCProfileMain, HEVCMainTierLevel51);
- caps->addProfileLevel(HEVCProfileMainStill, HEVCMainTierLevel51);
- } else if (mediaType == MIMETYPE_VIDEO_VP8) {
- if (encoder) {
- caps->addProfileLevel(VP8ProfileMain, VP8Level_Version0);
- } else {
- caps->addProfileLevel(VP8ProfileMain, VP8Level_Version0);
- }
- } else if (mediaType == MIMETYPE_VIDEO_AVC) {
- if (encoder) {
- caps->addProfileLevel(AVCProfileBaseline, AVCLevel41);
-// caps->addProfileLevel(AVCProfileConstrainedBaseline, AVCLevel41);
- caps->addProfileLevel(AVCProfileMain, AVCLevel41);
- } else {
- caps->addProfileLevel(AVCProfileBaseline, AVCLevel52);
- caps->addProfileLevel(AVCProfileConstrainedBaseline, AVCLevel52);
- caps->addProfileLevel(AVCProfileMain, AVCLevel52);
- caps->addProfileLevel(AVCProfileConstrainedHigh, AVCLevel52);
- caps->addProfileLevel(AVCProfileHigh, AVCLevel52);
- }
- } else if (mediaType == MIMETYPE_VIDEO_MPEG4) {
- if (encoder) {
- caps->addProfileLevel(MPEG4ProfileSimple, MPEG4Level2);
- } else {
- caps->addProfileLevel(MPEG4ProfileSimple, MPEG4Level3);
- }
- } else if (mediaType == MIMETYPE_VIDEO_H263) {
- if (encoder) {
- caps->addProfileLevel(H263ProfileBaseline, H263Level45);
- } else {
- caps->addProfileLevel(H263ProfileBaseline, H263Level30);
- caps->addProfileLevel(H263ProfileBaseline, H263Level45);
- caps->addProfileLevel(H263ProfileISWV2, H263Level30);
- caps->addProfileLevel(H263ProfileISWV2, H263Level45);
- }
- } else if (mediaType == MIMETYPE_VIDEO_MPEG2 && !encoder) {
- caps->addProfileLevel(MPEG2ProfileSimple, MPEG2LevelHL);
- caps->addProfileLevel(MPEG2ProfileMain, MPEG2LevelHL);
- }
+ for (const std::string &alias : codec.aliases) {
+ ALOGV("adding alias '%s'", alias.c_str());
+ codecInfo->addAlias(alias.c_str());
}
- // TODO: get this from intf() as well, but how do we map them to
- // MediaCodec color formats?
- if (mediaType.find("video") != std::string::npos) {
- // vendor video codecs prefer opaque format
- if (trait.name.find("android") == std::string::npos) {
- caps->addColorFormat(COLOR_FormatSurface);
+ for (auto typeIt = codec.typeMap.begin(); typeIt != codec.typeMap.end(); ++typeIt) {
+ const std::string &mediaType = typeIt->first;
+ const MediaCodecsXmlParser::AttributeMap &attrMap = typeIt->second;
+ std::unique_ptr<MediaCodecInfo::CapabilitiesWriter> caps =
+ codecInfo->addMediaType(mediaType.c_str());
+ for (auto attrIt = attrMap.begin(); attrIt != attrMap.end(); ++attrIt) {
+ std::string key, value;
+ std::tie(key, value) = *attrIt;
+ if (key.find("feature-") == 0 && key.find("feature-bitrate-modes") != 0) {
+ int32_t intValue = 0;
+ // Ignore trailing bad characters and default to 0.
+ (void)sscanf(value.c_str(), "%d", &intValue);
+ caps->addDetail(key.c_str(), intValue);
+ } else {
+ caps->addDetail(key.c_str(), value.c_str());
+ }
}
- caps->addColorFormat(COLOR_FormatYUV420Flexible);
- caps->addColorFormat(COLOR_FormatYUV420Planar);
- caps->addColorFormat(COLOR_FormatYUV420SemiPlanar);
- caps->addColorFormat(COLOR_FormatYUV420PackedPlanar);
- caps->addColorFormat(COLOR_FormatYUV420PackedSemiPlanar);
- // framework video encoders must support surface format, though it is unclear
- // that they will be able to map it if it is opaque
- if (encoder && trait.name.find("android") != std::string::npos) {
- caps->addColorFormat(COLOR_FormatSurface);
- }
+
+ addSupportedProfileLevels(intf, caps.get(), trait, mediaType);
+ addSupportedColorFormats(intf, caps.get(), trait, mediaType);
}
}
}
@@ -677,4 +411,3 @@
extern "C" android::MediaCodecListBuilderBase *CreateBuilder() {
return new android::Codec2InfoBuilder;
}
-
diff --git a/media/codec2/sfplugin/PipelineWatcher.cpp b/media/codec2/sfplugin/PipelineWatcher.cpp
new file mode 100644
index 0000000..fe0a2c8
--- /dev/null
+++ b/media/codec2/sfplugin/PipelineWatcher.cpp
@@ -0,0 +1,145 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//#define LOG_NDEBUG 0
+#define LOG_TAG "PipelineWatcher"
+
+#include <numeric>
+
+#include <log/log.h>
+
+#include "PipelineWatcher.h"
+
+namespace android {
+
+PipelineWatcher &PipelineWatcher::inputDelay(uint32_t value) {
+ mInputDelay = value;
+ return *this;
+}
+
+PipelineWatcher &PipelineWatcher::pipelineDelay(uint32_t value) {
+ mPipelineDelay = value;
+ return *this;
+}
+
+PipelineWatcher &PipelineWatcher::outputDelay(uint32_t value) {
+ mOutputDelay = value;
+ return *this;
+}
+
+PipelineWatcher &PipelineWatcher::smoothnessFactor(uint32_t value) {
+ mSmoothnessFactor = value;
+ return *this;
+}
+
+void PipelineWatcher::onWorkQueued(
+ uint64_t frameIndex,
+ std::vector<std::shared_ptr<C2Buffer>> &&buffers,
+ const Clock::time_point &queuedAt) {
+ ALOGV("onWorkQueued(frameIndex=%llu, buffers(size=%zu), queuedAt=%lld)",
+ (unsigned long long)frameIndex,
+ buffers.size(),
+ (long long)queuedAt.time_since_epoch().count());
+ auto it = mFramesInPipeline.find(frameIndex);
+ if (it != mFramesInPipeline.end()) {
+ ALOGD("onWorkQueued: Duplicate frame index (%llu); previous entry removed",
+ (unsigned long long)frameIndex);
+ (void)mFramesInPipeline.erase(it);
+ }
+ (void)mFramesInPipeline.try_emplace(frameIndex, std::move(buffers), queuedAt);
+}
+
+std::shared_ptr<C2Buffer> PipelineWatcher::onInputBufferReleased(
+ uint64_t frameIndex, size_t arrayIndex) {
+ ALOGV("onInputBufferReleased(frameIndex=%llu, arrayIndex=%zu)",
+ (unsigned long long)frameIndex, arrayIndex);
+ auto it = mFramesInPipeline.find(frameIndex);
+ if (it == mFramesInPipeline.end()) {
+ ALOGD("onInputBufferReleased: frameIndex not found (%llu); ignored",
+ (unsigned long long)frameIndex);
+ return nullptr;
+ }
+ if (it->second.buffers.size() <= arrayIndex) {
+ ALOGD("onInputBufferReleased: buffers at %llu: size %zu, requested index: %zu",
+ (unsigned long long)frameIndex, it->second.buffers.size(), arrayIndex);
+ return nullptr;
+ }
+ std::shared_ptr<C2Buffer> buffer(std::move(it->second.buffers[arrayIndex]));
+ ALOGD_IF(!buffer, "onInputBufferReleased: buffer already released (%llu:%zu)",
+ (unsigned long long)frameIndex, arrayIndex);
+ return buffer;
+}
+
+void PipelineWatcher::onWorkDone(uint64_t frameIndex) {
+ ALOGV("onWorkDone(frameIndex=%llu)", (unsigned long long)frameIndex);
+ auto it = mFramesInPipeline.find(frameIndex);
+ if (it == mFramesInPipeline.end()) {
+ ALOGD("onWorkDone: frameIndex not found (%llu); ignored",
+ (unsigned long long)frameIndex);
+ return;
+ }
+ (void)mFramesInPipeline.erase(it);
+}
+
+void PipelineWatcher::flush() {
+ mFramesInPipeline.clear();
+}
+
+bool PipelineWatcher::pipelineFull() const {
+ if (mFramesInPipeline.size() >=
+ mInputDelay + mPipelineDelay + mOutputDelay + mSmoothnessFactor) {
+ ALOGV("pipelineFull: too many frames in pipeline (%zu)", mFramesInPipeline.size());
+ return true;
+ }
+ size_t sizeWithInputReleased = std::count_if(
+ mFramesInPipeline.begin(),
+ mFramesInPipeline.end(),
+ [](const decltype(mFramesInPipeline)::value_type &value) {
+ for (const std::shared_ptr<C2Buffer> &buffer : value.second.buffers) {
+ if (buffer) {
+ return false;
+ }
+ }
+ return true;
+ });
+ if (sizeWithInputReleased >=
+ mPipelineDelay + mOutputDelay + mSmoothnessFactor) {
+ ALOGV("pipelineFull: too many frames in pipeline, with input released (%zu)",
+ sizeWithInputReleased);
+ return true;
+ }
+ ALOGV("pipeline has room (total: %zu, input released: %zu)",
+ mFramesInPipeline.size(), sizeWithInputReleased);
+ return false;
+}
+
+PipelineWatcher::Clock::duration PipelineWatcher::elapsed(
+ const PipelineWatcher::Clock::time_point &now) const {
+ return std::accumulate(
+ mFramesInPipeline.begin(),
+ mFramesInPipeline.end(),
+ Clock::duration::zero(),
+ [&now](const Clock::duration &current,
+ const decltype(mFramesInPipeline)::value_type &value) {
+ Clock::duration elapsed = now - value.second.queuedAt;
+ ALOGV("elapsed: frameIndex = %llu elapsed = %lldms",
+ (unsigned long long)value.first,
+ std::chrono::duration_cast<std::chrono::milliseconds>(elapsed).count());
+ return current > elapsed ? current : elapsed;
+ });
+}
+
+} // namespace android
diff --git a/media/codec2/sfplugin/PipelineWatcher.h b/media/codec2/sfplugin/PipelineWatcher.h
new file mode 100644
index 0000000..ce82298
--- /dev/null
+++ b/media/codec2/sfplugin/PipelineWatcher.h
@@ -0,0 +1,78 @@
+/*
+ * Copyright 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef PIPELINE_WATCHER_H_
+#define PIPELINE_WATCHER_H_
+
+#include <chrono>
+#include <map>
+#include <memory>
+
+#include <C2Work.h>
+
+namespace android {
+
+/**
+ * PipelineWatcher watches the status of the work.
+ */
+class PipelineWatcher {
+public:
+ typedef std::chrono::steady_clock Clock;
+
+ PipelineWatcher()
+ : mInputDelay(0),
+ mPipelineDelay(0),
+ mOutputDelay(0),
+ mSmoothnessFactor(0) {}
+ ~PipelineWatcher() = default;
+
+ PipelineWatcher &inputDelay(uint32_t value);
+ PipelineWatcher &pipelineDelay(uint32_t value);
+ PipelineWatcher &outputDelay(uint32_t value);
+ PipelineWatcher &smoothnessFactor(uint32_t value);
+
+ void onWorkQueued(
+ uint64_t frameIndex,
+ std::vector<std::shared_ptr<C2Buffer>> &&buffers,
+ const Clock::time_point &queuedAt);
+ std::shared_ptr<C2Buffer> onInputBufferReleased(
+ uint64_t frameIndex, size_t arrayIndex);
+ void onWorkDone(uint64_t frameIndex);
+ void flush();
+
+ bool pipelineFull() const;
+ Clock::duration elapsed(const Clock::time_point &now) const;
+
+private:
+ uint32_t mInputDelay;
+ uint32_t mPipelineDelay;
+ uint32_t mOutputDelay;
+ uint32_t mSmoothnessFactor;
+
+ struct Frame {
+ Frame(std::vector<std::shared_ptr<C2Buffer>> &&b,
+ const Clock::time_point &q)
+ : buffers(b),
+ queuedAt(q) {}
+ std::vector<std::shared_ptr<C2Buffer>> buffers;
+ const Clock::time_point queuedAt;
+ };
+ std::map<uint64_t, Frame> mFramesInPipeline;
+};
+
+} // namespace android
+
+#endif // PIPELINE_WATCHER_H_
diff --git a/media/codec2/sfplugin/utils/Codec2Mapper.cpp b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
index c369e16..0a6a717 100644
--- a/media/codec2/sfplugin/utils/Codec2Mapper.cpp
+++ b/media/codec2/sfplugin/utils/Codec2Mapper.cpp
@@ -65,7 +65,9 @@
{ C2Config::LEVEL_AVC_5, AVCLevel5 },
{ C2Config::LEVEL_AVC_5_1, AVCLevel51 },
{ C2Config::LEVEL_AVC_5_2, AVCLevel52 },
-
+ { C2Config::LEVEL_AVC_6, AVCLevel6 },
+ { C2Config::LEVEL_AVC_6_1, AVCLevel61 },
+ { C2Config::LEVEL_AVC_6_2, AVCLevel62 },
};
ALookup<C2Config::profile_t, int32_t> sAvcProfiles = {
diff --git a/media/codec2/vndk/C2Config.cpp b/media/codec2/vndk/C2Config.cpp
index 8a27088..34680a7 100644
--- a/media/codec2/vndk/C2Config.cpp
+++ b/media/codec2/vndk/C2Config.cpp
@@ -186,6 +186,9 @@
{ "avc-5", C2Config::LEVEL_AVC_5 },
{ "avc-5.1", C2Config::LEVEL_AVC_5_1 },
{ "avc-5.2", C2Config::LEVEL_AVC_5_2 },
+ { "avc-6", C2Config::LEVEL_AVC_6 },
+ { "avc-6.1", C2Config::LEVEL_AVC_6_1 },
+ { "avc-6.2", C2Config::LEVEL_AVC_6_2 },
{ "hevc-main-1", C2Config::LEVEL_HEVC_MAIN_1 },
{ "hevc-main-2", C2Config::LEVEL_HEVC_MAIN_2 },
{ "hevc-main-2.1", C2Config::LEVEL_HEVC_MAIN_2_1 },
diff --git a/media/codec2/vndk/C2Store.cpp b/media/codec2/vndk/C2Store.cpp
index dc7e89c..f07d9b0 100644
--- a/media/codec2/vndk/C2Store.cpp
+++ b/media/codec2/vndk/C2Store.cpp
@@ -517,7 +517,6 @@
*
* \note Only used by ComponentLoader.
*
- * \param alias[in] module alias
* \param libPath[in] library path
*
* \retval C2_OK the component module has been successfully loaded
@@ -527,7 +526,7 @@
* \retval C2_REFUSED permission denied to load the component module (unexpected)
* \retval C2_TIMED_OUT could not load the module within the time limit (unexpected)
*/
- c2_status_t init(std::string alias, std::string libPath);
+ c2_status_t init(std::string libPath);
virtual ~ComponentModule() override;
@@ -570,7 +569,7 @@
std::shared_ptr<ComponentModule> localModule = mModule.lock();
if (localModule == nullptr) {
localModule = std::make_shared<ComponentModule>();
- res = localModule->init(mAlias, mLibPath);
+ res = localModule->init(mLibPath);
if (res == C2_OK) {
mModule = localModule;
}
@@ -582,13 +581,12 @@
/**
* Creates a component loader for a specific library path (or name).
*/
- ComponentLoader(std::string alias, std::string libPath)
- : mAlias(alias), mLibPath(libPath) {}
+ ComponentLoader(std::string libPath)
+ : mLibPath(libPath) {}
private:
std::mutex mMutex; ///< mutex guarding the module
std::weak_ptr<ComponentModule> mModule; ///< weak reference to the loaded module
- std::string mAlias; ///< component alias
std::string mLibPath; ///< library path
};
@@ -624,9 +622,10 @@
};
/**
- * Retrieves the component loader for a component.
+ * Retrieves the component module for a component.
*
- * \return a non-ref-holding pointer to the component loader.
+ * \param module pointer to a shared_pointer where the component module will be stored on
+ * success.
*
* \retval C2_OK the component loader has been successfully retrieved
* \retval C2_NO_MEMORY not enough memory to locate the component loader
@@ -640,16 +639,25 @@
* component but some components could not be loaded due to lack of
* permissions)
*/
- c2_status_t findComponent(C2String name, ComponentLoader **loader);
+ c2_status_t findComponent(C2String name, std::shared_ptr<ComponentModule> *module);
- std::map<C2String, ComponentLoader> mComponents; ///< map of name -> components
- std::vector<C2String> mComponentsList; ///< list of components
+ /**
+ * Loads each component module and discover its contents.
+ */
+ void visitComponents();
+
+ std::mutex mMutex; ///< mutex guarding the component lists during construction
+ bool mVisited; ///< component modules visited
+ std::map<C2String, ComponentLoader> mComponents; ///< path -> component module
+ std::map<C2String, C2String> mComponentNameToPath; ///< name -> path
+ std::vector<std::shared_ptr<const C2Component::Traits>> mComponentList;
+
std::shared_ptr<C2ReflectorHelper> mReflector;
Interface mInterface;
};
c2_status_t C2PlatformComponentStore::ComponentModule::init(
- std::string alias, std::string libPath) {
+ std::string libPath) {
ALOGV("in %s", __func__);
ALOGV("loading dll");
mLibHandle = dlopen(libPath.c_str(), RTLD_NOW|RTLD_NODELETE);
@@ -684,13 +692,27 @@
std::shared_ptr<C2Component::Traits> traits(new (std::nothrow) C2Component::Traits);
if (traits) {
- if (alias != intf->getName()) {
- ALOGV("%s is alias to %s", alias.c_str(), intf->getName().c_str());
+ traits->name = intf->getName();
+
+ C2ComponentKindSetting kind;
+ C2ComponentDomainSetting domain;
+ res = intf->query_vb({ &kind, &domain }, {}, C2_MAY_BLOCK, nullptr);
+ bool fixDomain = res != C2_OK;
+ if (res == C2_OK) {
+ traits->kind = kind.value;
+ traits->domain = domain.value;
+ } else {
+ // TODO: remove this fall-back
+ ALOGD("failed to query interface for kind and domain: %d", res);
+
+ traits->kind =
+ (traits->name.find("encoder") != std::string::npos) ? C2Component::KIND_ENCODER :
+ (traits->name.find("decoder") != std::string::npos) ? C2Component::KIND_DECODER :
+ C2Component::KIND_OTHER;
}
- traits->name = alias;
- // TODO: get this from interface properly.
- bool encoder = (traits->name.find("encoder") != std::string::npos);
- uint32_t mediaTypeIndex = encoder ? C2PortMimeConfig::output::PARAM_TYPE
+
+ uint32_t mediaTypeIndex =
+ traits->kind == C2Component::KIND_ENCODER ? C2PortMimeConfig::output::PARAM_TYPE
: C2PortMimeConfig::input::PARAM_TYPE;
std::vector<std::unique_ptr<C2Param>> params;
res = intf->query_vb({}, { mediaTypeIndex }, C2_MAY_BLOCK, &params);
@@ -702,29 +724,54 @@
ALOGD("failed to query interface: unexpected vector size: %zu", params.size());
return mInit;
}
- C2PortMimeConfig *mediaTypeConfig = (C2PortMimeConfig *)(params[0].get());
+ C2PortMimeConfig *mediaTypeConfig = C2PortMimeConfig::From(params[0].get());
if (mediaTypeConfig == nullptr) {
ALOGD("failed to query media type");
return mInit;
}
- traits->mediaType = mediaTypeConfig->m.value;
- // TODO: get this properly.
- traits->rank = 0x200;
+ traits->mediaType =
+ std::string(mediaTypeConfig->m.value,
+ strnlen(mediaTypeConfig->m.value, mediaTypeConfig->flexCount()));
- // TODO: define these values properly
- bool decoder = (traits->name.find("decoder") != std::string::npos);
- traits->kind =
- decoder ? C2Component::KIND_DECODER :
- encoder ? C2Component::KIND_ENCODER :
- C2Component::KIND_OTHER;
- if (strncmp(traits->mediaType.c_str(), "audio/", 6) == 0) {
- traits->domain = C2Component::DOMAIN_AUDIO;
- } else if (strncmp(traits->mediaType.c_str(), "video/", 6) == 0) {
- traits->domain = C2Component::DOMAIN_VIDEO;
- } else if (strncmp(traits->mediaType.c_str(), "image/", 6) == 0) {
- traits->domain = C2Component::DOMAIN_IMAGE;
- } else {
- traits->domain = C2Component::DOMAIN_OTHER;
+ if (fixDomain) {
+ if (strncmp(traits->mediaType.c_str(), "audio/", 6) == 0) {
+ traits->domain = C2Component::DOMAIN_AUDIO;
+ } else if (strncmp(traits->mediaType.c_str(), "video/", 6) == 0) {
+ traits->domain = C2Component::DOMAIN_VIDEO;
+ } else if (strncmp(traits->mediaType.c_str(), "image/", 6) == 0) {
+ traits->domain = C2Component::DOMAIN_IMAGE;
+ } else {
+ traits->domain = C2Component::DOMAIN_OTHER;
+ }
+ }
+
+ // TODO: get this properly from the store during emplace
+ switch (traits->domain) {
+ case C2Component::DOMAIN_AUDIO:
+ traits->rank = 8;
+ break;
+ default:
+ traits->rank = 512;
+ }
+
+ params.clear();
+ res = intf->query_vb({}, { C2ComponentAliasesSetting::PARAM_TYPE }, C2_MAY_BLOCK, &params);
+ if (res == C2_OK && params.size() == 1u) {
+ C2ComponentAliasesSetting *aliasesSetting =
+ C2ComponentAliasesSetting::From(params[0].get());
+ if (aliasesSetting) {
+ // Split aliases on ','
+ // This looks simpler in plain C and even std::string would still make a copy.
+ char *aliases = ::strndup(aliasesSetting->m.value, aliasesSetting->flexCount());
+ ALOGD("'%s' has aliases: '%s'", intf->getName().c_str(), aliases);
+
+ for (char *tok, *ptr, *str = aliases; (tok = ::strtok_r(str, ",", &ptr));
+ str = nullptr) {
+ traits->aliases.push_back(tok);
+ ALOGD("adding alias: '%s'", tok);
+ }
+ free(aliases);
+ }
}
}
mTraits = traits;
@@ -783,82 +830,45 @@
}
C2PlatformComponentStore::C2PlatformComponentStore()
- : mReflector(std::make_shared<C2ReflectorHelper>()),
+ : mVisited(false),
+ mReflector(std::make_shared<C2ReflectorHelper>()),
mInterface(mReflector) {
- auto emplace = [this](const char *alias, const char *libPath) {
- // ComponentLoader is neither copiable nor movable, so it must be
- // constructed in-place. Now ComponentLoader takes two arguments in
- // constructor, so we need to use piecewise_construct to achieve this
- // behavior.
- mComponents.emplace(
- std::piecewise_construct,
- std::forward_as_tuple(alias),
- std::forward_as_tuple(alias, libPath));
- mComponentsList.emplace_back(alias);
+ auto emplace = [this](const char *libPath) {
+ mComponents.emplace(libPath, libPath);
};
- // TODO: move this also into a .so so it can be updated
- emplace("c2.android.avc.decoder", "libcodec2_soft_avcdec.so");
- emplace("c2.android.avc.encoder", "libcodec2_soft_avcenc.so");
- emplace("c2.android.aac.decoder", "libcodec2_soft_aacdec.so");
- emplace("c2.android.aac.encoder", "libcodec2_soft_aacenc.so");
- emplace("c2.android.amrnb.decoder", "libcodec2_soft_amrnbdec.so");
- emplace("c2.android.amrnb.encoder", "libcodec2_soft_amrnbenc.so");
- emplace("c2.android.amrwb.decoder", "libcodec2_soft_amrwbdec.so");
- emplace("c2.android.amrwb.encoder", "libcodec2_soft_amrwbenc.so");
- emplace("c2.android.hevc.decoder", "libcodec2_soft_hevcdec.so");
- emplace("c2.android.g711.alaw.decoder", "libcodec2_soft_g711alawdec.so");
- emplace("c2.android.g711.mlaw.decoder", "libcodec2_soft_g711mlawdec.so");
- emplace("c2.android.mpeg2.decoder", "libcodec2_soft_mpeg2dec.so");
- emplace("c2.android.h263.decoder", "libcodec2_soft_h263dec.so");
- emplace("c2.android.h263.encoder", "libcodec2_soft_h263enc.so");
- emplace("c2.android.mpeg4.decoder", "libcodec2_soft_mpeg4dec.so");
- emplace("c2.android.mpeg4.encoder", "libcodec2_soft_mpeg4enc.so");
- emplace("c2.android.mp3.decoder", "libcodec2_soft_mp3dec.so");
- emplace("c2.android.vorbis.decoder", "libcodec2_soft_vorbisdec.so");
- emplace("c2.android.opus.decoder", "libcodec2_soft_opusdec.so");
- emplace("c2.android.opus.encoder", "libcodec2_soft_opusenc.so");
- emplace("c2.android.vp8.decoder", "libcodec2_soft_vp8dec.so");
- emplace("c2.android.vp9.decoder", "libcodec2_soft_vp9dec.so");
- emplace("c2.android.vp8.encoder", "libcodec2_soft_vp8enc.so");
- emplace("c2.android.vp9.encoder", "libcodec2_soft_vp9enc.so");
- emplace("c2.android.av1.decoder", "libcodec2_soft_av1dec.so");
- emplace("c2.android.raw.decoder", "libcodec2_soft_rawdec.so");
- emplace("c2.android.flac.decoder", "libcodec2_soft_flacdec.so");
- emplace("c2.android.flac.encoder", "libcodec2_soft_flacenc.so");
- emplace("c2.android.gsm.decoder", "libcodec2_soft_gsmdec.so");
- emplace("c2.android.xaac.decoder", "libcodec2_soft_xaacdec.so");
- // "Aliases"
- // TODO: use aliases proper from C2Component::Traits
- emplace("OMX.google.h264.decoder", "libcodec2_soft_avcdec.so");
- emplace("OMX.google.h264.encoder", "libcodec2_soft_avcenc.so");
- emplace("OMX.google.aac.decoder", "libcodec2_soft_aacdec.so");
- emplace("OMX.google.aac.encoder", "libcodec2_soft_aacenc.so");
- emplace("OMX.google.amrnb.decoder", "libcodec2_soft_amrnbdec.so");
- emplace("OMX.google.amrnb.encoder", "libcodec2_soft_amrnbenc.so");
- emplace("OMX.google.amrwb.decoder", "libcodec2_soft_amrwbdec.so");
- emplace("OMX.google.amrwb.encoder", "libcodec2_soft_amrwbenc.so");
- emplace("OMX.google.hevc.decoder", "libcodec2_soft_hevcdec.so");
- emplace("OMX.google.g711.alaw.decoder", "libcodec2_soft_g711alawdec.so");
- emplace("OMX.google.g711.mlaw.decoder", "libcodec2_soft_g711mlawdec.so");
- emplace("OMX.google.mpeg2.decoder", "libcodec2_soft_mpeg2dec.so");
- emplace("OMX.google.h263.decoder", "libcodec2_soft_h263dec.so");
- emplace("OMX.google.h263.encoder", "libcodec2_soft_h263enc.so");
- emplace("OMX.google.mpeg4.decoder", "libcodec2_soft_mpeg4dec.so");
- emplace("OMX.google.mpeg4.encoder", "libcodec2_soft_mpeg4enc.so");
- emplace("OMX.google.mp3.decoder", "libcodec2_soft_mp3dec.so");
- emplace("OMX.google.vorbis.decoder", "libcodec2_soft_vorbisdec.so");
- emplace("OMX.google.opus.decoder", "libcodec2_soft_opusdec.so");
- emplace("OMX.google.vp8.decoder", "libcodec2_soft_vp8dec.so");
- emplace("OMX.google.vp9.decoder", "libcodec2_soft_vp9dec.so");
- emplace("OMX.google.vp8.encoder", "libcodec2_soft_vp8enc.so");
- emplace("OMX.google.vp9.encoder", "libcodec2_soft_vp9enc.so");
- emplace("OMX.google.raw.decoder", "libcodec2_soft_rawdec.so");
- emplace("OMX.google.flac.decoder", "libcodec2_soft_flacdec.so");
- emplace("OMX.google.flac.encoder", "libcodec2_soft_flacenc.so");
- emplace("OMX.google.gsm.decoder", "libcodec2_soft_gsmdec.so");
- emplace("OMX.google.xaac.decoder", "libcodec2_soft_xaacdec.so");
+ // TODO: move this also into a .so so it can be updated
+ emplace("libcodec2_soft_aacdec.so");
+ emplace("libcodec2_soft_aacenc.so");
+ emplace("libcodec2_soft_amrnbdec.so");
+ emplace("libcodec2_soft_amrnbenc.so");
+ emplace("libcodec2_soft_amrwbdec.so");
+ emplace("libcodec2_soft_amrwbenc.so");
+ emplace("libcodec2_soft_av1dec.so");
+ emplace("libcodec2_soft_avcdec.so");
+ emplace("libcodec2_soft_avcenc.so");
+ emplace("libcodec2_soft_flacdec.so");
+ emplace("libcodec2_soft_flacenc.so");
+ emplace("libcodec2_soft_g711alawdec.so");
+ emplace("libcodec2_soft_g711mlawdec.so");
+ emplace("libcodec2_soft_gsmdec.so");
+ emplace("libcodec2_soft_h263dec.so");
+ emplace("libcodec2_soft_h263enc.so");
+ emplace("libcodec2_soft_hevcdec.so");
+ emplace("libcodec2_soft_mp3dec.so");
+ emplace("libcodec2_soft_mpeg2dec.so");
+ emplace("libcodec2_soft_mpeg4dec.so");
+ emplace("libcodec2_soft_mpeg4enc.so");
+ emplace("libcodec2_soft_opusdec.so");
+ emplace("libcodec2_soft_opusenc.so");
+ emplace("libcodec2_soft_rawdec.so");
+ emplace("libcodec2_soft_vorbisdec.so");
+ emplace("libcodec2_soft_vp8dec.so");
+ emplace("libcodec2_soft_vp8enc.so");
+ emplace("libcodec2_soft_vp9dec.so");
+ emplace("libcodec2_soft_vp9enc.so");
+ emplace("libcodec2_soft_xaacdec.so");
}
c2_status_t C2PlatformComponentStore::copyBuffer(
@@ -881,47 +891,56 @@
return mInterface.config(params, C2_MAY_BLOCK, failures);
}
-std::vector<std::shared_ptr<const C2Component::Traits>> C2PlatformComponentStore::listComponents() {
- // This method SHALL return within 500ms.
- std::vector<std::shared_ptr<const C2Component::Traits>> list;
- for (const C2String &alias : mComponentsList) {
- ComponentLoader &loader = mComponents.at(alias);
+void C2PlatformComponentStore::visitComponents() {
+ std::lock_guard<std::mutex> lock(mMutex);
+ if (mVisited) {
+ return;
+ }
+ for (auto &pathAndLoader : mComponents) {
+ const C2String &path = pathAndLoader.first;
+ ComponentLoader &loader = pathAndLoader.second;
std::shared_ptr<ComponentModule> module;
- c2_status_t res = loader.fetchModule(&module);
- if (res == C2_OK) {
+ if (loader.fetchModule(&module) == C2_OK) {
std::shared_ptr<const C2Component::Traits> traits = module->getTraits();
if (traits) {
- list.push_back(traits);
+ mComponentList.push_back(traits);
+ mComponentNameToPath.emplace(traits->name, path);
+ for (const C2String &alias : traits->aliases) {
+ mComponentNameToPath.emplace(alias, path);
+ }
}
}
}
- return list;
+ mVisited = true;
}
-c2_status_t C2PlatformComponentStore::findComponent(C2String name, ComponentLoader **loader) {
- *loader = nullptr;
- auto pos = mComponents.find(name);
- // TODO: check aliases
- if (pos == mComponents.end()) {
- return C2_NOT_FOUND;
+std::vector<std::shared_ptr<const C2Component::Traits>> C2PlatformComponentStore::listComponents() {
+ // This method SHALL return within 500ms.
+ visitComponents();
+ return mComponentList;
+}
+
+c2_status_t C2PlatformComponentStore::findComponent(
+ C2String name, std::shared_ptr<ComponentModule> *module) {
+ (*module).reset();
+ visitComponents();
+
+ auto pos = mComponentNameToPath.find(name);
+ if (pos != mComponentNameToPath.end()) {
+ return mComponents.at(pos->second).fetchModule(module);
}
- *loader = &pos->second;
- return C2_OK;
+ return C2_NOT_FOUND;
}
c2_status_t C2PlatformComponentStore::createComponent(
C2String name, std::shared_ptr<C2Component> *const component) {
// This method SHALL return within 100ms.
component->reset();
- ComponentLoader *loader;
- c2_status_t res = findComponent(name, &loader);
+ std::shared_ptr<ComponentModule> module;
+ c2_status_t res = findComponent(name, &module);
if (res == C2_OK) {
- std::shared_ptr<ComponentModule> module;
- res = loader->fetchModule(&module);
- if (res == C2_OK) {
- // TODO: get a unique node ID
- res = module->createComponent(0, component);
- }
+ // TODO: get a unique node ID
+ res = module->createComponent(0, component);
}
return res;
}
@@ -930,15 +949,11 @@
C2String name, std::shared_ptr<C2ComponentInterface> *const interface) {
// This method SHALL return within 100ms.
interface->reset();
- ComponentLoader *loader;
- c2_status_t res = findComponent(name, &loader);
+ std::shared_ptr<ComponentModule> module;
+ c2_status_t res = findComponent(name, &module);
if (res == C2_OK) {
- std::shared_ptr<ComponentModule> module;
- res = loader->fetchModule(&module);
- if (res == C2_OK) {
- // TODO: get a unique node ID
- res = module->createInterface(0, interface);
- }
+ // TODO: get a unique node ID
+ res = module->createInterface(0, interface);
}
return res;
}
diff --git a/media/extractors/mp3/MP3Extractor.cpp b/media/extractors/mp3/MP3Extractor.cpp
index 61838f6..a838ae6 100644
--- a/media/extractors/mp3/MP3Extractor.cpp
+++ b/media/extractors/mp3/MP3Extractor.cpp
@@ -708,6 +708,7 @@
}
static const char *extensions[] = {
+ "mp2",
"mp3",
"mpeg",
"mpg",
diff --git a/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h b/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
index 9711b86..8eb70b1 100644
--- a/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
+++ b/media/libaaudio/examples/loopback/src/LoopbackAnalyzer.h
@@ -310,7 +310,7 @@
}
// Write SHORT data from the first channel.
- int write(int16_t *inputData, int inputChannelCount, int numFrames) {
+ int32_t write(int16_t *inputData, int32_t inputChannelCount, int32_t numFrames) {
// stop at end of buffer
if ((mFrameCounter + numFrames) > mMaxFrames) {
numFrames = mMaxFrames - mFrameCounter;
@@ -322,7 +322,7 @@
}
// Write FLOAT data from the first channel.
- int write(float *inputData, int inputChannelCount, int numFrames) {
+ int32_t write(float *inputData, int32_t inputChannelCount, int32_t numFrames) {
// stop at end of buffer
if ((mFrameCounter + numFrames) > mMaxFrames) {
numFrames = mMaxFrames - mFrameCounter;
@@ -333,7 +333,7 @@
return numFrames;
}
- int size() {
+ int32_t size() {
return mFrameCounter;
}
@@ -443,9 +443,14 @@
virtual ~LoopbackProcessor() = default;
+ enum process_result {
+ PROCESS_RESULT_OK,
+ PROCESS_RESULT_GLITCH
+ };
+
virtual void reset() {}
- virtual void process(float *inputData, int inputChannelCount,
+ virtual process_result process(float *inputData, int inputChannelCount,
float *outputData, int outputChannelCount,
int numFrames) = 0;
@@ -639,7 +644,7 @@
return getSampleRate() / 8;
}
- void process(float *inputData, int inputChannelCount,
+ process_result process(float *inputData, int inputChannelCount,
float *outputData, int outputChannelCount,
int numFrames) override {
int channelsValid = std::min(inputChannelCount, outputChannelCount);
@@ -750,6 +755,7 @@
mState = nextState;
mLoopCounter++;
+ return PROCESS_RESULT_OK;
}
int save(const char *fileName) override {
@@ -896,9 +902,10 @@
* @param inputData contains microphone data with sine signal feedback
* @param outputData contains the reference sine wave
*/
- void process(float *inputData, int inputChannelCount,
+ process_result process(float *inputData, int inputChannelCount,
float *outputData, int outputChannelCount,
int numFrames) override {
+ process_result result = PROCESS_RESULT_OK;
mProcessCount++;
float peak = measurePeakAmplitude(inputData, inputChannelCount, numFrames);
@@ -978,6 +985,7 @@
mMaxGlitchDelta = std::max(mMaxGlitchDelta, absDiff);
if (absDiff > mTolerance) {
mGlitchCount++;
+ result = PROCESS_RESULT_GLITCH;
//printf("%5d: Got a glitch # %d, predicted = %f, actual = %f\n",
// mFrameCounter, mGlitchCount, predicted, sample);
mState = STATE_IMMUNE;
@@ -1018,6 +1026,7 @@
mFrameCounter++;
}
+ return result;
}
void resetAccumulator() {
diff --git a/media/libaaudio/examples/loopback/src/loopback.cpp b/media/libaaudio/examples/loopback/src/loopback.cpp
index 3de1514..6578156 100644
--- a/media/libaaudio/examples/loopback/src/loopback.cpp
+++ b/media/libaaudio/examples/loopback/src/loopback.cpp
@@ -34,9 +34,13 @@
#include "AAudioSimpleRecorder.h"
#include "AAudioExampleUtils.h"
#include "LoopbackAnalyzer.h"
+#include "../../utils/AAudioExampleUtils.h"
-// V0.4.00 = rectify and low-pass filter the echos, use auto-correlation on entire echo
-#define APP_VERSION "0.4.00"
+// V0.4.00 = rectify and low-pass filter the echos, auto-correlate entire echo
+// V0.4.01 = add -h hang option
+// fix -n option to set output buffer for -tm
+// plot first glitch
+#define APP_VERSION "0.4.01"
// Tag for machine readable results as property = value pairs
#define RESULT_TAG "RESULT: "
@@ -47,10 +51,14 @@
constexpr int kLogPeriodMillis = 1000;
constexpr int kNumInputChannels = 1;
constexpr int kNumCallbacksToDrain = 20;
+constexpr int kNumCallbacksToNotRead = 0; // let input fill back up
constexpr int kNumCallbacksToDiscard = 20;
+constexpr int kDefaultHangTimeMillis = 50;
+constexpr int kMaxGlitchEventsToSave = 32;
struct LoopbackData {
AAudioStream *inputStream = nullptr;
+ AAudioStream *outputStream = nullptr;
int32_t inputFramesMaximum = 0;
int16_t *inputShortData = nullptr;
float *inputFloatData = nullptr;
@@ -58,6 +66,7 @@
int32_t actualInputChannelCount = 0;
int32_t actualOutputChannelCount = 0;
int32_t numCallbacksToDrain = kNumCallbacksToDrain;
+ int32_t numCallbacksToNotRead = kNumCallbacksToNotRead;
int32_t numCallbacksToDiscard = kNumCallbacksToDiscard;
int32_t minNumFrames = INT32_MAX;
int32_t maxNumFrames = 0;
@@ -65,6 +74,9 @@
int32_t insufficientReadFrames = 0;
int32_t framesReadTotal = 0;
int32_t framesWrittenTotal = 0;
+ int32_t hangPeriodMillis = 5 * 1000; // time between hangs
+ int32_t hangCountdownFrames = 5 * 48000; // frames til next hang
+ int32_t hangTimeMillis = 0; // 0 for no hang
bool isDone = false;
aaudio_result_t inputError = AAUDIO_OK;
@@ -74,6 +86,29 @@
EchoAnalyzer echoAnalyzer;
AudioRecording audioRecording;
LoopbackProcessor *loopbackProcessor;
+
+ int32_t glitchFrames[kMaxGlitchEventsToSave];
+ int32_t numGlitchEvents = 0;
+
+ void hangIfRequested(int32_t numFrames) {
+ if (hangTimeMillis > 0) {
+ hangCountdownFrames -= numFrames;
+ if (hangCountdownFrames <= 0) {
+ const int64_t startNanos = getNanoseconds();
+ usleep(hangTimeMillis * 1000);
+ const int64_t endNanos = getNanoseconds();
+ const int32_t elapsedMicros = (int32_t)
+ ((endNanos - startNanos) / 1000);
+ printf("callback hanging for %d millis, actual = %d micros\n",
+ hangTimeMillis, elapsedMicros);
+ hangCountdownFrames = (int64_t) hangPeriodMillis
+ * AAudioStream_getSampleRate(outputStream)
+ / 1000;
+ }
+ }
+
+
+ }
};
static void convertPcm16ToFloat(const int16_t *source,
@@ -166,6 +201,9 @@
myData->numCallbacksToDrain--;
}
+ } else if (myData->numCallbacksToNotRead > 0) {
+ // Let the input fill up a bit so we are not so close to the write pointer.
+ myData->numCallbacksToNotRead--;
} else if (myData->numCallbacksToDiscard > 0) {
// Ignore. Allow the input to fill back up to equilibrium with the output.
actualFramesRead = readFormattedData(myData, numFrames);
@@ -175,6 +213,7 @@
myData->numCallbacksToDiscard--;
} else {
+ myData->hangIfRequested(numFrames);
int32_t numInputBytes = numFrames * myData->actualInputChannelCount * sizeof(float);
memset(myData->inputFloatData, 0 /* value */, numInputBytes);
@@ -191,7 +230,7 @@
if (actualFramesRead < numFrames) {
if(actualFramesRead < (int32_t) framesAvailable) {
- printf("insufficient but numFrames = %d"
+ printf("insufficient for no reason, numFrames = %d"
", actualFramesRead = %d"
", inputFramesWritten = %d"
", inputFramesRead = %d"
@@ -212,16 +251,25 @@
if (myData->actualInputFormat == AAUDIO_FORMAT_PCM_I16) {
convertPcm16ToFloat(myData->inputShortData, myData->inputFloatData, numSamples);
}
- // Save for later.
- myData->audioRecording.write(myData->inputFloatData,
- myData->actualInputChannelCount,
- numFrames);
+
// Analyze the data.
- myData->loopbackProcessor->process(myData->inputFloatData,
+ LoopbackProcessor::process_result procResult = myData->loopbackProcessor->process(myData->inputFloatData,
myData->actualInputChannelCount,
outputData,
myData->actualOutputChannelCount,
numFrames);
+
+ if (procResult == LoopbackProcessor::PROCESS_RESULT_GLITCH) {
+ if (myData->numGlitchEvents < kMaxGlitchEventsToSave) {
+ myData->glitchFrames[myData->numGlitchEvents++] = myData->audioRecording.size();
+ }
+ }
+
+ // Save for later.
+ myData->audioRecording.write(myData->inputFloatData,
+ myData->actualInputChannelCount,
+ actualFramesRead);
+
myData->isDone = myData->loopbackProcessor->isDone();
if (myData->isDone) {
result = AAUDIO_CALLBACK_RESULT_STOP;
@@ -249,6 +297,7 @@
printf(" -C{channels} number of input channels\n");
printf(" -F{0,1,2} input format, 1=I16, 2=FLOAT\n");
printf(" -g{gain} recirculating loopback gain\n");
+ printf(" -h{hangMillis} occasionally hang in the callback\n");
printf(" -P{inPerf} set input AAUDIO_PERFORMANCE_MODE*\n");
printf(" n for _NONE\n");
printf(" l for _LATENCY\n");
@@ -307,9 +356,7 @@
return testMode;
}
-void printAudioGraph(AudioRecording &recording, int numSamples) {
- int32_t start = recording.size() / 2;
- int32_t end = start + numSamples;
+void printAudioGraphRegion(AudioRecording &recording, int32_t start, int32_t end) {
if (end >= recording.size()) {
end = recording.size() - 1;
}
@@ -352,7 +399,7 @@
int32_t requestedInputCapacity = AAUDIO_UNSPECIFIED;
aaudio_performance_mode_t inputPerformanceLevel = AAUDIO_PERFORMANCE_MODE_LOW_LATENCY;
- int32_t outputFramesPerBurst = 0;
+ int32_t outputFramesPerBurst = 0;
aaudio_format_t actualOutputFormat = AAUDIO_FORMAT_INVALID;
int32_t actualSampleRate = 0;
@@ -360,6 +407,7 @@
int testMode = TEST_ECHO_LATENCY;
double gain = 1.0;
+ int hangTimeMillis = 0;
// Make printf print immediately so that debug info is not stuck
// in a buffer if we hang or crash.
@@ -389,6 +437,15 @@
case 'g':
gain = atof(&arg[2]);
break;
+ case 'h':
+ // Was there a number after the "-h"?
+ if (arg[2]) {
+ hangTimeMillis = atoi(&arg[2]);
+ } else {
+ // If no number then use the default.
+ hangTimeMillis = kDefaultHangTimeMillis;
+ }
+ break;
case 'P':
inputPerformanceLevel = parsePerformanceMode(arg[2]);
break;
@@ -422,6 +479,8 @@
int32_t timeMillis = 0;
int32_t recordingDuration = std::min(60 * 5, requestedDuration);
+ int32_t requestedOutputBursts = argParser.getNumberOfBursts();
+
switch(testMode) {
case TEST_SINE_MAGNITUDE:
loopbackData.loopbackProcessor = &loopbackData.sineAnalyzer;
@@ -453,7 +512,7 @@
fprintf(stderr, "ERROR - player.open() returned %d\n", result);
exit(1);
}
- outputStream = player.getStream();
+ outputStream = loopbackData.outputStream = player.getStream();
actualOutputFormat = AAudioStream_getFormat(outputStream);
if (actualOutputFormat != AAUDIO_FORMAT_PCM_FLOAT) {
@@ -489,24 +548,29 @@
{
int32_t actualCapacity = AAudioStream_getBufferCapacityInFrames(inputStream);
- result = AAudioStream_setBufferSizeInFrames(inputStream, actualCapacity);
- if (result < 0) {
- fprintf(stderr, "ERROR - AAudioStream_setBufferSizeInFrames() returned %d\n", result);
- goto finish;
- } else {}
- }
+ (void) AAudioStream_setBufferSizeInFrames(inputStream, actualCapacity);
- argParser.compareWithStream(inputStream);
+ if (testMode == TEST_SINE_MAGNITUDE
+ && requestedOutputBursts == AAUDIO_UNSPECIFIED) {
+ result = AAudioStream_setBufferSizeInFrames(outputStream, actualCapacity);
+ if (result < 0) {
+ fprintf(stderr, "ERROR - AAudioStream_setBufferSizeInFrames(output) returned %d\n",
+ result);
+ goto finish;
+ } else {
+ printf("Output buffer size set to match input capacity = %d frames!\n", result);
+ }
+ }
- // If the input stream is too small then we cannot satisfy the output callback.
- {
- int32_t actualCapacity = AAudioStream_getBufferCapacityInFrames(inputStream);
+ // If the input stream is too small then we cannot satisfy the output callback.
if (actualCapacity < 2 * outputFramesPerBurst) {
fprintf(stderr, "ERROR - input capacity < 2 * outputFramesPerBurst\n");
goto finish;
}
}
+ argParser.compareWithStream(inputStream);
+
// ------- Setup loopbackData -----------------------------
loopbackData.actualInputFormat = AAudioStream_getFormat(inputStream);
@@ -525,6 +589,8 @@
loopbackData.loopbackProcessor->reset();
+ loopbackData.hangTimeMillis = hangTimeMillis;
+
// Start OUTPUT first so INPUT does not overflow.
result = player.start();
if (result != AAUDIO_OK) {
@@ -611,7 +677,17 @@
if (loopbackData.inputError == AAUDIO_OK) {
if (testMode == TEST_SINE_MAGNITUDE) {
- printAudioGraph(loopbackData.audioRecording, 200);
+ if (loopbackData.numGlitchEvents > 0) {
+ // Graph around the first glitch if there is one.
+ const int32_t start = loopbackData.glitchFrames[0] - 8;
+ const int32_t end = start + outputFramesPerBurst + 8 + 8;
+ printAudioGraphRegion(loopbackData.audioRecording, start, end);
+ } else {
+ // Or graph the middle of the signal.
+ const int32_t start = loopbackData.audioRecording.size() / 2;
+ const int32_t end = start + 200;
+ printAudioGraphRegion(loopbackData.audioRecording, start, end);
+ }
}
loopbackData.loopbackProcessor->report();
@@ -661,6 +737,11 @@
delete[] loopbackData.inputShortData;
report_result:
+
+ for (int i = 0; i < loopbackData.numGlitchEvents; i++) {
+ printf(" glitch at frame %d\n", loopbackData.glitchFrames[i]);
+ }
+
written = loopbackData.loopbackProcessor->save(FILENAME_PROCESSED);
if (written > 0) {
printf("main() wrote %8d processed samples to \"%s\" on Android device\n",
diff --git a/media/libaaudio/examples/utils/AAudioArgsParser.h b/media/libaaudio/examples/utils/AAudioArgsParser.h
index a5dc55f..f5ed7aa 100644
--- a/media/libaaudio/examples/utils/AAudioArgsParser.h
+++ b/media/libaaudio/examples/utils/AAudioArgsParser.h
@@ -130,12 +130,10 @@
}
int32_t getBufferCapacity() const {
- printf("%s() returns %d\n", __func__, mBufferCapacity);
return mBufferCapacity;
}
void setBufferCapacity(int32_t frames) {
- printf("%s(%d)\n", __func__, frames);
mBufferCapacity = frames;
}
diff --git a/media/libaudioclient/AudioRecord.cpp b/media/libaudioclient/AudioRecord.cpp
index 72a23e3..8afb1cc 100644
--- a/media/libaudioclient/AudioRecord.cpp
+++ b/media/libaudioclient/AudioRecord.cpp
@@ -355,7 +355,10 @@
}
// create the IAudioRecord
- status = createRecord_l(0 /*epoch*/, mOpPackageName);
+ {
+ AutoMutex lock(mLock);
+ status = createRecord_l(0 /*epoch*/, mOpPackageName);
+ }
ALOGV("%s(%d): status %d", __func__, mPortId, status);
@@ -1358,12 +1361,14 @@
ALOGW("%s(%d): removing NULL callback!", __func__, mPortId);
return BAD_VALUE;
}
- AutoMutex lock(mLock);
- if (mDeviceCallback.unsafe_get() != callback.get()) {
- ALOGW("%s(%d): removing different callback!", __func__, mPortId);
- return INVALID_OPERATION;
+ {
+ AutoMutex lock(mLock);
+ if (mDeviceCallback.unsafe_get() != callback.get()) {
+ ALOGW("%s(%d): removing different callback!", __func__, mPortId);
+ return INVALID_OPERATION;
+ }
+ mDeviceCallback.clear();
}
- mDeviceCallback.clear();
if (mInput != AUDIO_IO_HANDLE_NONE) {
AudioSystem::removeAudioDeviceCallback(this, mInput);
}
diff --git a/media/libaudioclient/AudioSystem.cpp b/media/libaudioclient/AudioSystem.cpp
index 41a7ff0..52ad5a6 100644
--- a/media/libaudioclient/AudioSystem.cpp
+++ b/media/libaudioclient/AudioSystem.cpp
@@ -522,8 +522,9 @@
if (ioDesc == 0 || ioDesc->mIoHandle == AUDIO_IO_HANDLE_NONE) return;
audio_port_handle_t deviceId = AUDIO_PORT_HANDLE_NONE;
- Vector < wp<AudioDeviceCallback> > callbacks;
-
+ AudioDeviceCallbacks callbacks;
+ bool deviceValidOrChanged = false;
+ Mutex::Autolock _l(mCallbacksLock);
{
Mutex::Autolock _l(mLock);
@@ -546,6 +547,13 @@
ssize_t ioIndex = mAudioDeviceCallbacks.indexOfKey(ioDesc->mIoHandle);
if (ioIndex >= 0) {
callbacks = mAudioDeviceCallbacks.valueAt(ioIndex);
+ deviceValidOrChanged = true;
+ }
+ }
+ if (event == AUDIO_OUTPUT_REGISTERED || event == AUDIO_INPUT_REGISTERED) {
+ ssize_t ioIndex = mAudioDeviceCallbacks.indexOfKey(ioDesc->mIoHandle);
+ if ((ioIndex >= 0) && !mAudioDeviceCallbacks.valueAt(ioIndex).notifiedOnce()) {
+ callbacks = mAudioDeviceCallbacks.valueAt(ioIndex);
}
}
}
@@ -584,6 +592,7 @@
mIoDescriptors.replaceValueFor(ioDesc->mIoHandle, ioDesc);
if (deviceId != ioDesc->getDeviceId()) {
+ deviceValidOrChanged = true;
deviceId = ioDesc->getDeviceId();
ssize_t ioIndex = mAudioDeviceCallbacks.indexOfKey(ioDesc->mIoHandle);
if (ioIndex >= 0) {
@@ -600,22 +609,28 @@
} break;
}
}
- bool callbackRemoved = false;
// callbacks.size() != 0 => ioDesc->mIoHandle and deviceId are valid
- for (size_t i = 0; i < callbacks.size(); ) {
- sp<AudioDeviceCallback> callback = callbacks[i].promote();
- if (callback.get() != nullptr) {
- callback->onAudioDeviceUpdate(ioDesc->mIoHandle, deviceId);
- i++;
- } else {
- callbacks.removeAt(i);
- callbackRemoved = true;
+ if (callbacks.size() != 0) {
+ for (size_t i = 0; i < callbacks.size(); ) {
+ sp<AudioDeviceCallback> callback = callbacks[i].promote();
+ if (callback.get() != nullptr) {
+ // Call the callback only if the device actually changed, the input or output was
+ // opened or closed or the client was newly registered and the callback was never
+ // called
+ if (!callback->notifiedOnce() || deviceValidOrChanged) {
+ // Must be called without mLock held. May lead to dead lock if calling for
+ // example getRoutedDevice that updates the device and tries to acquire mLock.
+ callback->onAudioDeviceUpdate(ioDesc->mIoHandle, deviceId);
+ callback->setNotifiedOnce();
+ }
+ i++;
+ } else {
+ callbacks.removeAt(i);
+ }
}
- }
- // clean up callback list while we are here if some clients have disappeared without
- // unregistering their callback
- if (callbackRemoved) {
- Mutex::Autolock _l(mLock);
+ callbacks.setNotifiedOnce();
+ // clean up callback list while we are here if some clients have disappeared without
+ // unregistering their callback, or if cb was served for the first time since registered
mAudioDeviceCallbacks.replaceValueFor(ioDesc->mIoHandle, callbacks);
}
}
@@ -671,8 +686,8 @@
status_t AudioSystem::AudioFlingerClient::addAudioDeviceCallback(
const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo)
{
- Mutex::Autolock _l(mLock);
- Vector < wp<AudioDeviceCallback> > callbacks;
+ Mutex::Autolock _l(mCallbacksLock);
+ AudioDeviceCallbacks callbacks;
ssize_t ioIndex = mAudioDeviceCallbacks.indexOfKey(audioIo);
if (ioIndex >= 0) {
callbacks = mAudioDeviceCallbacks.valueAt(ioIndex);
@@ -684,7 +699,7 @@
}
}
callbacks.add(callback);
-
+ callbacks.resetNotifiedOnce();
mAudioDeviceCallbacks.replaceValueFor(audioIo, callbacks);
return NO_ERROR;
}
@@ -692,12 +707,12 @@
status_t AudioSystem::AudioFlingerClient::removeAudioDeviceCallback(
const wp<AudioDeviceCallback>& callback, audio_io_handle_t audioIo)
{
- Mutex::Autolock _l(mLock);
+ Mutex::Autolock _l(mCallbacksLock);
ssize_t ioIndex = mAudioDeviceCallbacks.indexOfKey(audioIo);
if (ioIndex < 0) {
return INVALID_OPERATION;
}
- Vector < wp<AudioDeviceCallback> > callbacks = mAudioDeviceCallbacks.valueAt(ioIndex);
+ AudioDeviceCallbacks callbacks = mAudioDeviceCallbacks.valueAt(ioIndex);
size_t cbIndex;
for (cbIndex = 0; cbIndex < callbacks.size(); cbIndex++) {
diff --git a/media/libaudioclient/AudioTrack.cpp b/media/libaudioclient/AudioTrack.cpp
index 79abea0..b5a7ebe 100644
--- a/media/libaudioclient/AudioTrack.cpp
+++ b/media/libaudioclient/AudioTrack.cpp
@@ -621,8 +621,10 @@
}
// create the IAudioTrack
- status = createTrack_l();
-
+ {
+ AutoMutex lock(mLock);
+ status = createTrack_l();
+ }
if (status != NO_ERROR) {
if (mAudioTrackThread != 0) {
mAudioTrackThread->requestExit(); // see comment in AudioTrack.h
@@ -2957,12 +2959,14 @@
ALOGW("%s(%d): removing NULL callback!", __func__, mPortId);
return BAD_VALUE;
}
- AutoMutex lock(mLock);
- if (mDeviceCallback.unsafe_get() != callback.get()) {
- ALOGW("%s(%d): removing different callback!", __func__, mPortId);
- return INVALID_OPERATION;
+ {
+ AutoMutex lock(mLock);
+ if (mDeviceCallback.unsafe_get() != callback.get()) {
+ ALOGW("%s removing different callback!", __FUNCTION__);
+ return INVALID_OPERATION;
+ }
+ mDeviceCallback.clear();
}
- mDeviceCallback.clear();
if (mOutput != AUDIO_IO_HANDLE_NONE) {
AudioSystem::removeAudioDeviceCallback(this, mOutput);
}
diff --git a/media/libaudioclient/include/media/AudioRecord.h b/media/libaudioclient/include/media/AudioRecord.h
index 1f71844..4707c4a 100644
--- a/media/libaudioclient/include/media/AudioRecord.h
+++ b/media/libaudioclient/include/media/AudioRecord.h
@@ -677,7 +677,7 @@
sp<IMemory> mCblkMemory;
audio_track_cblk_t* mCblk; // re-load after mLock.unlock()
sp<IMemory> mBufferMemory;
- audio_io_handle_t mInput; // returned by AudioSystem::getInput()
+ audio_io_handle_t mInput = AUDIO_IO_HANDLE_NONE; // from AudioSystem::getInputforAttr()
int mPreviousPriority; // before start()
SchedPolicy mPreviousSchedulingGroup;
diff --git a/media/libaudioclient/include/media/AudioSystem.h b/media/libaudioclient/include/media/AudioSystem.h
index 6060894..3df49e6 100644
--- a/media/libaudioclient/include/media/AudioSystem.h
+++ b/media/libaudioclient/include/media/AudioSystem.h
@@ -399,6 +399,15 @@
virtual void onAudioDeviceUpdate(audio_io_handle_t audioIo,
audio_port_handle_t deviceId) = 0;
+ bool notifiedOnce() const { return mNotifiedOnce; }
+ void setNotifiedOnce() { mNotifiedOnce = true; }
+ private:
+ /**
+ * @brief mNotifiedOnce it forces the callback to be called at least once when
+ * registered with a VALID AudioDevice, and allows not to flood other listeners
+ * on this iohandle that already know the valid device.
+ */
+ bool mNotifiedOnce = false;
};
static status_t addAudioDeviceCallback(const wp<AudioDeviceCallback>& callback,
@@ -444,8 +453,27 @@
private:
Mutex mLock;
DefaultKeyedVector<audio_io_handle_t, sp<AudioIoDescriptor> > mIoDescriptors;
- DefaultKeyedVector<audio_io_handle_t, Vector < wp<AudioDeviceCallback> > >
- mAudioDeviceCallbacks;
+
+ class AudioDeviceCallbacks : public Vector<wp<AudioDeviceCallback>>
+ {
+ public:
+ /**
+ * @brief notifiedOnce ensures that if a client adds a callback, it must at least be
+ * called once with the device on which it will be routed to.
+ * @return true if already notified or nobody waits for a callback, false otherwise.
+ */
+ bool notifiedOnce() const { return (size() == 0) || mNotifiedOnce; }
+ void setNotifiedOnce() { mNotifiedOnce = true; }
+ void resetNotifiedOnce() { mNotifiedOnce = false; }
+ private:
+ /**
+ * @brief mNotifiedOnce it forces each callback to be called at least once when
+ * registered with a VALID AudioDevice
+ */
+ bool mNotifiedOnce = false;
+ };
+ Mutex mCallbacksLock; // prevents race on Callbacks
+ DefaultKeyedVector<audio_io_handle_t, AudioDeviceCallbacks> mAudioDeviceCallbacks;
// cached values for recording getInputBufferSize() queries
size_t mInBuffSize; // zero indicates cache is invalid
uint32_t mInSamplingRate;
diff --git a/media/libaudioclient/include/media/AudioTrack.h b/media/libaudioclient/include/media/AudioTrack.h
index cbb750f..12f5d71 100644
--- a/media/libaudioclient/include/media/AudioTrack.h
+++ b/media/libaudioclient/include/media/AudioTrack.h
@@ -1021,7 +1021,7 @@
sp<IAudioTrack> mAudioTrack;
sp<IMemory> mCblkMemory;
audio_track_cblk_t* mCblk; // re-load after mLock.unlock()
- audio_io_handle_t mOutput; // returned by AudioSystem::getOutputForAttr()
+ audio_io_handle_t mOutput = AUDIO_IO_HANDLE_NONE; // from AudioSystem::getOutputForAttr()
sp<AudioTrackThread> mAudioTrackThread;
bool mThreadCanCallJava;
diff --git a/media/libmedia/IMediaSource.cpp b/media/libmedia/IMediaSource.cpp
index e7da488..4dece96 100644
--- a/media/libmedia/IMediaSource.cpp
+++ b/media/libmedia/IMediaSource.cpp
@@ -107,6 +107,7 @@
data.writeInterfaceToken(BpMediaSource::getInterfaceDescriptor());
status_t ret = remote()->transact(GETFORMAT, data, &reply);
if (ret == NO_ERROR) {
+ AutoMutex _l(mLock);
mMetaData = MetaData::createFromParcel(reply);
return mMetaData;
}
@@ -222,6 +223,8 @@
// NuPlayer passes pointers-to-metadata around, so we use this to keep the metadata alive
// XXX: could we use this for caching, or does metadata change on the fly?
sp<MetaData> mMetaData;
+ // ensure synchronize access to mMetaData
+ Mutex mLock;
// Cache all IMemory objects received from MediaExtractor.
// We gc IMemory objects that are no longer active (referenced by a MediaBuffer).
diff --git a/media/libmediaplayer2/Android.bp b/media/libmediaplayer2/Android.bp
index 00f537d..08519cd 100644
--- a/media/libmediaplayer2/Android.bp
+++ b/media/libmediaplayer2/Android.bp
@@ -123,9 +123,6 @@
"signed-integer-overflow",
],
cfi: true,
- diag: {
- cfi: true,
- },
},
}
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index a1a2660..52cb5fa 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -2092,7 +2092,8 @@
if (usingSwRenderer) {
outputFormat->setInt32("using-sw-renderer", 1);
}
- } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
+ } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG) ||
+ !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II)) {
int32_t numChannels, sampleRate;
if (!msg->findInt32("channel-count", &numChannels)
|| !msg->findInt32("sample-rate", &sampleRate)) {
@@ -4297,24 +4298,27 @@
int maxDimension = max(width, height);
static const int limits[][5] = {
- /* MBps MB dim bitrate level */
- { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 },
- { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b },
- { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 },
- { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 },
- { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 },
- { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 },
- { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 },
- { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 },
- { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 },
- { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 },
- { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 },
- { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 },
- { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 },
- { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 },
- { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 },
- { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 },
- { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 },
+ /* MBps MB dim bitrate level */
+ { 1485, 99, 28, 64, OMX_VIDEO_AVCLevel1 },
+ { 1485, 99, 28, 128, OMX_VIDEO_AVCLevel1b },
+ { 3000, 396, 56, 192, OMX_VIDEO_AVCLevel11 },
+ { 6000, 396, 56, 384, OMX_VIDEO_AVCLevel12 },
+ { 11880, 396, 56, 768, OMX_VIDEO_AVCLevel13 },
+ { 11880, 396, 56, 2000, OMX_VIDEO_AVCLevel2 },
+ { 19800, 792, 79, 4000, OMX_VIDEO_AVCLevel21 },
+ { 20250, 1620, 113, 4000, OMX_VIDEO_AVCLevel22 },
+ { 40500, 1620, 113, 10000, OMX_VIDEO_AVCLevel3 },
+ { 108000, 3600, 169, 14000, OMX_VIDEO_AVCLevel31 },
+ { 216000, 5120, 202, 20000, OMX_VIDEO_AVCLevel32 },
+ { 245760, 8192, 256, 20000, OMX_VIDEO_AVCLevel4 },
+ { 245760, 8192, 256, 50000, OMX_VIDEO_AVCLevel41 },
+ { 522240, 8704, 263, 50000, OMX_VIDEO_AVCLevel42 },
+ { 589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5 },
+ { 983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 },
+ { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 },
+ { 4177920, 139264, 1055, 240000, OMX_VIDEO_AVCLevel6 },
+ { 8355840, 139264, 1055, 480000, OMX_VIDEO_AVCLevel61 },
+ { 16711680, 139264, 1055, 800000, OMX_VIDEO_AVCLevel62 },
};
for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
diff --git a/media/libstagefright/Android.bp b/media/libstagefright/Android.bp
index 26464b8..488890d 100644
--- a/media/libstagefright/Android.bp
+++ b/media/libstagefright/Android.bp
@@ -220,6 +220,7 @@
],
header_libs:[
+ "libnativeloader-dummy-headers",
"libstagefright_xmlparser_headers",
"media_ndk_headers",
],
diff --git a/media/libstagefright/MPEG4Writer.cpp b/media/libstagefright/MPEG4Writer.cpp
index c4015fb..6259b15 100644
--- a/media/libstagefright/MPEG4Writer.cpp
+++ b/media/libstagefright/MPEG4Writer.cpp
@@ -3595,7 +3595,7 @@
}
int64_t MPEG4Writer::Track::getDurationUs() const {
- return mTrackDurationUs + getStartTimeOffsetTimeUs();
+ return mTrackDurationUs + getStartTimeOffsetTimeUs() + mOwner->getStartTimeOffsetBFramesUs();
}
int64_t MPEG4Writer::Track::getEstimatedTrackSizeBytes() const {
@@ -4059,7 +4059,7 @@
// Prepone video playback.
if (mMinCttsOffsetTicks != mMaxCttsOffsetTicks) {
int32_t mvhdTimeScale = mOwner->getTimeScale();
- uint32_t tkhdDuration = (mTrackDurationUs * mvhdTimeScale + 5E5) / 1E6;
+ uint32_t tkhdDuration = (getDurationUs() * mvhdTimeScale + 5E5) / 1E6;
int64_t mediaTime = ((kMaxCttsOffsetTimeUs - getMinCttsOffsetTimeUs())
* mTimeScale + 5E5) / 1E6;
if (tkhdDuration > 0 && mediaTime > 0) {
diff --git a/media/libstagefright/MediaCodecList.cpp b/media/libstagefright/MediaCodecList.cpp
index 93478e9..3d58d4b 100644
--- a/media/libstagefright/MediaCodecList.cpp
+++ b/media/libstagefright/MediaCodecList.cpp
@@ -77,7 +77,8 @@
return profilingNeeded;
}
-OmxInfoBuilder sOmxInfoBuilder;
+OmxInfoBuilder sOmxInfoBuilder{true /* allowSurfaceEncoders */};
+OmxInfoBuilder sOmxNoSurfaceEncoderInfoBuilder{false /* allowSurfaceEncoders */};
Mutex sCodec2InfoBuilderMutex;
std::unique_ptr<MediaCodecListBuilderBase> sCodec2InfoBuilder;
@@ -98,7 +99,11 @@
sp<PersistentSurface> surfaceTest =
StagefrightPluginLoader::GetCCodecInstance()->createInputSurface();
if (surfaceTest == nullptr) {
+ ALOGD("Allowing all OMX codecs");
builders.push_back(&sOmxInfoBuilder);
+ } else {
+ ALOGD("Allowing only non-surface-encoder OMX codecs");
+ builders.push_back(&sOmxNoSurfaceEncoderInfoBuilder);
}
builders.push_back(GetCodec2InfoBuilder());
return builders;
@@ -219,6 +224,21 @@
return info1 == nullptr
|| (info2 != nullptr && info1->getRank() < info2->getRank());
});
+
+ // remove duplicate entries
+ bool dedupe = property_get_bool("debug.stagefright.dedupe-codecs", true);
+ if (dedupe) {
+ std::set<std::string> codecsSeen;
+ for (auto it = mCodecInfos.begin(); it != mCodecInfos.end(); ) {
+ std::string codecName = (*it)->getCodecName();
+ if (codecsSeen.count(codecName) == 0) {
+ codecsSeen.emplace(codecName);
+ it++;
+ } else {
+ it = mCodecInfos.erase(it);
+ }
+ }
+ }
}
MediaCodecList::~MediaCodecList() {
@@ -268,10 +288,17 @@
}
ssize_t MediaCodecList::findCodecByName(const char *name) const {
+ Vector<AString> aliases;
for (size_t i = 0; i < mCodecInfos.size(); ++i) {
if (strcmp(mCodecInfos[i]->getCodecName(), name) == 0) {
return i;
}
+ mCodecInfos[i]->getAliases(&aliases);
+ for (const AString &alias : aliases) {
+ if (alias == name) {
+ return i;
+ }
+ }
}
return -ENOENT;
diff --git a/media/libstagefright/MediaCodecListWriter.cpp b/media/libstagefright/MediaCodecListWriter.cpp
index b32e470..c4fb199 100644
--- a/media/libstagefright/MediaCodecListWriter.cpp
+++ b/media/libstagefright/MediaCodecListWriter.cpp
@@ -37,6 +37,16 @@
new MediaCodecInfoWriter(info.get()));
}
+std::unique_ptr<MediaCodecInfoWriter>
+ MediaCodecListWriter::findMediaCodecInfo(const char *name) {
+ for (const sp<MediaCodecInfo> &info : mCodecInfos) {
+ if (!strcmp(info->getCodecName(), name)) {
+ return std::unique_ptr<MediaCodecInfoWriter>(new MediaCodecInfoWriter(info.get()));
+ }
+ }
+ return nullptr;
+}
+
void MediaCodecListWriter::writeGlobalSettings(
const sp<AMessage> &globalSettings) const {
for (const std::pair<std::string, std::string> &kv : mGlobalSettings) {
diff --git a/media/libstagefright/MediaExtractorFactory.cpp b/media/libstagefright/MediaExtractorFactory.cpp
index 40b5421..a938d51 100644
--- a/media/libstagefright/MediaExtractorFactory.cpp
+++ b/media/libstagefright/MediaExtractorFactory.cpp
@@ -30,6 +30,7 @@
#include <media/stagefright/MediaExtractorFactory.h>
#include <media/IMediaExtractor.h>
#include <media/IMediaExtractorService.h>
+#include <nativeloader/dlext_namespaces.h>
#include <private/android_filesystem_config.h>
#include <cutils/properties.h>
#include <utils/String8.h>
@@ -38,23 +39,6 @@
#include <dirent.h>
#include <dlfcn.h>
-// Copied from GraphicsEnv.cpp
-// TODO(b/37049319) Get this from a header once one exists
-extern "C" {
- android_namespace_t* android_create_namespace(const char* name,
- const char* ld_library_path,
- const char* default_library_path,
- uint64_t type,
- const char* permitted_when_isolated_path,
- android_namespace_t* parent);
- bool android_link_namespaces(android_namespace_t* from,
- android_namespace_t* to,
- const char* shared_libs_sonames);
- enum {
- ANDROID_NAMESPACE_TYPE_ISOLATED = 1,
- };
-}
-
namespace android {
// static
diff --git a/media/libstagefright/OmxInfoBuilder.cpp b/media/libstagefright/OmxInfoBuilder.cpp
index 382c947..8910463 100644
--- a/media/libstagefright/OmxInfoBuilder.cpp
+++ b/media/libstagefright/OmxInfoBuilder.cpp
@@ -21,8 +21,8 @@
#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
#endif
+#include <android-base/properties.h>
#include <utils/Log.h>
-#include <cutils/properties.h>
#include <media/stagefright/foundation/MediaDefs.h>
#include <media/stagefright/OmxInfoBuilder.h>
@@ -53,7 +53,7 @@
namespace /* unnamed */ {
bool hasPrefix(const hidl_string& s, const char* prefix) {
- return strncmp(s.c_str(), prefix, strlen(prefix)) == 0;
+ return strncasecmp(s.c_str(), prefix, strlen(prefix)) == 0;
}
status_t queryCapabilities(
@@ -87,7 +87,8 @@
} // unnamed namespace
-OmxInfoBuilder::OmxInfoBuilder() {
+OmxInfoBuilder::OmxInfoBuilder(bool allowSurfaceEncoders)
+ : mAllowSurfaceEncoders(allowSurfaceEncoders) {
}
status_t OmxInfoBuilder::buildMediaCodecList(MediaCodecListWriter* writer) {
@@ -135,81 +136,80 @@
// Convert roles to lists of codecs
// codec name -> index into swCodecs/hwCodecs
- std::map<hidl_string, std::unique_ptr<MediaCodecInfoWriter>>
- swCodecName2Info, hwCodecName2Info;
+ std::map<hidl_string, std::unique_ptr<MediaCodecInfoWriter>> codecName2Info;
- char rank[PROPERTY_VALUE_MAX];
- uint32_t defaultRank = 0x100;
- if (property_get("debug.stagefright.omx_default_rank", rank, nullptr)) {
- defaultRank = std::strtoul(rank, nullptr, 10);
- }
+ uint32_t defaultRank =
+ ::android::base::GetUintProperty("debug.stagefright.omx_default_rank", 0x100u);
+ uint32_t defaultSwAudioRank =
+ ::android::base::GetUintProperty("debug.stagefright.omx_default_rank.sw-audio", 0x10u);
+ uint32_t defaultSwOtherRank =
+ ::android::base::GetUintProperty("debug.stagefright.omx_default_rank.sw-other", 0x210u);
+
for (const IOmxStore::RoleInfo& role : roles) {
const hidl_string& typeName = role.type;
bool isEncoder = role.isEncoder;
- bool preferPlatformNodes = role.preferPlatformNodes;
- // If preferPlatformNodes is true, hardware nodes must be added after
- // platform (software) nodes. hwCodecs is used to hold hardware nodes
- // that need to be added after software nodes for the same role.
- std::vector<const IOmxStore::NodeInfo*> hwCodecs;
- for (const IOmxStore::NodeInfo& node : role.nodes) {
+ bool isAudio = hasPrefix(role.type, "audio/");
+ bool isVideoOrImage = hasPrefix(role.type, "video/") || hasPrefix(role.type, "image/");
+
+ for (const IOmxStore::NodeInfo &node : role.nodes) {
const hidl_string& nodeName = node.name;
+
+ // currently image and video encoders use surface input
+ if (!mAllowSurfaceEncoders && isVideoOrImage && isEncoder) {
+ ALOGD("disabling %s for media type %s because we are not using OMX input surface",
+ nodeName.c_str(), role.type.c_str());
+ continue;
+ }
+
bool isSoftware = hasPrefix(nodeName, "OMX.google");
- MediaCodecInfoWriter* info;
- if (isSoftware) {
- auto c2i = swCodecName2Info.find(nodeName);
- if (c2i == swCodecName2Info.end()) {
- // Create a new MediaCodecInfo for a new node.
- c2i = swCodecName2Info.insert(std::make_pair(
- nodeName, writer->addMediaCodecInfo())).first;
- info = c2i->second.get();
- info->setName(nodeName.c_str());
- info->setOwner(node.owner.c_str());
- info->setAttributes(
- // all OMX codecs are vendor codecs (in the vendor partition), but
- // treat OMX.google codecs as non-hardware-accelerated and non-vendor
- (isEncoder ? MediaCodecInfo::kFlagIsEncoder : 0));
- info->setRank(defaultRank);
- } else {
- // The node has been seen before. Simply retrieve the
- // existing MediaCodecInfoWriter.
- info = c2i->second.get();
- }
- } else {
- auto c2i = hwCodecName2Info.find(nodeName);
- if (c2i == hwCodecName2Info.end()) {
- // Create a new MediaCodecInfo for a new node.
- if (!preferPlatformNodes) {
- c2i = hwCodecName2Info.insert(std::make_pair(
- nodeName, writer->addMediaCodecInfo())).first;
- info = c2i->second.get();
- info->setName(nodeName.c_str());
- info->setOwner(node.owner.c_str());
- typename std::underlying_type<MediaCodecInfo::Attributes>::type attrs =
- MediaCodecInfo::kFlagIsVendor;
- if (isEncoder) {
- attrs |= MediaCodecInfo::kFlagIsEncoder;
- }
- if (std::count_if(
- node.attributes.begin(), node.attributes.end(),
- [](const IOmxStore::Attribute &i) -> bool {
- return i.key == "attribute::software-codec";
- })) {
- attrs |= MediaCodecInfo::kFlagIsHardwareAccelerated;
- }
- info->setAttributes(attrs);
- info->setRank(defaultRank);
- } else {
- // If preferPlatformNodes is true, this node must be
- // added after all software nodes.
- hwCodecs.push_back(&node);
- continue;
+ uint32_t rank = isSoftware
+ ? (isAudio ? defaultSwAudioRank : defaultSwOtherRank)
+ : defaultRank;
+ // get rank from IOmxStore via attribute
+ for (const IOmxStore::Attribute& attribute : node.attributes) {
+ if (attribute.key == "rank") {
+ uint32_t oldRank = rank;
+ char dummy;
+ if (sscanf(attribute.value.c_str(), "%u%c", &rank, &dummy) != 1) {
+ rank = oldRank;
}
- } else {
- // The node has been seen before. Simply retrieve the
- // existing MediaCodecInfoWriter.
- info = c2i->second.get();
+ break;
}
}
+
+ MediaCodecInfoWriter* info;
+ auto c2i = codecName2Info.find(nodeName);
+ if (c2i == codecName2Info.end()) {
+ // Create a new MediaCodecInfo for a new node.
+ c2i = codecName2Info.insert(std::make_pair(
+ nodeName, writer->addMediaCodecInfo())).first;
+ info = c2i->second.get();
+ info->setName(nodeName.c_str());
+ info->setOwner(node.owner.c_str());
+ info->setRank(rank);
+
+ typename std::underlying_type<MediaCodecInfo::Attributes>::type attrs = 0;
+ // all OMX codecs are vendor codecs (in the vendor partition), but
+ // treat OMX.google codecs as non-hardware-accelerated and non-vendor
+ if (!isSoftware) {
+ attrs |= MediaCodecInfo::kFlagIsVendor;
+ if (std::count_if(
+ node.attributes.begin(), node.attributes.end(),
+ [](const IOmxStore::Attribute &i) -> bool {
+ return i.key == "attribute::software-codec";
+ })) {
+ attrs |= MediaCodecInfo::kFlagIsHardwareAccelerated;
+ }
+ }
+ if (isEncoder) {
+ attrs |= MediaCodecInfo::kFlagIsEncoder;
+ }
+ info->setAttributes(attrs);
+ } else {
+ // The node has been seen before. Simply retrieve the
+ // existing MediaCodecInfoWriter.
+ info = c2i->second.get();
+ }
std::unique_ptr<MediaCodecInfo::CapabilitiesWriter> caps =
info->addMediaType(typeName.c_str());
if (queryCapabilities(
@@ -219,54 +219,8 @@
info->removeMediaType(typeName.c_str());
}
}
-
- // If preferPlatformNodes is true, hardware nodes will not have been
- // added in the loop above, but rather saved in hwCodecs. They are
- // going to be added here.
- if (preferPlatformNodes) {
- for (const IOmxStore::NodeInfo *node : hwCodecs) {
- MediaCodecInfoWriter* info;
- const hidl_string& nodeName = node->name;
- auto c2i = hwCodecName2Info.find(nodeName);
- if (c2i == hwCodecName2Info.end()) {
- // Create a new MediaCodecInfo for a new node.
- c2i = hwCodecName2Info.insert(std::make_pair(
- nodeName, writer->addMediaCodecInfo())).first;
- info = c2i->second.get();
- info->setName(nodeName.c_str());
- info->setOwner(node->owner.c_str());
- typename std::underlying_type<MediaCodecInfo::Attributes>::type attrs =
- MediaCodecInfo::kFlagIsVendor;
- if (isEncoder) {
- attrs |= MediaCodecInfo::kFlagIsEncoder;
- }
- if (std::count_if(
- node->attributes.begin(), node->attributes.end(),
- [](const IOmxStore::Attribute &i) -> bool {
- return i.key == "attribute::software-codec";
- })) {
- attrs |= MediaCodecInfo::kFlagIsHardwareAccelerated;
- }
- info->setRank(defaultRank);
- } else {
- // The node has been seen before. Simply retrieve the
- // existing MediaCodecInfoWriter.
- info = c2i->second.get();
- }
- std::unique_ptr<MediaCodecInfo::CapabilitiesWriter> caps =
- info->addMediaType(typeName.c_str());
- if (queryCapabilities(
- *node, typeName.c_str(), isEncoder, caps.get()) != OK) {
- ALOGW("Fail to add media type %s to codec %s "
- "after software codecs",
- typeName.c_str(), nodeName.c_str());
- info->removeMediaType(typeName.c_str());
- }
- }
- }
}
return OK;
}
} // namespace android
-
diff --git a/media/libstagefright/Utils.cpp b/media/libstagefright/Utils.cpp
index 2e7da01..82f7026 100644
--- a/media/libstagefright/Utils.cpp
+++ b/media/libstagefright/Utils.cpp
@@ -192,6 +192,9 @@
{ 50, OMX_VIDEO_AVCLevel5 },
{ 51, OMX_VIDEO_AVCLevel51 },
{ 52, OMX_VIDEO_AVCLevel52 },
+ { 60, OMX_VIDEO_AVCLevel6 },
+ { 61, OMX_VIDEO_AVCLevel61 },
+ { 62, OMX_VIDEO_AVCLevel62 },
};
const static ALookup<uint8_t, OMX_VIDEO_AVCPROFILETYPE> profiles {
{ 66, OMX_VIDEO_AVCProfileBaseline },
diff --git a/media/libstagefright/data/media_codecs_google_c2_audio.xml b/media/libstagefright/data/media_codecs_google_c2_audio.xml
index 88cd08d..f664395 100644
--- a/media/libstagefright/data/media_codecs_google_c2_audio.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_audio.xml
@@ -17,51 +17,61 @@
<Included>
<Decoders>
<MediaCodec name="c2.android.mp3.decoder" type="audio/mpeg">
+ <Alias name="OMX.google.mp3.decoder" />
<Limit name="channel-count" max="2" />
<Limit name="sample-rate" ranges="8000,11025,12000,16000,22050,24000,32000,44100,48000" />
<Limit name="bitrate" range="8000-320000" />
</MediaCodec>
<MediaCodec name="c2.android.amrnb.decoder" type="audio/3gpp">
+ <Alias name="OMX.google.amrnb.decoder" />
<Limit name="channel-count" max="1" />
<Limit name="sample-rate" ranges="8000" />
<Limit name="bitrate" range="4750-12200" />
</MediaCodec>
<MediaCodec name="c2.android.amrwb.decoder" type="audio/amr-wb">
+ <Alias name="OMX.google.amrwb.decoder" />
<Limit name="channel-count" max="1" />
<Limit name="sample-rate" ranges="16000" />
<Limit name="bitrate" range="6600-23850" />
</MediaCodec>
<MediaCodec name="c2.android.aac.decoder" type="audio/mp4a-latm">
+ <Alias name="OMX.google.aac.decoder" />
<Limit name="channel-count" max="8" />
<Limit name="sample-rate" ranges="7350,8000,11025,12000,16000,22050,24000,32000,44100,48000" />
<Limit name="bitrate" range="8000-960000" />
</MediaCodec>
<MediaCodec name="c2.android.g711.alaw.decoder" type="audio/g711-alaw">
+ <Alias name="OMX.google.g711.alaw.decoder" />
<Limit name="channel-count" max="1" />
<Limit name="sample-rate" ranges="8000-48000" />
<Limit name="bitrate" range="64000" />
</MediaCodec>
<MediaCodec name="c2.android.g711.mlaw.decoder" type="audio/g711-mlaw">
+ <Alias name="OMX.google.g711.mlaw.decoder" />
<Limit name="channel-count" max="1" />
<Limit name="sample-rate" ranges="8000-48000" />
<Limit name="bitrate" range="64000" />
</MediaCodec>
<MediaCodec name="c2.android.vorbis.decoder" type="audio/vorbis">
+ <Alias name="OMX.google.vorbis.decoder" />
<Limit name="channel-count" max="8" />
<Limit name="sample-rate" ranges="8000-96000" />
<Limit name="bitrate" range="32000-500000" />
</MediaCodec>
<MediaCodec name="c2.android.opus.decoder" type="audio/opus">
+ <Alias name="OMX.google.opus.decoder" />
<Limit name="channel-count" max="8" />
<Limit name="sample-rate" ranges="48000" />
<Limit name="bitrate" range="6000-510000" />
</MediaCodec>
<MediaCodec name="c2.android.raw.decoder" type="audio/raw">
+ <Alias name="OMX.google.raw.decoder" />
<Limit name="channel-count" max="8" />
<Limit name="sample-rate" ranges="8000-96000" />
<Limit name="bitrate" range="1-10000000" />
</MediaCodec>
<MediaCodec name="c2.android.flac.decoder" type="audio/flac">
+ <Alias name="OMX.google.flac.decoder" />
<Limit name="channel-count" max="8" />
<Limit name="sample-rate" ranges="1-655350" />
<Limit name="bitrate" range="1-21000000" />
@@ -69,24 +79,28 @@
</Decoders>
<Encoders>
<MediaCodec name="c2.android.aac.encoder" type="audio/mp4a-latm">
+ <Alias name="OMX.google.aac.decoder" />
<Limit name="channel-count" max="6" />
<Limit name="sample-rate" ranges="8000,11025,12000,16000,22050,24000,32000,44100,48000" />
<!-- also may support 64000, 88200 and 96000 Hz -->
<Limit name="bitrate" range="8000-960000" />
</MediaCodec>
<MediaCodec name="c2.android.amrnb.encoder" type="audio/3gpp">
+ <Alias name="OMX.google.amrnb.decoder" />
<Limit name="channel-count" max="1" />
<Limit name="sample-rate" ranges="8000" />
<Limit name="bitrate" range="4750-12200" />
<Feature name="bitrate-modes" value="CBR" />
</MediaCodec>
<MediaCodec name="c2.android.amrwb.encoder" type="audio/amr-wb">
+ <Alias name="OMX.google.amrwb.decoder" />
<Limit name="channel-count" max="1" />
<Limit name="sample-rate" ranges="16000" />
<Limit name="bitrate" range="6600-23850" />
<Feature name="bitrate-modes" value="CBR" />
</MediaCodec>
<MediaCodec name="c2.android.flac.encoder" type="audio/flac">
+ <Alias name="OMX.google.flac.decoder" />
<Limit name="channel-count" max="2" />
<Limit name="sample-rate" ranges="1-655350" />
<Limit name="bitrate" range="1-21000000" />
diff --git a/media/libstagefright/data/media_codecs_google_c2_telephony.xml b/media/libstagefright/data/media_codecs_google_c2_telephony.xml
index d1055b3..950b092 100644
--- a/media/libstagefright/data/media_codecs_google_c2_telephony.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_telephony.xml
@@ -17,6 +17,7 @@
<Included>
<Decoders>
<MediaCodec name="c2.android.gsm.decoder" type="audio/gsm">
+ <Alias name="OMX.google.gsm.decoder" />
<Limit name="channel-count" max="1" />
<Limit name="sample-rate" ranges="8000" />
<Limit name="bitrate" range="13000" />
diff --git a/media/libstagefright/data/media_codecs_google_c2_tv.xml b/media/libstagefright/data/media_codecs_google_c2_tv.xml
index fa082c7..1b00dc9 100644
--- a/media/libstagefright/data/media_codecs_google_c2_tv.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_tv.xml
@@ -17,6 +17,7 @@
<Included>
<Decoders>
<MediaCodec name="c2.android.mpeg2.decoder" type="video/mpeg2">
+ <Alias name="OMX.google.mpeg2.decoder" />
<!-- profiles and levels: ProfileMain : LevelHL -->
<Limit name="size" min="16x16" max="1920x1088" />
<Limit name="alignment" value="2x2" />
diff --git a/media/libstagefright/data/media_codecs_google_c2_video.xml b/media/libstagefright/data/media_codecs_google_c2_video.xml
index c49789e..5c2d96d 100644
--- a/media/libstagefright/data/media_codecs_google_c2_video.xml
+++ b/media/libstagefright/data/media_codecs_google_c2_video.xml
@@ -17,6 +17,7 @@
<Included>
<Decoders>
<MediaCodec name="c2.android.mpeg4.decoder" type="video/mp4v-es">
+ <Alias name="OMX.google.mpeg4.decoder" />
<!-- profiles and levels: ProfileSimple : Level3 -->
<Limit name="size" min="2x2" max="352x288" />
<Limit name="alignment" value="2x2" />
@@ -26,6 +27,7 @@
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="c2.android.h263.decoder" type="video/3gpp">
+ <Alias name="OMX.google.h263.decoder" />
<!-- profiles and levels: ProfileBaseline : Level30, ProfileBaseline : Level45
ProfileISWV2 : Level30, ProfileISWV2 : Level45 -->
<Limit name="size" min="2x2" max="352x288" />
@@ -34,6 +36,7 @@
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="c2.android.avc.decoder" type="video/avc">
+ <Alias name="OMX.google.h264.decoder" />
<!-- profiles and levels: ProfileHigh : Level52 -->
<Limit name="size" min="2x2" max="4080x4080" />
<Limit name="alignment" value="2x2" />
@@ -44,6 +47,7 @@
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="c2.android.hevc.decoder" type="video/hevc">
+ <Alias name="OMX.google.hevc.decoder" />
<!-- profiles and levels: ProfileMain : MainTierLevel51 -->
<Limit name="size" min="2x2" max="4096x4096" />
<Limit name="alignment" value="2x2" />
@@ -54,6 +58,7 @@
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="c2.android.vp8.decoder" type="video/x-vnd.on2.vp8">
+ <Alias name="OMX.google.vp8.decoder" />
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
@@ -63,6 +68,7 @@
<Feature name="adaptive-playback" />
</MediaCodec>
<MediaCodec name="c2.android.vp9.decoder" type="video/x-vnd.on2.vp9">
+ <Alias name="OMX.google.vp9.decoder" />
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
<Limit name="block-size" value="16x16" />
@@ -84,12 +90,14 @@
<Encoders>
<MediaCodec name="c2.android.h263.encoder" type="video/3gpp">
+ <Alias name="OMX.google.h263.encoder" />
<!-- profiles and levels: ProfileBaseline : Level45 -->
<Limit name="size" min="176x144" max="176x144" />
<Limit name="alignment" value="16x16" />
<Limit name="bitrate" range="1-128000" />
</MediaCodec>
<MediaCodec name="c2.android.avc.encoder" type="video/avc">
+ <Alias name="OMX.google.h264.encoder" />
<!-- profiles and levels: ProfileBaseline : Level41 -->
<Limit name="size" min="16x16" max="2048x2048" />
<Limit name="alignment" value="2x2" />
@@ -100,6 +108,7 @@
<Feature name="intra-refresh" />
</MediaCodec>
<MediaCodec name="c2.android.mpeg4.encoder" type="video/mp4v-es">
+ <Alias name="OMX.google.mpeg4.encoder" />
<!-- profiles and levels: ProfileCore : Level2 -->
<Limit name="size" min="16x16" max="176x144" />
<Limit name="alignment" value="16x16" />
@@ -108,6 +117,7 @@
<Limit name="bitrate" range="1-64000" />
</MediaCodec>
<MediaCodec name="c2.android.vp8.encoder" type="video/x-vnd.on2.vp8">
+ <Alias name="OMX.google.vp8.encoder" />
<!-- profiles and levels: ProfileMain : Level_Version0-3 -->
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
@@ -118,6 +128,7 @@
<Feature name="bitrate-modes" value="VBR,CBR" />
</MediaCodec>
<MediaCodec name="c2.android.vp9.encoder" type="video/x-vnd.on2.vp9">
+ <Alias name="OMX.google.vp9.encoder" />
<!-- profiles and levels: ProfileMain : Level_Version0-3 -->
<Limit name="size" min="2x2" max="2048x2048" />
<Limit name="alignment" value="2x2" />
diff --git a/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h b/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h
index 59f57c7..f53b23e 100644
--- a/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h
+++ b/media/libstagefright/include/media/stagefright/MediaCodecListWriter.h
@@ -48,6 +48,13 @@
* added `MediaCodecInfo` object.
*/
std::unique_ptr<MediaCodecInfoWriter> addMediaCodecInfo();
+ /**
+ * Find an existing `MediaCodecInfo` object for a codec name and return a
+ * `MediaCodecInfoWriter` object associated with the found added `MediaCodecInfo`.
+ *
+ * @return The `MediaCodecInfoWriter` object if found, or nullptr if not found.
+ */
+ std::unique_ptr<MediaCodecInfoWriter> findMediaCodecInfo(const char *codecName);
private:
MediaCodecListWriter() = default;
diff --git a/media/libstagefright/include/media/stagefright/OmxInfoBuilder.h b/media/libstagefright/include/media/stagefright/OmxInfoBuilder.h
index 28f6094..1410a16 100644
--- a/media/libstagefright/include/media/stagefright/OmxInfoBuilder.h
+++ b/media/libstagefright/include/media/stagefright/OmxInfoBuilder.h
@@ -23,8 +23,11 @@
namespace android {
class OmxInfoBuilder : public MediaCodecListBuilderBase {
+private:
+ bool mAllowSurfaceEncoders; // allow surface encoders
+
public:
- OmxInfoBuilder();
+ explicit OmxInfoBuilder(bool allowSurfaceEncoders);
~OmxInfoBuilder() override = default;
status_t buildMediaCodecList(MediaCodecListWriter* writer) override;
};
diff --git a/media/libstagefright/omx/1.0/OmxStore.cpp b/media/libstagefright/omx/1.0/OmxStore.cpp
index 447af6f..2e041e3 100644
--- a/media/libstagefright/omx/1.0/OmxStore.cpp
+++ b/media/libstagefright/omx/1.0/OmxStore.cpp
@@ -61,10 +61,7 @@
role.role = rolePair.first;
role.type = rolePair.second.type;
role.isEncoder = rolePair.second.isEncoder;
- // TODO: Currently, preferPlatformNodes information is not available in
- // the xml file. Once we have a way to provide this information, it
- // should be parsed properly.
- role.preferPlatformNodes = rolePair.first.compare(0, 5, "audio") == 0;
+ role.preferPlatformNodes = false; // deprecated and ignored, using rank instead
hidl_vec<NodeInfo>& nodeList = role.nodes;
nodeList.resize(rolePair.second.nodeList.size());
size_t j = 0;
diff --git a/media/libstagefright/timedtext/Android.bp b/media/libstagefright/timedtext/Android.bp
index 97e1ec6..6935655 100644
--- a/media/libstagefright/timedtext/Android.bp
+++ b/media/libstagefright/timedtext/Android.bp
@@ -44,9 +44,6 @@
"signed-integer-overflow",
],
cfi: true,
- diag: {
- cfi: true,
- },
},
include_dirs: [
diff --git a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
index 6e541ba..7046f61 100644
--- a/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
+++ b/media/libstagefright/xmlparser/MediaCodecsXmlParser.cpp
@@ -502,6 +502,7 @@
const char *name = nullptr;
const char *type = nullptr;
const char *update = nullptr;
+ const char *rank = nullptr;
size_t i = 0;
while (attrs[i] != nullptr) {
@@ -523,6 +524,12 @@
return BAD_VALUE;
}
update = attrs[i];
+ } else if (strEq(attrs[i], "rank")) {
+ if (attrs[++i] == nullptr) {
+ ALOGE("addMediaCodecFromAttributes: rank is null");
+ return BAD_VALUE;
+ }
+ rank = attrs[i];
} else {
ALOGE("addMediaCodecFromAttributes: unrecognized attribute: %s", attrs[i]);
return BAD_VALUE;
@@ -579,6 +586,15 @@
}
}
+ if (rank != nullptr) {
+ if (!mCurrentCodec->second.rank.empty() && mCurrentCodec->second.rank != rank) {
+ ALOGE("addMediaCodecFromAttributes: code \"%s\" rank changed from \"%s\" to \"%s\"",
+ name, mCurrentCodec->second.rank.c_str(), rank);
+ return BAD_VALUE;
+ }
+ mCurrentCodec->second.rank = rank;
+ }
+
return OK;
}
@@ -1035,6 +1051,7 @@
const auto& codecName = codec.first;
bool isEncoder = codec.second.isEncoder;
size_t order = codec.second.order;
+ std::string rank = codec.second.rank;
const auto& typeMap = codec.second.typeMap;
for (const auto& type : typeMap) {
const auto& typeName = type.first;
@@ -1090,6 +1107,9 @@
nodeInfo.attributeList.push_back(Attribute{quirk, "present"});
}
}
+ if (!rank.empty()) {
+ nodeInfo.attributeList.push_back(Attribute{"rank", rank});
+ }
nodeList->insert(std::make_pair(
std::move(order), std::move(nodeInfo)));
}
diff --git a/media/libstagefright/xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h b/media/libstagefright/xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h
index fd949da..7a986b7 100644
--- a/media/libstagefright/xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h
+++ b/media/libstagefright/xmlparser/include/media/stagefright/xmlparser/MediaCodecsXmlParser.h
@@ -66,6 +66,7 @@
QuirkSet quirkSet; ///< Set of quirks requested by this codec
TypeMap typeMap; ///< Map of types supported by this codec
std::vector<std::string> aliases; ///< Name aliases for this codec
+ std::string rank; ///< Rank of this codec. This is a numeric string.
};
typedef std::pair<std::string, CodecProperties> Codec;
diff --git a/media/ndk/NdkImageReader.cpp b/media/ndk/NdkImageReader.cpp
index 010c1aa..c3eb437 100644
--- a/media/ndk/NdkImageReader.cpp
+++ b/media/ndk/NdkImageReader.cpp
@@ -69,6 +69,7 @@
case AIMAGE_FORMAT_DEPTH16:
case AIMAGE_FORMAT_DEPTH_POINT_CLOUD:
case AIMAGE_FORMAT_Y8:
+ case AIMAGE_FORMAT_HEIC:
return true;
case AIMAGE_FORMAT_PRIVATE:
// For private format, cpu usage is prohibited.
@@ -96,6 +97,7 @@
case AIMAGE_FORMAT_DEPTH16:
case AIMAGE_FORMAT_DEPTH_POINT_CLOUD:
case AIMAGE_FORMAT_Y8:
+ case AIMAGE_FORMAT_HEIC:
return 1;
case AIMAGE_FORMAT_PRIVATE:
return 0;
diff --git a/media/ndk/include/media/NdkImage.h b/media/ndk/include/media/NdkImage.h
index 15b340c..14d88cb 100644
--- a/media/ndk/include/media/NdkImage.h
+++ b/media/ndk/include/media/NdkImage.h
@@ -526,7 +526,15 @@
* (in bytes) between adjacent rows.</p>
*
*/
- AIMAGE_FORMAT_Y8 = 0x20203859
+ AIMAGE_FORMAT_Y8 = 0x20203859,
+
+ /**
+ * Compressed HEIC format.
+ *
+ * <p>This format defines the HEIC brand of High Efficiency Image File
+ * Format as described in ISO/IEC 23008-12.</p>
+ */
+ AIMAGE_FORMAT_HEIC = 0x48454946,
};
/**
diff --git a/services/audioflinger/FastMixer.cpp b/services/audioflinger/FastMixer.cpp
index e78c98b..c5b9953 100644
--- a/services/audioflinger/FastMixer.cpp
+++ b/services/audioflinger/FastMixer.cpp
@@ -139,6 +139,75 @@
}
}
+void FastMixer::updateMixerTrack(int index, Reason reason) {
+ const FastMixerState * const current = (const FastMixerState *) mCurrent;
+ const FastTrack * const fastTrack = &current->mFastTracks[index];
+
+ // check and update generation
+ if (reason == REASON_MODIFY && mGenerations[index] == fastTrack->mGeneration) {
+ return; // no change on an already configured track.
+ }
+ mGenerations[index] = fastTrack->mGeneration;
+
+ // mMixer == nullptr on configuration failure (check done after generation update).
+ if (mMixer == nullptr) {
+ return;
+ }
+
+ switch (reason) {
+ case REASON_REMOVE:
+ mMixer->destroy(index);
+ break;
+ case REASON_ADD: {
+ const status_t status = mMixer->create(
+ index, fastTrack->mChannelMask, fastTrack->mFormat, AUDIO_SESSION_OUTPUT_MIX);
+ LOG_ALWAYS_FATAL_IF(status != NO_ERROR,
+ "%s: cannot create fast track index"
+ " %d, mask %#x, format %#x in AudioMixer",
+ __func__, index, fastTrack->mChannelMask, fastTrack->mFormat);
+ }
+ [[fallthrough]]; // now fallthrough to update the newly created track.
+ case REASON_MODIFY:
+ mMixer->setBufferProvider(index, fastTrack->mBufferProvider);
+
+ float vlf, vrf;
+ if (fastTrack->mVolumeProvider != nullptr) {
+ const gain_minifloat_packed_t vlr = fastTrack->mVolumeProvider->getVolumeLR();
+ vlf = float_from_gain(gain_minifloat_unpack_left(vlr));
+ vrf = float_from_gain(gain_minifloat_unpack_right(vlr));
+ } else {
+ vlf = vrf = AudioMixer::UNITY_GAIN_FLOAT;
+ }
+
+ // set volume to avoid ramp whenever the track is updated (or created).
+ // Note: this does not distinguish from starting fresh or
+ // resuming from a paused state.
+ mMixer->setParameter(index, AudioMixer::VOLUME, AudioMixer::VOLUME0, &vlf);
+ mMixer->setParameter(index, AudioMixer::VOLUME, AudioMixer::VOLUME1, &vrf);
+
+ mMixer->setParameter(index, AudioMixer::RESAMPLE, AudioMixer::REMOVE, nullptr);
+ mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER,
+ (void *)mMixerBuffer);
+ mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::MIXER_FORMAT,
+ (void *)(uintptr_t)mMixerBufferFormat);
+ mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::FORMAT,
+ (void *)(uintptr_t)fastTrack->mFormat);
+ mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK,
+ (void *)(uintptr_t)fastTrack->mChannelMask);
+ mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::MIXER_CHANNEL_MASK,
+ (void *)(uintptr_t)mSinkChannelMask);
+ mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_ENABLED,
+ (void *)(uintptr_t)fastTrack->mHapticPlaybackEnabled);
+ mMixer->setParameter(index, AudioMixer::TRACK, AudioMixer::HAPTIC_INTENSITY,
+ (void *)(uintptr_t)fastTrack->mHapticIntensity);
+
+ mMixer->enable(index);
+ break;
+ default:
+ LOG_ALWAYS_FATAL("%s: invalid update reason %d", __func__, reason);
+ }
+}
+
void FastMixer::onStateChange()
{
const FastMixerState * const current = (const FastMixerState *) mCurrent;
@@ -240,21 +309,16 @@
// check for change in active track set
const unsigned currentTrackMask = current->mTrackMask;
dumpState->mTrackMask = currentTrackMask;
+ dumpState->mNumTracks = popcount(currentTrackMask);
if (current->mFastTracksGen != mFastTracksGen) {
- ALOG_ASSERT(mMixerBuffer != NULL);
// process removed tracks first to avoid running out of track names
unsigned removedTracks = previousTrackMask & ~currentTrackMask;
while (removedTracks != 0) {
int i = __builtin_ctz(removedTracks);
removedTracks &= ~(1 << i);
- const FastTrack* fastTrack = &current->mFastTracks[i];
- ALOG_ASSERT(fastTrack->mBufferProvider == NULL);
- if (mMixer != NULL) {
- mMixer->destroy(i);
- }
+ updateMixerTrack(i, REASON_REMOVE);
// don't reset track dump state, since other side is ignoring it
- mGenerations[i] = fastTrack->mGeneration;
}
// now process added tracks
@@ -262,40 +326,7 @@
while (addedTracks != 0) {
int i = __builtin_ctz(addedTracks);
addedTracks &= ~(1 << i);
- const FastTrack* fastTrack = &current->mFastTracks[i];
- AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider;
- if (mMixer != NULL) {
- const int name = i; // for clarity, choose name as fast track index.
- status_t status = mMixer->create(
- name,
- fastTrack->mChannelMask,
- fastTrack->mFormat, AUDIO_SESSION_OUTPUT_MIX);
- LOG_ALWAYS_FATAL_IF(status != NO_ERROR,
- "%s: cannot create track name"
- " %d, mask %#x, format %#x, sessionId %d in AudioMixer",
- __func__, name,
- fastTrack->mChannelMask, fastTrack->mFormat, AUDIO_SESSION_OUTPUT_MIX);
- mMixer->setBufferProvider(name, bufferProvider);
- mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MAIN_BUFFER,
- (void *)mMixerBuffer);
- // newly allocated track names default to full scale volume
- mMixer->setParameter(
- name,
- AudioMixer::TRACK,
- AudioMixer::MIXER_FORMAT, (void *)mMixerBufferFormat);
- mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FORMAT,
- (void *)(uintptr_t)fastTrack->mFormat);
- mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK,
- (void *)(uintptr_t)fastTrack->mChannelMask);
- mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MIXER_CHANNEL_MASK,
- (void *)(uintptr_t)mSinkChannelMask);
- mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::HAPTIC_ENABLED,
- (void *)(uintptr_t)fastTrack->mHapticPlaybackEnabled);
- mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::HAPTIC_INTENSITY,
- (void *)(uintptr_t)fastTrack->mHapticIntensity);
- mMixer->enable(name);
- }
- mGenerations[i] = fastTrack->mGeneration;
+ updateMixerTrack(i, REASON_ADD);
}
// finally process (potentially) modified tracks; these use the same slot
@@ -304,44 +335,10 @@
while (modifiedTracks != 0) {
int i = __builtin_ctz(modifiedTracks);
modifiedTracks &= ~(1 << i);
- const FastTrack* fastTrack = &current->mFastTracks[i];
- if (fastTrack->mGeneration != mGenerations[i]) {
- // this track was actually modified
- AudioBufferProvider *bufferProvider = fastTrack->mBufferProvider;
- ALOG_ASSERT(bufferProvider != NULL);
- if (mMixer != NULL) {
- const int name = i;
- mMixer->setBufferProvider(name, bufferProvider);
- if (fastTrack->mVolumeProvider == NULL) {
- float f = AudioMixer::UNITY_GAIN_FLOAT;
- mMixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, &f);
- mMixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME1, &f);
- }
- mMixer->setParameter(name, AudioMixer::RESAMPLE,
- AudioMixer::REMOVE, NULL);
- mMixer->setParameter(
- name,
- AudioMixer::TRACK,
- AudioMixer::MIXER_FORMAT, (void *)mMixerBufferFormat);
- mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::FORMAT,
- (void *)(uintptr_t)fastTrack->mFormat);
- mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::CHANNEL_MASK,
- (void *)(uintptr_t)fastTrack->mChannelMask);
- mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::MIXER_CHANNEL_MASK,
- (void *)(uintptr_t)mSinkChannelMask);
- mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::HAPTIC_ENABLED,
- (void *)(uintptr_t)fastTrack->mHapticPlaybackEnabled);
- mMixer->setParameter(name, AudioMixer::TRACK, AudioMixer::HAPTIC_INTENSITY,
- (void *)(uintptr_t)fastTrack->mHapticIntensity);
- // already enabled
- }
- mGenerations[i] = fastTrack->mGeneration;
- }
+ updateMixerTrack(i, REASON_MODIFY);
}
mFastTracksGen = current->mFastTracksGen;
-
- dumpState->mNumTracks = popcount(currentTrackMask);
}
}
@@ -408,8 +405,8 @@
float vlf = float_from_gain(gain_minifloat_unpack_left(vlr));
float vrf = float_from_gain(gain_minifloat_unpack_right(vlr));
- mMixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME0, &vlf);
- mMixer->setParameter(name, AudioMixer::VOLUME, AudioMixer::VOLUME1, &vrf);
+ mMixer->setParameter(name, AudioMixer::RAMP_VOLUME, AudioMixer::VOLUME0, &vlf);
+ mMixer->setParameter(name, AudioMixer::RAMP_VOLUME, AudioMixer::VOLUME1, &vrf);
}
// FIXME The current implementation of framesReady() for fast tracks
// takes a tryLock, which can block
diff --git a/services/audioflinger/FastMixer.h b/services/audioflinger/FastMixer.h
index c31d476..97ab635 100644
--- a/services/audioflinger/FastMixer.h
+++ b/services/audioflinger/FastMixer.h
@@ -59,6 +59,14 @@
virtual void onStateChange();
virtual void onWork();
+ enum Reason {
+ REASON_REMOVE,
+ REASON_ADD,
+ REASON_MODIFY,
+ };
+ // called when a fast track of index has been removed, added, or modified
+ void updateMixerTrack(int index, Reason reason);
+
// FIXME these former local variables need comments
static const FastMixerState sInitial;
diff --git a/services/audioflinger/PatchPanel.h b/services/audioflinger/PatchPanel.h
index 2d9bd8e..612855f 100644
--- a/services/audioflinger/PatchPanel.h
+++ b/services/audioflinger/PatchPanel.h
@@ -81,13 +81,16 @@
class Endpoint {
public:
Endpoint() = default;
- Endpoint(Endpoint&& other) { *this = std::move(other); }
- Endpoint& operator=(Endpoint&& other) {
+ Endpoint(const Endpoint&) = delete;
+ Endpoint& operator=(const Endpoint&) = delete;
+ Endpoint(Endpoint&& other) noexcept { swap(other); }
+ Endpoint& operator=(Endpoint&& other) noexcept {
+ swap(other);
+ return *this;
+ }
+ ~Endpoint() {
ALOGE_IF(mHandle != AUDIO_PATCH_HANDLE_NONE,
"A non empty Patch Endpoint leaked, handle %d", mHandle);
- *this = other;
- other.mHandle = AUDIO_PATCH_HANDLE_NONE;
- return *this;
}
status_t checkTrack(TrackType *trackOrNull) const {
@@ -127,10 +130,19 @@
}
void stopTrack() { if (mTrack) mTrack->stop(); }
- private:
- Endpoint(const Endpoint&) = default;
- Endpoint& operator=(const Endpoint&) = default;
+ void swap(Endpoint &other) noexcept {
+ using std::swap;
+ swap(mThread, other.mThread);
+ swap(mCloseThread, other.mCloseThread);
+ swap(mHandle, other.mHandle);
+ swap(mTrack, other.mTrack);
+ }
+ friend void swap(Endpoint &a, Endpoint &b) noexcept {
+ a.swap(b);
+ }
+
+ private:
sp<ThreadType> mThread;
bool mCloseThread = true;
audio_patch_handle_t mHandle = AUDIO_PATCH_HANDLE_NONE;
diff --git a/services/audiopolicy/engine/config/Android.mk b/services/audiopolicy/engine/config/Android.mk
index cb35c37..fe7d961 100644
--- a/services/audiopolicy/engine/config/Android.mk
+++ b/services/audiopolicy/engine/config/Android.mk
@@ -19,7 +19,6 @@
LOCAL_CFLAGS += -Wall -Werror -Wextra
LOCAL_SHARED_LIBRARIES := \
- libparameter \
libmedia_helper \
libandroidicu \
libxml2 \
diff --git a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
index 391e38d..8f0a213 100644
--- a/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
+++ b/services/audiopolicy/managerdefault/AudioPolicyManager.cpp
@@ -33,6 +33,8 @@
#define AUDIO_POLICY_XML_CONFIG_FILE_NAME "audio_policy_configuration.xml"
#define AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME \
"audio_policy_configuration_a2dp_offload_disabled.xml"
+#define AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME \
+ "audio_policy_configuration_bluetooth_hal_enabled.xml"
#include <inttypes.h>
#include <math.h>
@@ -4035,7 +4037,11 @@
if (property_get_bool("ro.bluetooth.a2dp_offload.supported", false) &&
property_get_bool("persist.bluetooth.a2dp_offload.disabled", false)) {
// A2DP offload supported but disabled: try to use special XML file
- fileNames.push_back(AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME);
+ if (property_get_bool("persist.bluetooth.bluetooth_audio_hal.enabled", false)) {
+ fileNames.push_back(AUDIO_POLICY_BLUETOOTH_HAL_ENABLED_XML_CONFIG_FILE_NAME);
+ } else {
+ fileNames.push_back(AUDIO_POLICY_A2DP_OFFLOAD_DISABLED_XML_CONFIG_FILE_NAME);
+ }
}
fileNames.push_back(AUDIO_POLICY_XML_CONFIG_FILE_NAME);
diff --git a/services/camera/libcameraservice/Android.bp b/services/camera/libcameraservice/Android.bp
index a090479..2d923bf 100644
--- a/services/camera/libcameraservice/Android.bp
+++ b/services/camera/libcameraservice/Android.bp
@@ -41,6 +41,8 @@
"api2/CameraDeviceClient.cpp",
"api2/CompositeStream.cpp",
"api2/DepthCompositeStream.cpp",
+ "api2/HeicEncoderInfoManager.cpp",
+ "api2/HeicCompositeStream.cpp",
"device1/CameraHardwareInterface.cpp",
"device3/Camera3Device.cpp",
"device3/Camera3Stream.cpp",
@@ -62,12 +64,14 @@
"hidl/HidlCameraService.cpp",
"utils/CameraTraces.cpp",
"utils/AutoConditionLock.cpp",
+ "utils/ExifUtils.cpp",
"utils/TagMonitor.cpp",
"utils/LatencyHistogram.cpp",
],
shared_libs: [
"libdl",
+ "libexif",
"libui",
"liblog",
"libutilscallstack",
@@ -85,8 +89,10 @@
"libhidlbase",
"libhidltransport",
"libjpeg",
+ "libmedia_omx",
"libmemunreachable",
"libsensorprivacy",
+ "libstagefright",
"libstagefright_foundation",
"android.frameworks.cameraservice.common@2.0",
"android.frameworks.cameraservice.service@2.0",
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index c3113bf..ee8d7e1 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -49,6 +49,7 @@
#include <hardware/hardware.h>
#include "hidl/HidlCameraService.h"
#include <hidl/HidlTransportSupport.h>
+#include <hwbinder/IPCThreadState.h>
#include <memunreachable/memunreachable.h>
#include <media/AudioSystem.h>
#include <media/IMediaHTTPService.h>
@@ -226,7 +227,7 @@
Mutex::Autolock lock(mStatusListenerLock);
for (auto& i : mListenerList) {
- i->onTorchStatusChanged(mapToInterface(status), String16{cameraId});
+ i.second->onTorchStatusChanged(mapToInterface(status), String16{cameraId});
}
}
@@ -1287,6 +1288,18 @@
return ret;
}
+bool CameraService::shouldRejectHiddenCameraConnection(const String8 & cameraId) {
+ // If the thread serving this call is not a hwbinder thread and the caller
+ // isn't the cameraserver itself, and the camera id being requested is to be
+ // publically hidden, we should reject the connection.
+ if (!hardware::IPCThreadState::self()->isServingCall() &&
+ CameraThreadState::getCallingPid() != getpid() &&
+ mCameraProviderManager->isPublicallyHiddenSecureCamera(cameraId.c_str())) {
+ return true;
+ }
+ return false;
+}
+
Status CameraService::connectDevice(
const sp<hardware::camera2::ICameraDeviceCallbacks>& cameraCb,
const String16& cameraId,
@@ -1299,6 +1312,7 @@
Status ret = Status::ok();
String8 id = String8(cameraId);
sp<CameraDeviceClient> client = nullptr;
+
ret = connectHelper<hardware::camera2::ICameraDeviceCallbacks,CameraDeviceClient>(cameraCb, id,
/*api1CameraId*/-1,
CAMERA_HAL_API_VERSION_UNSPECIFIED, clientPackageName,
@@ -1330,6 +1344,14 @@
(halVersion == -1) ? "default" : std::to_string(halVersion).c_str(),
static_cast<int>(effectiveApiLevel));
+ if (shouldRejectHiddenCameraConnection(cameraId)) {
+ ALOGW("Attempting to connect to system-only camera id %s, connection rejected",
+ cameraId.c_str());
+ return STATUS_ERROR_FMT(ERROR_DISCONNECTED,
+ "No camera device with ID \"%s\" currently available",
+ cameraId.string());
+
+ }
sp<CLIENT> client = nullptr;
{
// Acquire mServiceLock and prevent other clients from connecting
@@ -1635,6 +1657,14 @@
Status CameraService::addListener(const sp<ICameraServiceListener>& listener,
/*out*/
std::vector<hardware::CameraStatus> *cameraStatuses) {
+ return addListenerHelper(listener, cameraStatuses);
+}
+
+Status CameraService::addListenerHelper(const sp<ICameraServiceListener>& listener,
+ /*out*/
+ std::vector<hardware::CameraStatus> *cameraStatuses,
+ bool isVendorListener) {
+
ATRACE_CALL();
ALOGV("%s: Add listener %p", __FUNCTION__, listener.get());
@@ -1649,20 +1679,26 @@
{
Mutex::Autolock lock(mStatusListenerLock);
for (auto& it : mListenerList) {
- if (IInterface::asBinder(it) == IInterface::asBinder(listener)) {
+ if (IInterface::asBinder(it.second) == IInterface::asBinder(listener)) {
ALOGW("%s: Tried to add listener %p which was already subscribed",
__FUNCTION__, listener.get());
return STATUS_ERROR(ERROR_ALREADY_EXISTS, "Listener already registered");
}
}
- mListenerList.push_back(listener);
+ mListenerList.emplace_back(isVendorListener, listener);
}
/* Collect current devices and status */
{
Mutex::Autolock lock(mCameraStatesLock);
for (auto& i : mCameraStates) {
+ if (!isVendorListener &&
+ mCameraProviderManager->isPublicallyHiddenSecureCamera(i.first.c_str())) {
+ ALOGV("Cannot add public listener for hidden system-only %s for pid %d",
+ i.first.c_str(), CameraThreadState::getCallingPid());
+ continue;
+ }
cameraStatuses->emplace_back(i.first, mapToInterface(i.second->getStatus()));
}
}
@@ -1697,7 +1733,7 @@
{
Mutex::Autolock lock(mStatusListenerLock);
for (auto it = mListenerList.begin(); it != mListenerList.end(); it++) {
- if (IInterface::asBinder(*it) == IInterface::asBinder(listener)) {
+ if (IInterface::asBinder(it->second) == IInterface::asBinder(listener)) {
mListenerList.erase(it);
return Status::ok();
}
@@ -3033,7 +3069,13 @@
Mutex::Autolock lock(mStatusListenerLock);
for (auto& listener : mListenerList) {
- listener->onStatusChanged(mapToInterface(status), String16(cameraId));
+ if (!listener.first &&
+ mCameraProviderManager->isPublicallyHiddenSecureCamera(cameraId.c_str())) {
+ ALOGV("Skipping camera discovery callback for system-only camera %s",
+ cameraId.c_str());
+ continue;
+ }
+ listener.second->onStatusChanged(mapToInterface(status), String16(cameraId));
}
});
}
diff --git a/services/camera/libcameraservice/CameraService.h b/services/camera/libcameraservice/CameraService.h
index a296198..3af52fa 100644
--- a/services/camera/libcameraservice/CameraService.h
+++ b/services/camera/libcameraservice/CameraService.h
@@ -173,6 +173,10 @@
virtual status_t shellCommand(int in, int out, int err, const Vector<String16>& args);
+ binder::Status addListenerHelper(const sp<hardware::ICameraServiceListener>& listener,
+ /*out*/
+ std::vector<hardware::CameraStatus>* cameraStatuses, bool isVendor = false);
+
/////////////////////////////////////////////////////////////////////
// Client functionality
@@ -615,6 +619,10 @@
sp<BasicClient>* client,
std::shared_ptr<resource_policy::ClientDescriptor<String8, sp<BasicClient>>>* partial);
+ // Should an operation attempt on a cameraId be rejected, if the camera id is
+ // advertised as a publicly hidden secure camera, by the camera HAL?
+ bool shouldRejectHiddenCameraConnection(const String8 & cameraId);
+
// Single implementation shared between the various connect calls
template<class CALLBACK, class CLIENT>
binder::Status connectHelper(const sp<CALLBACK>& cameraCb, const String8& cameraId,
@@ -781,7 +789,8 @@
sp<CameraProviderManager> mCameraProviderManager;
// Guarded by mStatusListenerMutex
- std::vector<sp<hardware::ICameraServiceListener>> mListenerList;
+ std::vector<std::pair<bool, sp<hardware::ICameraServiceListener>>> mListenerList;
+
Mutex mStatusListenerLock;
/**
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
index e6f75f4..36395f3 100755
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.cpp
@@ -62,7 +62,8 @@
}
}
-void JpegProcessor::onBufferRequestForFrameNumber(uint64_t /*frameNumber*/, int /*streamId*/) {
+void JpegProcessor::onBufferRequestForFrameNumber(uint64_t /*frameNumber*/,
+ int /*streamId*/, const CameraMetadata& /*settings*/) {
// Intentionally left empty
}
diff --git a/services/camera/libcameraservice/api1/client2/JpegProcessor.h b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
index 2ee930e..53e6836 100644
--- a/services/camera/libcameraservice/api1/client2/JpegProcessor.h
+++ b/services/camera/libcameraservice/api1/client2/JpegProcessor.h
@@ -54,7 +54,8 @@
// Camera3StreamBufferListener implementation
void onBufferAcquired(const BufferInfo& bufferInfo) override;
void onBufferReleased(const BufferInfo& bufferInfo) override;
- void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) override;
+ void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
+ const CameraMetadata& settings) override;
status_t updateStream(const Parameters ¶ms);
status_t deleteStream();
diff --git a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
index 9e203da..b512f2b 100644
--- a/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
+++ b/services/camera/libcameraservice/api2/CameraDeviceClient.cpp
@@ -34,6 +34,7 @@
#include <camera_metadata_hidden.h>
#include "DepthCompositeStream.h"
+#include "HeicCompositeStream.h"
// Convenience methods for constructing binder::Status objects for error returns
@@ -711,21 +712,35 @@
return res;
if (!isStreamInfoValid) {
- if (camera3::DepthCompositeStream::isDepthCompositeStream(surface)) {
+ bool isDepthCompositeStream =
+ camera3::DepthCompositeStream::isDepthCompositeStream(surface);
+ bool isHeicCompositeStream =
+ camera3::HeicCompositeStream::isHeicCompositeStream(surface);
+ if (isDepthCompositeStream || isHeicCompositeStream) {
// We need to take in to account that composite streams can have
// additional internal camera streams.
std::vector<OutputStreamInfo> compositeStreams;
- ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
+ if (isDepthCompositeStream) {
+ ret = camera3::DepthCompositeStream::getCompositeStreamInfo(streamInfo,
+ mDevice->info(), &compositeStreams);
+ } else {
+ ret = camera3::HeicCompositeStream::getCompositeStreamInfo(streamInfo,
mDevice->info(), &compositeStreams);
+ }
if (ret != OK) {
String8 msg = String8::format(
- "Camera %s: Failed adding depth composite streams: %s (%d)",
+ "Camera %s: Failed adding composite streams: %s (%d)",
mCameraIdStr.string(), strerror(-ret), ret);
ALOGE("%s: %s", __FUNCTION__, msg.string());
return STATUS_ERROR(CameraService::ERROR_ILLEGAL_ARGUMENT, msg.string());
}
- if (compositeStreams.size() > 1) {
+ if (compositeStreams.size() == 0) {
+ // No internal streams means composite stream not
+ // supported.
+ *status = false;
+ return binder::Status::ok();
+ } else if (compositeStreams.size() > 1) {
streamCount += compositeStreams.size() - 1;
streamConfiguration.streams.resize(streamCount);
}
@@ -937,15 +952,16 @@
int streamId = camera3::CAMERA3_STREAM_ID_INVALID;
std::vector<int> surfaceIds;
- if (!camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0])) {
- err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
- streamInfo.height, streamInfo.format, streamInfo.dataSpace,
- static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
- &streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
- isShared);
- } else {
- sp<CompositeStream> compositeStream = new camera3::DepthCompositeStream(mDevice,
- getRemoteCallback());
+ bool isDepthCompositeStream = camera3::DepthCompositeStream::isDepthCompositeStream(surfaces[0]);
+ bool isHeicCompisiteStream = camera3::HeicCompositeStream::isHeicCompositeStream(surfaces[0]);
+ if (isDepthCompositeStream || isHeicCompisiteStream) {
+ sp<CompositeStream> compositeStream;
+ if (isDepthCompositeStream) {
+ compositeStream = new camera3::DepthCompositeStream(mDevice, getRemoteCallback());
+ } else {
+ compositeStream = new camera3::HeicCompositeStream(mDevice, getRemoteCallback());
+ }
+
err = compositeStream->createStream(surfaces, deferredConsumer, streamInfo.width,
streamInfo.height, streamInfo.format,
static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
@@ -955,6 +971,12 @@
mCompositeStreamMap.add(IInterface::asBinder(surfaces[0]->getIGraphicBufferProducer()),
compositeStream);
}
+ } else {
+ err = mDevice->createStream(surfaces, deferredConsumer, streamInfo.width,
+ streamInfo.height, streamInfo.format, streamInfo.dataSpace,
+ static_cast<camera3_stream_rotation_t>(outputConfiguration.getRotation()),
+ &streamId, physicalCameraId, &surfaceIds, outputConfiguration.getSurfaceSetID(),
+ isShared);
}
if (err != OK) {
@@ -1437,6 +1459,8 @@
camera_metadata_ro_entry streamConfigs =
(dataSpace == HAL_DATASPACE_DEPTH) ?
info.find(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS) :
+ (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_HEIF)) ?
+ info.find(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS) :
info.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
int32_t bestWidth = -1;
@@ -1930,6 +1954,10 @@
remoteCb->onCaptureStarted(resultExtras, timestamp);
}
Camera2ClientBase::notifyShutter(resultExtras, timestamp);
+
+ for (size_t i = 0; i < mCompositeStreamMap.size(); i++) {
+ mCompositeStreamMap.valueAt(i)->onShutter(resultExtras, timestamp);
+ }
}
void CameraDeviceClient::notifyPrepared(int streamId) {
diff --git a/services/camera/libcameraservice/api2/CompositeStream.cpp b/services/camera/libcameraservice/api2/CompositeStream.cpp
index 796bf42..354eaf9 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.cpp
+++ b/services/camera/libcameraservice/api2/CompositeStream.cpp
@@ -82,7 +82,8 @@
return deleteInternalStreams();
}
-void CompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) {
+void CompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
+ const CameraMetadata& /*settings*/) {
Mutex::Autolock l(mMutex);
if (!mErrorState && (streamId == getStreamId())) {
mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
diff --git a/services/camera/libcameraservice/api2/CompositeStream.h b/services/camera/libcameraservice/api2/CompositeStream.h
index 5837745..a401a82 100644
--- a/services/camera/libcameraservice/api2/CompositeStream.h
+++ b/services/camera/libcameraservice/api2/CompositeStream.h
@@ -23,6 +23,7 @@
#include <android/hardware/camera2/ICameraDeviceCallbacks.h>
#include <camera/CameraMetadata.h>
#include <camera/camera2/OutputConfiguration.h>
+#include <gui/IProducerListener.h>
#include "common/CameraDeviceBase.h"
#include "device3/Camera3StreamInterface.h"
@@ -66,15 +67,24 @@
// Return composite stream id.
virtual int getStreamId() = 0;
+ // Notify when shutter notify is triggered
+ virtual void onShutter(const CaptureResultExtras& /*resultExtras*/, nsecs_t /*timestamp*/) {}
+
void onResultAvailable(const CaptureResult& result);
bool onError(int32_t errorCode, const CaptureResultExtras& resultExtras);
// Camera3StreamBufferListener implementation
void onBufferAcquired(const BufferInfo& /*bufferInfo*/) override { /*Empty for now */ }
void onBufferReleased(const BufferInfo& bufferInfo) override;
- void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) override;
+ void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
+ const CameraMetadata& settings) override;
protected:
+ struct ProducerListener : public BnProducerListener {
+ // ProducerListener implementation
+ void onBufferReleased() override { /*No impl. for now*/ };
+ };
+
status_t registerCompositeStreamListener(int32_t streamId);
void eraseResult(int64_t frameNumber);
void flagAnErrorFrameNumber(int64_t frameNumber);
diff --git a/services/camera/libcameraservice/api2/DepthCompositeStream.h b/services/camera/libcameraservice/api2/DepthCompositeStream.h
index e8fe517..1bf31f4 100644
--- a/services/camera/libcameraservice/api2/DepthCompositeStream.h
+++ b/services/camera/libcameraservice/api2/DepthCompositeStream.h
@@ -21,7 +21,6 @@
#include <dynamic_depth/imaging_model.h>
#include <dynamic_depth/depth_map.h>
-#include <gui/IProducerListener.h>
#include <gui/CpuConsumer.h>
#include "CompositeStream.h"
@@ -116,11 +115,6 @@
static const auto kDepthMapDataSpace = HAL_DATASPACE_DEPTH;
static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
- struct ProducerListener : public BnProducerListener {
- // ProducerListener implementation
- void onBufferReleased() override { /*No impl. for now*/ };
- };
-
int mBlobStreamId, mBlobSurfaceId, mDepthStreamId, mDepthSurfaceId;
size_t mBlobWidth, mBlobHeight;
sp<CpuConsumer> mBlobConsumer, mDepthConsumer;
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.cpp b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
new file mode 100644
index 0000000..3eba863
--- /dev/null
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.cpp
@@ -0,0 +1,1606 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "Camera3-HeicCompositeStream"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <linux/memfd.h>
+#include <pthread.h>
+#include <sys/syscall.h>
+
+#include <android/hardware/camera/device/3.5/types.h>
+#include <gui/Surface.h>
+#include <utils/Log.h>
+#include <utils/Trace.h>
+
+#include <media/ICrypto.h>
+#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/foundation/ABuffer.h>
+#include <media/stagefright/foundation/AMessage.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include <media/stagefright/MediaCodecConstants.h>
+
+#include "common/CameraDeviceBase.h"
+#include "utils/ExifUtils.h"
+#include "HeicEncoderInfoManager.h"
+#include "HeicCompositeStream.h"
+
+using android::hardware::camera::device::V3_5::CameraBlob;
+using android::hardware::camera::device::V3_5::CameraBlobId;
+
+namespace android {
+namespace camera3 {
+
+HeicCompositeStream::HeicCompositeStream(wp<CameraDeviceBase> device,
+ wp<hardware::camera2::ICameraDeviceCallbacks> cb) :
+ CompositeStream(device, cb),
+ mUseHeic(false),
+ mNumOutputTiles(1),
+ mOutputWidth(0),
+ mOutputHeight(0),
+ mMaxHeicBufferSize(0),
+ mGridWidth(HeicEncoderInfoManager::kGridWidth),
+ mGridHeight(HeicEncoderInfoManager::kGridHeight),
+ mGridRows(1),
+ mGridCols(1),
+ mUseGrid(false),
+ mAppSegmentStreamId(-1),
+ mAppSegmentSurfaceId(-1),
+ mAppSegmentBufferAcquired(false),
+ mMainImageStreamId(-1),
+ mMainImageSurfaceId(-1),
+ mYuvBufferAcquired(false),
+ mProducerListener(new ProducerListener()),
+ mOutputBufferCounter(0),
+ mGridTimestampUs(0) {
+}
+
+HeicCompositeStream::~HeicCompositeStream() {
+ // Call deinitCodec in case stream hasn't been deleted yet to avoid any
+ // memory/resource leak.
+ deinitCodec();
+
+ mInputAppSegmentBuffers.clear();
+ mCodecOutputBuffers.clear();
+
+ mAppSegmentStreamId = -1;
+ mAppSegmentSurfaceId = -1;
+ mAppSegmentConsumer.clear();
+ mAppSegmentSurface.clear();
+
+ mMainImageStreamId = -1;
+ mMainImageSurfaceId = -1;
+ mMainImageConsumer.clear();
+ mMainImageSurface.clear();
+}
+
+bool HeicCompositeStream::isHeicCompositeStream(const sp<Surface> &surface) {
+ ANativeWindow *anw = surface.get();
+ status_t err;
+ int format;
+ if ((err = anw->query(anw, NATIVE_WINDOW_FORMAT, &format)) != OK) {
+ String8 msg = String8::format("Failed to query Surface format: %s (%d)", strerror(-err),
+ err);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return false;
+ }
+
+ int dataspace;
+ if ((err = anw->query(anw, NATIVE_WINDOW_DEFAULT_DATASPACE, &dataspace)) != OK) {
+ String8 msg = String8::format("Failed to query Surface dataspace: %s (%d)", strerror(-err),
+ err);
+ ALOGE("%s: %s", __FUNCTION__, msg.string());
+ return false;
+ }
+
+ return ((format == HAL_PIXEL_FORMAT_BLOB) && (dataspace == HAL_DATASPACE_HEIF));
+}
+
+status_t HeicCompositeStream::createInternalStreams(const std::vector<sp<Surface>>& consumers,
+ bool /*hasDeferredConsumer*/, uint32_t width, uint32_t height, int format,
+ camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+ std::vector<int> *surfaceIds, int /*streamSetId*/, bool /*isShared*/) {
+
+ sp<CameraDeviceBase> device = mDevice.promote();
+ if (!device.get()) {
+ ALOGE("%s: Invalid camera device!", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ status_t res = initializeCodec(width, height, device);
+ if (res != OK) {
+ ALOGE("%s: Failed to initialize HEIC/HEVC codec: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return NO_INIT;
+ }
+
+ sp<IGraphicBufferProducer> producer;
+ sp<IGraphicBufferConsumer> consumer;
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mAppSegmentConsumer = new CpuConsumer(consumer, 1);
+ mAppSegmentConsumer->setFrameAvailableListener(this);
+ mAppSegmentConsumer->setName(String8("Camera3-HeicComposite-AppSegmentStream"));
+ mAppSegmentSurface = new Surface(producer);
+
+ res = device->createStream(mAppSegmentSurface, mAppSegmentMaxSize, 1, format,
+ kAppSegmentDataSpace, rotation, &mAppSegmentStreamId, physicalCameraId, surfaceIds);
+ if (res == OK) {
+ mAppSegmentSurfaceId = (*surfaceIds)[0];
+ } else {
+ ALOGE("%s: Failed to create JPEG App segment stream: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ if (!mUseGrid) {
+ res = mCodec->createInputSurface(&producer);
+ if (res != OK) {
+ ALOGE("%s: Failed to create input surface for Heic codec: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ return res;
+ }
+ } else {
+ BufferQueue::createBufferQueue(&producer, &consumer);
+ mMainImageConsumer = new CpuConsumer(consumer, 1);
+ mMainImageConsumer->setFrameAvailableListener(this);
+ mMainImageConsumer->setName(String8("Camera3-HeicComposite-HevcInputYUVStream"));
+ }
+ mMainImageSurface = new Surface(producer);
+
+ res = mCodec->start();
+ if (res != OK) {
+ ALOGE("%s: Failed to start codec: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ std::vector<int> sourceSurfaceId;
+ //Use YUV_888 format if framework tiling is needed.
+ int srcStreamFmt = mUseGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ res = device->createStream(mMainImageSurface, width, height, srcStreamFmt, kHeifDataSpace,
+ rotation, id, physicalCameraId, &sourceSurfaceId);
+ if (res == OK) {
+ mMainImageSurfaceId = sourceSurfaceId[0];
+ mMainImageStreamId = *id;
+ } else {
+ ALOGE("%s: Failed to create main image stream: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ return res;
+ }
+
+ mOutputSurface = consumers[0];
+ res = registerCompositeStreamListener(getStreamId());
+ if (res != OK) {
+ ALOGE("%s: Failed to register HAL main image stream", __FUNCTION__);
+ return res;
+ }
+
+ return res;
+}
+
+status_t HeicCompositeStream::deleteInternalStreams() {
+ requestExit();
+ auto res = join();
+ if (res != OK) {
+ ALOGE("%s: Failed to join with the main processing thread: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ }
+
+ deinitCodec();
+
+ if (mAppSegmentStreamId >= 0) {
+ sp<CameraDeviceBase> device = mDevice.promote();
+ if (!device.get()) {
+ ALOGE("%s: Invalid camera device!", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ res = device->deleteStream(mAppSegmentStreamId);
+ mAppSegmentStreamId = -1;
+ }
+
+ return res;
+}
+
+void HeicCompositeStream::onBufferReleased(const BufferInfo& bufferInfo) {
+ Mutex::Autolock l(mMutex);
+
+ if (bufferInfo.mError) return;
+
+ mCodecOutputBufferTimestamps.push(bufferInfo.mTimestamp);
+}
+
+// We need to get the settings early to handle the case where the codec output
+// arrives earlier than result metadata.
+void HeicCompositeStream::onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
+ const CameraMetadata& settings) {
+ ATRACE_ASYNC_BEGIN("HEIC capture", frameNumber);
+
+ Mutex::Autolock l(mMutex);
+ if (mErrorState || (streamId != getStreamId())) {
+ return;
+ }
+
+ mPendingCaptureResults.emplace(frameNumber, CameraMetadata());
+
+ camera_metadata_ro_entry entry;
+
+ int32_t orientation = 0;
+ entry = settings.find(ANDROID_JPEG_ORIENTATION);
+ if (entry.count == 1) {
+ orientation = entry.data.i32[0];
+ }
+
+ int32_t quality = kDefaultJpegQuality;
+ entry = settings.find(ANDROID_JPEG_QUALITY);
+ if (entry.count == 1) {
+ quality = entry.data.i32[0];
+ }
+
+ mSettingsByFrameNumber[frameNumber] = std::make_pair(orientation, quality);
+}
+
+void HeicCompositeStream::onFrameAvailable(const BufferItem& item) {
+ if (item.mDataSpace == static_cast<android_dataspace>(kAppSegmentDataSpace)) {
+ ALOGV("%s: JPEG APP segments buffer with ts: %" PRIu64 " ms. arrived!",
+ __func__, ns2ms(item.mTimestamp));
+
+ Mutex::Autolock l(mMutex);
+ if (!mErrorState) {
+ mInputAppSegmentBuffers.push_back(item.mTimestamp);
+ mInputReadyCondition.signal();
+ }
+ } else if (item.mDataSpace == kHeifDataSpace) {
+ ALOGV("%s: YUV_888 buffer with ts: %" PRIu64 " ms. arrived!",
+ __func__, ns2ms(item.mTimestamp));
+
+ Mutex::Autolock l(mMutex);
+ if (!mUseGrid) {
+ ALOGE("%s: YUV_888 internal stream is only supported for HEVC tiling",
+ __FUNCTION__);
+ return;
+ }
+ if (!mErrorState) {
+ mInputYuvBuffers.push_back(item.mTimestamp);
+ mInputReadyCondition.signal();
+ }
+ } else {
+ ALOGE("%s: Unexpected data space: 0x%x", __FUNCTION__, item.mDataSpace);
+ }
+}
+
+status_t HeicCompositeStream::getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+ const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/) {
+ if (compositeOutput == nullptr) {
+ return BAD_VALUE;
+ }
+
+ compositeOutput->clear();
+
+ bool useGrid, useHeic;
+ bool isSizeSupported = isSizeSupportedByHeifEncoder(
+ streamInfo.width, streamInfo.height, &useHeic, &useGrid, nullptr);
+ if (!isSizeSupported) {
+ // Size is not supported by either encoder.
+ return OK;
+ }
+
+ compositeOutput->insert(compositeOutput->end(), 2, streamInfo);
+
+ // JPEG APPS segments Blob stream info
+ (*compositeOutput)[0].width = calcAppSegmentMaxSize(ch);
+ (*compositeOutput)[0].height = 1;
+ (*compositeOutput)[0].format = HAL_PIXEL_FORMAT_BLOB;
+ (*compositeOutput)[0].dataSpace = kAppSegmentDataSpace;
+ (*compositeOutput)[0].consumerUsage = GRALLOC_USAGE_SW_READ_OFTEN;
+
+ // YUV/IMPLEMENTATION_DEFINED stream info
+ (*compositeOutput)[1].width = streamInfo.width;
+ (*compositeOutput)[1].height = streamInfo.height;
+ (*compositeOutput)[1].format = useGrid ? HAL_PIXEL_FORMAT_YCbCr_420_888 :
+ HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
+ (*compositeOutput)[1].dataSpace = kHeifDataSpace;
+ (*compositeOutput)[1].consumerUsage = useHeic ? GRALLOC_USAGE_HW_IMAGE_ENCODER :
+ useGrid ? GRALLOC_USAGE_SW_READ_OFTEN : GRALLOC_USAGE_HW_VIDEO_ENCODER;
+
+ return NO_ERROR;
+}
+
+bool HeicCompositeStream::isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
+ bool* useHeic, bool* useGrid, int64_t* stall) {
+ static HeicEncoderInfoManager& heicManager = HeicEncoderInfoManager::getInstance();
+ return heicManager.isSizeSupported(width, height, useHeic, useGrid, stall);
+}
+
+bool HeicCompositeStream::isInMemoryTempFileSupported() {
+ int memfd = syscall(__NR_memfd_create, "HEIF-try-memfd", MFD_CLOEXEC);
+ if (memfd == -1) {
+ if (errno != ENOSYS) {
+ ALOGE("%s: Failed to create tmpfs file. errno %d", __FUNCTION__, errno);
+ }
+ return false;
+ }
+ close(memfd);
+ return true;
+}
+
+void HeicCompositeStream::onHeicOutputFrameAvailable(
+ const CodecOutputBufferInfo& outputBufferInfo) {
+ Mutex::Autolock l(mMutex);
+
+ ALOGV("%s: index %d, offset %d, size %d, time %" PRId64 ", flags 0x%x",
+ __FUNCTION__, outputBufferInfo.index, outputBufferInfo.offset,
+ outputBufferInfo.size, outputBufferInfo.timeUs, outputBufferInfo.flags);
+
+ if (!mErrorState) {
+ if ((outputBufferInfo.size > 0) &&
+ ((outputBufferInfo.flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0)) {
+ mCodecOutputBuffers.push_back(outputBufferInfo);
+ mInputReadyCondition.signal();
+ } else {
+ mCodec->releaseOutputBuffer(outputBufferInfo.index);
+ }
+ } else {
+ mCodec->releaseOutputBuffer(outputBufferInfo.index);
+ }
+}
+
+void HeicCompositeStream::onHeicInputFrameAvailable(int32_t index) {
+ Mutex::Autolock l(mMutex);
+
+ if (!mUseGrid) {
+ ALOGE("%s: Codec YUV input mode must only be used for Hevc tiling mode", __FUNCTION__);
+ return;
+ }
+
+ mCodecInputBuffers.push_back(index);
+ mInputReadyCondition.signal();
+}
+
+void HeicCompositeStream::onHeicFormatChanged(sp<AMessage>& newFormat) {
+ if (newFormat == nullptr) {
+ ALOGE("%s: newFormat must not be null!", __FUNCTION__);
+ return;
+ }
+
+ Mutex::Autolock l(mMutex);
+
+ AString mime;
+ AString mimeHeic(MIMETYPE_IMAGE_ANDROID_HEIC);
+ newFormat->findString(KEY_MIME, &mime);
+ if (mime != mimeHeic) {
+ // For HEVC codec, below keys need to be filled out or overwritten so that the
+ // muxer can handle them as HEIC output image.
+ newFormat->setString(KEY_MIME, mimeHeic);
+ newFormat->setInt32(KEY_WIDTH, mOutputWidth);
+ newFormat->setInt32(KEY_HEIGHT, mOutputHeight);
+ if (mUseGrid) {
+ newFormat->setInt32(KEY_TILE_WIDTH, mGridWidth);
+ newFormat->setInt32(KEY_TILE_HEIGHT, mGridHeight);
+ newFormat->setInt32(KEY_GRID_ROWS, mGridRows);
+ newFormat->setInt32(KEY_GRID_COLUMNS, mGridCols);
+ }
+ }
+ newFormat->setInt32(KEY_IS_DEFAULT, 1 /*isPrimary*/);
+
+ int32_t gridRows, gridCols;
+ if (newFormat->findInt32(KEY_GRID_ROWS, &gridRows) &&
+ newFormat->findInt32(KEY_GRID_COLUMNS, &gridCols)) {
+ mNumOutputTiles = gridRows * gridCols;
+ } else {
+ mNumOutputTiles = 1;
+ }
+
+ ALOGV("%s: mNumOutputTiles is %zu", __FUNCTION__, mNumOutputTiles);
+ mFormat = newFormat;
+}
+
+void HeicCompositeStream::onHeicCodecError() {
+ Mutex::Autolock l(mMutex);
+ mErrorState = true;
+}
+
+status_t HeicCompositeStream::configureStream() {
+ if (isRunning()) {
+ // Processing thread is already running, nothing more to do.
+ return NO_ERROR;
+ }
+
+ if (mOutputSurface.get() == nullptr) {
+ ALOGE("%s: No valid output surface set!", __FUNCTION__);
+ return NO_INIT;
+ }
+
+ auto res = mOutputSurface->connect(NATIVE_WINDOW_API_CAMERA, mProducerListener);
+ if (res != OK) {
+ ALOGE("%s: Unable to connect to native window for stream %d",
+ __FUNCTION__, mMainImageStreamId);
+ return res;
+ }
+
+ if ((res = native_window_set_buffers_format(mOutputSurface.get(), HAL_PIXEL_FORMAT_BLOB))
+ != OK) {
+ ALOGE("%s: Unable to configure stream buffer format for stream %d", __FUNCTION__,
+ mMainImageStreamId);
+ return res;
+ }
+
+ ANativeWindow *anwConsumer = mOutputSurface.get();
+ int maxConsumerBuffers;
+ if ((res = anwConsumer->query(anwConsumer, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
+ &maxConsumerBuffers)) != OK) {
+ ALOGE("%s: Unable to query consumer undequeued"
+ " buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
+ return res;
+ }
+
+ // Cannot use SourceSurface buffer count since it could be codec's 512*512 tile
+ // buffer count.
+ int maxProducerBuffers = 1;
+ if ((res = native_window_set_buffer_count(
+ anwConsumer, maxProducerBuffers + maxConsumerBuffers)) != OK) {
+ ALOGE("%s: Unable to set buffer count for stream %d", __FUNCTION__, mMainImageStreamId);
+ return res;
+ }
+
+ if ((res = native_window_set_buffers_dimensions(anwConsumer, mMaxHeicBufferSize, 1)) != OK) {
+ ALOGE("%s: Unable to set buffer dimension %zu x 1 for stream %d: %s (%d)",
+ __FUNCTION__, mMaxHeicBufferSize, mMainImageStreamId, strerror(-res), res);
+ return res;
+ }
+
+ run("HeicCompositeStreamProc");
+
+ return NO_ERROR;
+}
+
+status_t HeicCompositeStream::insertGbp(SurfaceMap* /*out*/outSurfaceMap,
+ Vector<int32_t>* /*out*/outputStreamIds, int32_t* /*out*/currentStreamId) {
+ if (outSurfaceMap->find(mAppSegmentStreamId) == outSurfaceMap->end()) {
+ (*outSurfaceMap)[mAppSegmentStreamId] = std::vector<size_t>();
+ outputStreamIds->push_back(mAppSegmentStreamId);
+ }
+ (*outSurfaceMap)[mAppSegmentStreamId].push_back(mAppSegmentSurfaceId);
+
+ if (outSurfaceMap->find(mMainImageStreamId) == outSurfaceMap->end()) {
+ (*outSurfaceMap)[mMainImageStreamId] = std::vector<size_t>();
+ outputStreamIds->push_back(mMainImageStreamId);
+ }
+ (*outSurfaceMap)[mMainImageStreamId].push_back(mMainImageSurfaceId);
+
+ if (currentStreamId != nullptr) {
+ *currentStreamId = mMainImageStreamId;
+ }
+
+ return NO_ERROR;
+}
+
+void HeicCompositeStream::onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) {
+ Mutex::Autolock l(mMutex);
+ if (mErrorState) {
+ return;
+ }
+
+ if (mSettingsByFrameNumber.find(resultExtras.frameNumber) != mSettingsByFrameNumber.end()) {
+ mFrameNumberMap.emplace(resultExtras.frameNumber, timestamp);
+ mSettingsByTimestamp[timestamp] = mSettingsByFrameNumber[resultExtras.frameNumber];
+ mSettingsByFrameNumber.erase(resultExtras.frameNumber);
+ mInputReadyCondition.signal();
+ }
+}
+
+void HeicCompositeStream::compilePendingInputLocked() {
+ while (!mSettingsByTimestamp.empty()) {
+ auto it = mSettingsByTimestamp.begin();
+ mPendingInputFrames[it->first].orientation = it->second.first;
+ mPendingInputFrames[it->first].quality = it->second.second;
+ mSettingsByTimestamp.erase(it);
+ }
+
+ while (!mInputAppSegmentBuffers.empty() && !mAppSegmentBufferAcquired) {
+ CpuConsumer::LockedBuffer imgBuffer;
+ auto it = mInputAppSegmentBuffers.begin();
+ auto res = mAppSegmentConsumer->lockNextBuffer(&imgBuffer);
+ if (res == NOT_ENOUGH_DATA) {
+ // Cannot lock any more buffers.
+ break;
+ } else if ((res != OK) || (*it != imgBuffer.timestamp)) {
+ if (res != OK) {
+ ALOGE("%s: Error locking JPEG_APP_SEGMENTS image buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ } else {
+ ALOGE("%s: Expecting JPEG_APP_SEGMENTS buffer with time stamp: %" PRId64
+ " received buffer with time stamp: %" PRId64, __FUNCTION__,
+ *it, imgBuffer.timestamp);
+ }
+ mPendingInputFrames[*it].error = true;
+ mInputAppSegmentBuffers.erase(it);
+ continue;
+ }
+
+ if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+ (mPendingInputFrames[imgBuffer.timestamp].error)) {
+ mAppSegmentConsumer->unlockBuffer(imgBuffer);
+ } else {
+ mPendingInputFrames[imgBuffer.timestamp].appSegmentBuffer = imgBuffer;
+ mAppSegmentBufferAcquired = true;
+ }
+ mInputAppSegmentBuffers.erase(it);
+ }
+
+ while (!mInputYuvBuffers.empty() && !mYuvBufferAcquired) {
+ CpuConsumer::LockedBuffer imgBuffer;
+ auto it = mInputYuvBuffers.begin();
+ auto res = mMainImageConsumer->lockNextBuffer(&imgBuffer);
+ if (res == NOT_ENOUGH_DATA) {
+ // Cannot lock any more buffers.
+ break;
+ } else if (res != OK) {
+ ALOGE("%s: Error locking YUV_888 image buffer: %s (%d)", __FUNCTION__,
+ strerror(-res), res);
+ mPendingInputFrames[*it].error = true;
+ mInputYuvBuffers.erase(it);
+ continue;
+ } else if (*it != imgBuffer.timestamp) {
+ ALOGW("%s: Expecting YUV_888 buffer with time stamp: %" PRId64 " received buffer with "
+ "time stamp: %" PRId64, __FUNCTION__, *it, imgBuffer.timestamp);
+ mPendingInputFrames[*it].error = true;
+ mInputYuvBuffers.erase(it);
+ continue;
+ }
+
+ if ((mPendingInputFrames.find(imgBuffer.timestamp) != mPendingInputFrames.end()) &&
+ (mPendingInputFrames[imgBuffer.timestamp].error)) {
+ mMainImageConsumer->unlockBuffer(imgBuffer);
+ } else {
+ mPendingInputFrames[imgBuffer.timestamp].yuvBuffer = imgBuffer;
+ mYuvBufferAcquired = true;
+ }
+ mInputYuvBuffers.erase(it);
+ }
+
+ while (!mCodecOutputBuffers.empty()) {
+ auto it = mCodecOutputBuffers.begin();
+ // Bitstream buffer timestamp doesn't necessarily directly correlate with input
+ // buffer timestamp. Assume encoder input to output is FIFO, use a queue
+ // to look up timestamp.
+ int64_t bufferTime = -1;
+ if (mCodecOutputBufferTimestamps.empty()) {
+ ALOGE("%s: Failed to find buffer timestamp for codec output buffer!", __FUNCTION__);
+ } else {
+ // Direct mapping between camera timestamp (in ns) and codec timestamp (in us).
+ bufferTime = mCodecOutputBufferTimestamps.front();
+ mOutputBufferCounter++;
+ if (mOutputBufferCounter == mNumOutputTiles) {
+ mCodecOutputBufferTimestamps.pop();
+ mOutputBufferCounter = 0;
+ }
+
+ mPendingInputFrames[bufferTime].codecOutputBuffers.push_back(*it);
+ }
+ mCodecOutputBuffers.erase(it);
+ }
+
+ while (!mFrameNumberMap.empty()) {
+ auto it = mFrameNumberMap.begin();
+ mPendingInputFrames[it->second].frameNumber = it->first;
+ mFrameNumberMap.erase(it);
+ }
+
+ // Heic composition doesn't depend on capture result, so no need to check
+ // mErrorFrameNumbers. Just remove them.
+ mErrorFrameNumbers.clear();
+
+ // Distribute codec input buffers to be filled out from YUV output
+ for (auto it = mPendingInputFrames.begin();
+ it != mPendingInputFrames.end() && mCodecInputBuffers.size() > 0; it++) {
+ InputFrame& inputFrame(it->second);
+ if (inputFrame.codecInputCounter < mGridRows * mGridCols) {
+ // Available input tiles that are required for the current input
+ // image.
+ size_t newInputTiles = std::min(mCodecInputBuffers.size(),
+ mGridRows * mGridCols - inputFrame.codecInputCounter);
+ for (size_t i = 0; i < newInputTiles; i++) {
+ CodecInputBufferInfo inputInfo =
+ { mCodecInputBuffers[0], mGridTimestampUs++, inputFrame.codecInputCounter };
+ inputFrame.codecInputBuffers.push_back(inputInfo);
+
+ mCodecInputBuffers.erase(mCodecInputBuffers.begin());
+ inputFrame.codecInputCounter++;
+ }
+ break;
+ }
+ }
+}
+
+// Select the oldest pending frame that has any work ready to process.
+//
+// Iterates mPendingInputFrames in ascending timestamp order and returns true
+// after storing the first matching timestamp in '*currentTs'. A frame
+// qualifies when it is not in error, is older than the incoming '*currentTs'
+// bound, and has either an unwritten APP segment buffer, pending codec output
+// buffers, or a YUV buffer with codec input buffers to fill.
+// Caller must hold mMutex.
+bool HeicCompositeStream::getNextReadyInputLocked(int64_t *currentTs /*out*/) {
+    if (currentTs == nullptr) {
+        return false;
+    }
+
+    for (const auto& entry : mPendingInputFrames) {
+        const int64_t frameTs = entry.first;
+        const InputFrame& frame = entry.second;
+        if (frame.error || (frameTs >= *currentTs)) {
+            continue;
+        }
+
+        const bool appSegmentReady =
+                (frame.appSegmentBuffer.data != nullptr) && !frame.appSegmentWritten;
+        const bool hasCodecOutput = !frame.codecOutputBuffers.empty();
+        const bool hasCodecInput =
+                (frame.yuvBuffer.data != nullptr) && !frame.codecInputBuffers.empty();
+        if (appSegmentReady || hasCodecOutput || hasCodecInput) {
+            *currentTs = frameTs;
+            return true;
+        }
+    }
+
+    return false;
+}
+
+// Find the oldest failed-but-unnotified pending frame.
+//
+// Returns the frame number of the first frame (in ascending timestamp order)
+// that is flagged as error, has not yet been reported to the client, and is
+// older than '*currentTs'; stores its timestamp in '*currentTs'. Returns -1
+// when no such frame exists or 'currentTs' is null. Caller must hold mMutex.
+int64_t HeicCompositeStream::getNextFailingInputLocked(int64_t *currentTs /*out*/) {
+    if (currentTs == nullptr) {
+        return -1;
+    }
+
+    for (const auto& entry : mPendingInputFrames) {
+        const InputFrame& frame = entry.second;
+        if (!frame.error || frame.errorNotified || (entry.first >= *currentTs)) {
+            continue;
+        }
+        *currentTs = entry.first;
+        return frame.frameNumber;
+    }
+
+    return -1;
+}
+
+// Drive one pending capture through the HEIC pipeline: feed YUV tiles to the
+// encoder, start the muxer once the first bitstream buffer exists, write the
+// JPEG APP segments, drain encoded tiles into the muxer, and finalize the
+// output once the APP segment is written and all tiles have been muxed.
+// Returns OK when there is simply nothing to do yet for this frame.
+status_t HeicCompositeStream::processInputFrame(nsecs_t timestamp,
+        InputFrame &inputFrame) {
+    ATRACE_CALL();
+    status_t res = OK;
+
+    bool appSegmentBufferReady = inputFrame.appSegmentBuffer.data != nullptr &&
+            !inputFrame.appSegmentWritten;
+    bool codecOutputReady = inputFrame.codecOutputBuffers.size() > 0;
+    bool codecInputReady = inputFrame.yuvBuffer.data != nullptr &&
+            !inputFrame.codecInputBuffers.empty();
+
+    if (!appSegmentBufferReady && !codecOutputReady && !codecInputReady) {
+        ALOGW("%s: No valid appSegmentBuffer/codec input/outputBuffer available!", __FUNCTION__);
+        return OK;
+    }
+
+    // Handle inputs for Hevc tiling
+    if (codecInputReady) {
+        res = processCodecInputFrame(inputFrame);
+        if (res != OK) {
+            ALOGE("%s: Failed to process codec input frame: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            return res;
+        }
+    }
+
+    // Initialize and start muxer if not yet done so
+    // (startMuxerForInputFrame is a no-op until the first codec output buffer
+    // arrives, so the muxer may legitimately stay null for several calls).
+    if (inputFrame.muxer == nullptr) {
+        res = startMuxerForInputFrame(timestamp, inputFrame);
+        if (res != OK) {
+            ALOGE("%s: Failed to create and start muxer: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            return res;
+        }
+    }
+
+    // Write JPEG APP segments data to the muxer.
+    if (appSegmentBufferReady && inputFrame.muxer != nullptr) {
+        res = processAppSegment(timestamp, inputFrame);
+        if (res != OK) {
+            ALOGE("%s: Failed to process JPEG APP segments: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            return res;
+        }
+    }
+
+    // Write media codec bitstream buffers to muxer.
+    while (!inputFrame.codecOutputBuffers.empty()) {
+        res = processOneCodecOutputFrame(timestamp, inputFrame);
+        if (res != OK) {
+            ALOGE("%s: Failed to process codec output frame: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            return res;
+        }
+    }
+
+    // Frame is complete once the APP segment is in and no output tiles remain.
+    if (inputFrame.appSegmentWritten && inputFrame.pendingOutputTiles == 0) {
+        res = processCompletedInputFrame(timestamp, inputFrame);
+        if (res != OK) {
+            ALOGE("%s: Failed to process completed input frame: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            return res;
+        }
+    }
+
+    return res;
+}
+
+// Lazily set up the per-frame muxer once the encoder has produced its first
+// bitstream buffer: dequeue the client-visible output buffer, back the muxer
+// with an anonymous in-memory file (memfd), add the codec's output format as
+// the single track, and start muxing. Also pushes the requested JPEG quality
+// to the encoder. Returns OK without doing anything while no codec output
+// buffer exists yet.
+status_t HeicCompositeStream::startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame) {
+    sp<ANativeWindow> outputANW = mOutputSurface;
+    if (inputFrame.codecOutputBuffers.size() == 0) {
+        // No single codec output buffer has been generated. Continue to
+        // wait.
+        return OK;
+    }
+
+    auto res = outputANW->dequeueBuffer(mOutputSurface.get(), &inputFrame.anb, &inputFrame.fenceFd);
+    if (res != OK) {
+        ALOGE("%s: Error retrieving output buffer: %s (%d)", __FUNCTION__, strerror(-res),
+                res);
+        return res;
+    }
+
+    // Combine current thread id, stream id and timestamp to uniquely identify image.
+    std::ostringstream tempOutputFile;
+    tempOutputFile << "HEIF-" << pthread_self() << "-"
+            << getStreamId() << "-" << timestamp;
+    // memfd gives a file descriptor backed by RAM; the muxer needs a seekable
+    // fd and the result is later copied into the gralloc buffer.
+    inputFrame.fileFd = syscall(__NR_memfd_create, tempOutputFile.str().c_str(), MFD_CLOEXEC);
+    if (inputFrame.fileFd < 0) {
+        ALOGE("%s: Failed to create file %s. Error no is %d", __FUNCTION__,
+                tempOutputFile.str().c_str(), errno);
+        return NO_INIT;
+    }
+    inputFrame.muxer = new MediaMuxer(inputFrame.fileFd, MediaMuxer::OUTPUT_FORMAT_HEIF);
+    if (inputFrame.muxer == nullptr) {
+        ALOGE("%s: Failed to create MediaMuxer for file fd %d",
+                __FUNCTION__, inputFrame.fileFd);
+        return NO_INIT;
+    }
+
+    res = inputFrame.muxer->setOrientationHint(inputFrame.orientation);
+    if (res != OK) {
+        ALOGE("%s: Failed to setOrientationHint: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+        return res;
+    }
+    // Set encoder quality
+    // NOTE(review): quality is conveyed through the video bitrate parameter
+    // key — presumably interpreted as quality in CQ mode; confirm against the
+    // codec's documented behavior.
+    {
+        sp<AMessage> qualityParams = new AMessage;
+        qualityParams->setInt32(PARAMETER_KEY_VIDEO_BITRATE, inputFrame.quality);
+        res = mCodec->setParameters(qualityParams);
+        if (res != OK) {
+            ALOGE("%s: Failed to set codec quality: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+    }
+
+    // mFormat is the encoder's output format captured on format-changed.
+    ssize_t trackId = inputFrame.muxer->addTrack(mFormat);
+    if (trackId < 0) {
+        ALOGE("%s: Failed to addTrack to the muxer: %zd", __FUNCTION__, trackId);
+        return NO_INIT;
+    }
+
+    inputFrame.trackIndex = trackId;
+    inputFrame.pendingOutputTiles = mNumOutputTiles;
+
+    res = inputFrame.muxer->start();
+    if (res != OK) {
+        ALOGE("%s: Failed to start MediaMuxer: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+
+    return OK;
+}
+
+// Write the JPEG APP segments for this frame into the muxer.
+//
+// Parses the camera-provided APP segment blob, regenerates the APP1 (EXIF)
+// segment via ExifUtils (with orientation and output dimensions filled in),
+// then assembles: "Exif" transport marker + new APP1 + the remaining original
+// APPn segments, and hands the result to the muxer as MUXER_DATA. Sets
+// inputFrame.appSegmentWritten on success.
+status_t HeicCompositeStream::processAppSegment(nsecs_t timestamp, InputFrame &inputFrame) {
+    size_t app1Size = 0;
+    // The APP segment stream is delivered as a width*height 8-bit blob buffer.
+    auto appSegmentSize = findAppSegmentsSize(inputFrame.appSegmentBuffer.data,
+            inputFrame.appSegmentBuffer.width * inputFrame.appSegmentBuffer.height,
+            &app1Size);
+    ALOGV("%s: appSegmentSize is %zu, width %d, height %d, app1Size %zu", __FUNCTION__,
+            appSegmentSize, inputFrame.appSegmentBuffer.width,
+            inputFrame.appSegmentBuffer.height, app1Size);
+    if (appSegmentSize == 0) {
+        ALOGE("%s: Failed to find JPEG APP segment size", __FUNCTION__);
+        return NO_INIT;
+    }
+
+    std::unique_ptr<ExifUtils> exifUtils(ExifUtils::create());
+    auto exifRes = exifUtils->initialize(inputFrame.appSegmentBuffer.data, app1Size);
+    if (!exifRes) {
+        ALOGE("%s: Failed to initialize ExifUtils object!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    //TODO: Use capture result metadata and static metadata to fill out the
+    //rest.
+    CameraMetadata dummyMeta;
+    exifRes = exifUtils->setFromMetadata(dummyMeta, mOutputWidth, mOutputHeight);
+    if (!exifRes) {
+        ALOGE("%s: Failed to set Exif tags using metadata and main image sizes", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    exifRes = exifUtils->setOrientation(inputFrame.orientation);
+    if (!exifRes) {
+        ALOGE("%s: ExifUtils failed to set orientation", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    exifRes = exifUtils->generateApp1();
+    if (!exifRes) {
+        ALOGE("%s: ExifUtils failed to generate APP1 segment", __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    unsigned int newApp1Length = exifUtils->getApp1Length();
+    const uint8_t *newApp1Segment = exifUtils->getApp1Buffer();
+
+    //Assemble the APP1 marker buffer required by MediaCodec
+    // Bytes 6-7 carry the big-endian APP1 length.
+    uint8_t kExifApp1Marker[] = {'E', 'x', 'i', 'f', 0xFF, 0xE1, 0x00, 0x00};
+    kExifApp1Marker[6] = static_cast<uint8_t>(newApp1Length >> 8);
+    kExifApp1Marker[7] = static_cast<uint8_t>(newApp1Length & 0xFF);
+    // Total = transport marker + regenerated APP1 + original trailing APPn data.
+    size_t appSegmentBufferSize = sizeof(kExifApp1Marker) +
+            appSegmentSize - app1Size + newApp1Length;
+    uint8_t* appSegmentBuffer = new uint8_t[appSegmentBufferSize];
+    memcpy(appSegmentBuffer, kExifApp1Marker, sizeof(kExifApp1Marker));
+    memcpy(appSegmentBuffer + sizeof(kExifApp1Marker), newApp1Segment, newApp1Length);
+    if (appSegmentSize - app1Size > 0) {
+        memcpy(appSegmentBuffer + sizeof(kExifApp1Marker) + newApp1Length,
+                inputFrame.appSegmentBuffer.data + app1Size, appSegmentSize - app1Size);
+    }
+
+    // NOTE(review): aBuffer aliases appSegmentBuffer, which is freed right
+    // after the call — assumes writeSampleData consumes the data
+    // synchronously; confirm against the MediaMuxer contract.
+    sp<ABuffer> aBuffer = new ABuffer(appSegmentBuffer, appSegmentBufferSize);
+    auto res = inputFrame.muxer->writeSampleData(aBuffer, inputFrame.trackIndex,
+            timestamp, MediaCodec::BUFFER_FLAG_MUXER_DATA);
+    delete[] appSegmentBuffer;
+
+    if (res != OK) {
+        ALOGE("%s: Failed to write JPEG APP segments to muxer: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return res;
+    }
+    inputFrame.appSegmentWritten = true;
+
+    return OK;
+}
+
+// Fill each queued codec input buffer with its corresponding tile copied from
+// the frame's YUV buffer, then queue the buffers to the encoder. Edge tiles
+// are clipped so the total coverage equals the full output dimensions.
+status_t HeicCompositeStream::processCodecInputFrame(InputFrame &inputFrame) {
+    for (auto& inputBuffer : inputFrame.codecInputBuffers) {
+        sp<MediaCodecBuffer> buffer;
+        auto res = mCodec->getInputBuffer(inputBuffer.index, &buffer);
+        if (res != OK) {
+            ALOGE("%s: Error getting codec input buffer: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            return res;
+        }
+
+        // Copy one tile from source to destination.
+        // Tiles are laid out row-major; the last column/row may be narrower
+        // than the grid cell when the output size isn't a multiple of it.
+        size_t tileX = inputBuffer.tileIndex % mGridCols;
+        size_t tileY = inputBuffer.tileIndex / mGridCols;
+        size_t top = mGridHeight * tileY;
+        size_t left = mGridWidth * tileX;
+        size_t width = (tileX == static_cast<size_t>(mGridCols) - 1) ?
+                mOutputWidth - tileX * mGridWidth : mGridWidth;
+        size_t height = (tileY == static_cast<size_t>(mGridRows) - 1) ?
+                mOutputHeight - tileY * mGridHeight : mGridHeight;
+        ALOGV("%s: inputBuffer tileIndex [%zu, %zu], top %zu, left %zu, width %zu, height %zu",
+                __FUNCTION__, tileX, tileY, top, left, width, height);
+
+        res = copyOneYuvTile(buffer, inputFrame.yuvBuffer, top, left, width, height);
+        if (res != OK) {
+            ALOGE("%s: Failed to copy YUV tile %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            return res;
+        }
+
+        res = mCodec->queueInputBuffer(inputBuffer.index, 0, buffer->capacity(),
+                inputBuffer.timeUs, 0, nullptr /*errorDetailMsg*/);
+        if (res != OK) {
+            ALOGE("%s: Failed to queueInputBuffer to Codec: %s (%d)",
+                    __FUNCTION__, strerror(-res), res);
+            return res;
+        }
+    }
+
+    // All buffers are now owned by the codec; drop the bookkeeping entries.
+    inputFrame.codecInputBuffers.clear();
+    return OK;
+}
+
+// Move the frame's oldest encoder bitstream buffer into the muxer, release it
+// back to the codec, and decrement the count of tiles still expected for this
+// frame.
+status_t HeicCompositeStream::processOneCodecOutputFrame(nsecs_t timestamp,
+        InputFrame &inputFrame) {
+    auto it = inputFrame.codecOutputBuffers.begin();
+    sp<MediaCodecBuffer> buffer;
+    status_t res = mCodec->getOutputBuffer(it->index, &buffer);
+    if (res != OK) {
+        ALOGE("%s: Error getting Heic codec output buffer at index %d: %s (%d)",
+                __FUNCTION__, it->index, strerror(-res), res);
+        return res;
+    }
+    if (buffer == nullptr) {
+        ALOGE("%s: Invalid Heic codec output buffer at index %d",
+                __FUNCTION__, it->index);
+        return BAD_VALUE;
+    }
+
+    sp<ABuffer> aBuffer = new ABuffer(buffer->data(), buffer->size());
+    res = inputFrame.muxer->writeSampleData(
+            aBuffer, inputFrame.trackIndex, timestamp, 0 /*flags*/);
+    if (res != OK) {
+        ALOGE("%s: Failed to write buffer index %d to muxer: %s (%d)",
+                __FUNCTION__, it->index, strerror(-res), res);
+        return res;
+    }
+
+    mCodec->releaseOutputBuffer(it->index);
+    // pendingOutputTiles reaching 0 is the completion signal for the frame;
+    // extra tiles indicate a codec/grid-configuration mismatch.
+    if (inputFrame.pendingOutputTiles == 0) {
+        ALOGW("%s: Codec generated more tiles than expected!", __FUNCTION__);
+    } else {
+        inputFrame.pendingOutputTiles--;
+    }
+
+    inputFrame.codecOutputBuffers.erase(inputFrame.codecOutputBuffers.begin());
+    return OK;
+}
+
+// Finalize a fully-encoded frame: stop the muxer, copy the muxed HEIF file
+// from the memfd into the client's gralloc buffer, append the CameraBlob
+// transport header at the end of the buffer, and queue the buffer to the
+// output surface with the capture timestamp.
+status_t HeicCompositeStream::processCompletedInputFrame(nsecs_t timestamp,
+        InputFrame &inputFrame) {
+    sp<ANativeWindow> outputANW = mOutputSurface;
+    inputFrame.muxer->stop();
+
+    // Copy the content of the file to memory.
+    sp<GraphicBuffer> gb = GraphicBuffer::from(inputFrame.anb);
+    void* dstBuffer;
+    // NOTE(review): 'gb' is never unlocked on any path before queueBuffer —
+    // confirm whether an explicit gb->unlock() is required here.
+    auto res = gb->lockAsync(GRALLOC_USAGE_SW_WRITE_OFTEN, &dstBuffer, inputFrame.fenceFd);
+    if (res != OK) {
+        ALOGE("%s: Error trying to lock output buffer fence: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+        return res;
+    }
+
+    off_t fSize = lseek(inputFrame.fileFd, 0, SEEK_END);
+    if (fSize < 0) {
+        // Without this check a failed lseek (-1) would wrap to a huge size_t.
+        ALOGE("%s: Failed to query muxer output file size: %s (%d)", __FUNCTION__,
+                strerror(errno), errno);
+        return BAD_VALUE;
+    }
+    if (static_cast<size_t>(fSize) > mMaxHeicBufferSize - sizeof(CameraBlob)) {
+        ALOGE("%s: Error: MediaMuxer output size %ld is larger than buffer size %zu",
+                __FUNCTION__, fSize, mMaxHeicBufferSize - sizeof(CameraBlob));
+        return BAD_VALUE;
+    }
+
+    lseek(inputFrame.fileFd, 0, SEEK_SET);
+    ssize_t bytesRead = read(inputFrame.fileFd, dstBuffer, fSize);
+    if (bytesRead < fSize) {
+        ALOGE("%s: Only %zd of %ld bytes read", __FUNCTION__, bytesRead, fSize);
+        return BAD_VALUE;
+    }
+
+    close(inputFrame.fileFd);
+    inputFrame.fileFd = -1;
+
+    // Fill in HEIC header
+    uint8_t *header = static_cast<uint8_t*>(dstBuffer) + mMaxHeicBufferSize - sizeof(CameraBlob);
+    struct CameraBlob *blobHeader = (struct CameraBlob *)header;
+    // Must be in sync with CAMERA3_HEIC_BLOB_ID in android_media_Utils.cpp
+    blobHeader->blobId = static_cast<CameraBlobId>(0x00FE);
+    blobHeader->blobSize = fSize;
+
+    res = native_window_set_buffers_timestamp(mOutputSurface.get(), timestamp);
+    if (res != OK) {
+        ALOGE("%s: Stream %d: Error setting timestamp: %s (%d)",
+                __FUNCTION__, getStreamId(), strerror(-res), res);
+        return res;
+    }
+
+    res = outputANW->queueBuffer(mOutputSurface.get(), inputFrame.anb, /*fence*/ -1);
+    if (res != OK) {
+        ALOGE("%s: Failed to queueBuffer to Heic stream: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+        return res;
+    }
+    // Ownership transferred to the consumer; don't cancel it during cleanup.
+    inputFrame.anb = nullptr;
+
+    return OK;
+}
+
+
+// Release all resources held by '*inputFrame': image buffers back to their
+// consumers, encoder output buffers back to the codec, the memfd, and any
+// dequeued-but-unqueued output buffer. Emits the (at most one) client error
+// notification for failed frames. Caller must hold mMutex.
+void HeicCompositeStream::releaseInputFrameLocked(InputFrame *inputFrame /*out*/) {
+    if (inputFrame == nullptr) {
+        return;
+    }
+
+    if (inputFrame->appSegmentBuffer.data != nullptr) {
+        mAppSegmentConsumer->unlockBuffer(inputFrame->appSegmentBuffer);
+        inputFrame->appSegmentBuffer.data = nullptr;
+        mAppSegmentBufferAcquired = false;
+    }
+
+    while (!inputFrame->codecOutputBuffers.empty()) {
+        auto it = inputFrame->codecOutputBuffers.begin();
+        ALOGV("%s: releaseOutputBuffer index %d", __FUNCTION__, it->index);
+        mCodec->releaseOutputBuffer(it->index);
+        inputFrame->codecOutputBuffers.erase(it);
+    }
+
+    if (inputFrame->yuvBuffer.data != nullptr) {
+        mMainImageConsumer->unlockBuffer(inputFrame->yuvBuffer);
+        inputFrame->yuvBuffer.data = nullptr;
+        mYuvBufferAcquired = false;
+    }
+
+    // Input buffer entries are plain bookkeeping records; no per-buffer
+    // release call is needed, so drop them all at once.
+    inputFrame->codecInputBuffers.clear();
+
+    // Notify the client at most once per failed frame ('errorNotified' guards
+    // against duplicate notifications when multiple internal buffers fail).
+    if ((inputFrame->error || mErrorState) && !inputFrame->errorNotified) {
+        notifyError(inputFrame->frameNumber);
+        inputFrame->errorNotified = true;
+    }
+
+    if (inputFrame->fileFd >= 0) {
+        close(inputFrame->fileFd);
+        inputFrame->fileFd = -1;
+    }
+
+    if (inputFrame->anb != nullptr) {
+        sp<ANativeWindow> outputANW = mOutputSurface;
+        outputANW->cancelBuffer(mOutputSurface.get(), inputFrame->anb, /*fence*/ -1);
+        inputFrame->anb = nullptr;
+    }
+}
+
+// Release and erase every pending frame with timestamp <= 'currentTs'.
+// Caller must hold mMutex.
+void HeicCompositeStream::releaseInputFramesLocked(int64_t currentTs) {
+    // Erase-while-iterating: map::erase returns the next valid iterator.
+    for (auto it = mPendingInputFrames.begin(); it != mPendingInputFrames.end(); ) {
+        if (it->first > currentTs) {
+            ++it;
+            continue;
+        }
+        releaseInputFrameLocked(&it->second);
+        it = mPendingInputFrames.erase(it);
+    }
+}
+
+// Create and configure the HEIC/HEVC encoder for the given output size:
+// choose HEIC vs HEVC based on device encoder support, start the codec and
+// callback loopers, compute the tiling grid, and configure the output format.
+// Also derives the APP-segment and overall output buffer size limits.
+// Fixes: typo in the unsupported-size error message ("doesnt'" -> "doesn't").
+status_t HeicCompositeStream::initializeCodec(uint32_t width, uint32_t height,
+        const sp<CameraDeviceBase>& cameraDevice) {
+    ALOGV("%s", __FUNCTION__);
+
+    bool useGrid = false;
+    bool isSizeSupported = isSizeSupportedByHeifEncoder(width, height,
+            &mUseHeic, &useGrid, nullptr);
+    if (!isSizeSupported) {
+        ALOGE("%s: Encoder doesn't support size %u x %u!",
+                __FUNCTION__, width, height);
+        return BAD_VALUE;
+    }
+
+    // Create Looper for MediaCodec.
+    auto desiredMime = mUseHeic ? MIMETYPE_IMAGE_ANDROID_HEIC : MIMETYPE_VIDEO_HEVC;
+    mCodecLooper = new ALooper;
+    mCodecLooper->setName("Camera3-HeicComposite-MediaCodecLooper");
+    status_t res = mCodecLooper->start(
+            false,   // runOnCallingThread
+            false,    // canCallJava
+            PRIORITY_AUDIO);
+    if (res != OK) {
+        ALOGE("%s: Failed to start codec looper: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return NO_INIT;
+    }
+
+    // Create HEIC/HEVC codec.
+    mCodec = MediaCodec::CreateByType(mCodecLooper, desiredMime, true /*encoder*/);
+    if (mCodec == nullptr) {
+        ALOGE("%s: Failed to create codec for %s", __FUNCTION__, desiredMime);
+        return NO_INIT;
+    }
+
+    // Create Looper and handler for Codec callback.
+    mCodecCallbackHandler = new CodecCallbackHandler(this);
+    if (mCodecCallbackHandler == nullptr) {
+        ALOGE("%s: Failed to create codec callback handler", __FUNCTION__);
+        return NO_MEMORY;
+    }
+    mCallbackLooper = new ALooper;
+    mCallbackLooper->setName("Camera3-HeicComposite-MediaCodecCallbackLooper");
+    res = mCallbackLooper->start(
+            false,   // runOnCallingThread
+            false,    // canCallJava
+            PRIORITY_AUDIO);
+    if (res != OK) {
+        ALOGE("%s: Failed to start media callback looper: %s (%d)",
+                __FUNCTION__, strerror(-res), res);
+        return NO_INIT;
+    }
+    mCallbackLooper->registerHandler(mCodecCallbackHandler);
+
+    mAsyncNotify = new AMessage(kWhatCallbackNotify, mCodecCallbackHandler);
+    res = mCodec->setCallback(mAsyncNotify);
+    if (res != OK) {
+        ALOGE("%s: Failed to set MediaCodec callback: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+        return res;
+    }
+
+    // Create output format and configure the Codec.
+    sp<AMessage> outputFormat = new AMessage();
+    outputFormat->setString(KEY_MIME, desiredMime);
+    outputFormat->setInt32(KEY_BITRATE_MODE, BITRATE_MODE_CQ);
+    outputFormat->setInt32(KEY_QUALITY, kDefaultJpegQuality);
+    // Ask codec to skip timestamp check and encode all frames.
+    outputFormat->setInt64("max-pts-gap-to-encoder", kNoFrameDropMaxPtsGap);
+
+    int32_t gridWidth, gridHeight, gridRows, gridCols;
+    if (useGrid || mUseHeic) {
+        // Tile the image on a fixed grid; round row/col counts up so the grid
+        // fully covers the output dimensions.
+        gridWidth = HeicEncoderInfoManager::kGridWidth;
+        gridHeight = HeicEncoderInfoManager::kGridHeight;
+        gridRows = (height + gridHeight - 1)/gridHeight;
+        gridCols = (width + gridWidth - 1)/gridWidth;
+
+        if (mUseHeic) {
+            outputFormat->setInt32(KEY_TILE_WIDTH, gridWidth);
+            outputFormat->setInt32(KEY_TILE_HEIGHT, gridHeight);
+            outputFormat->setInt32(KEY_GRID_COLUMNS, gridCols);
+            outputFormat->setInt32(KEY_GRID_ROWS, gridRows);
+        }
+
+    } else {
+        gridWidth = width;
+        gridHeight = height;
+        gridRows = 1;
+        gridCols = 1;
+    }
+
+    outputFormat->setInt32(KEY_WIDTH, !useGrid ? width : gridWidth);
+    outputFormat->setInt32(KEY_HEIGHT, !useGrid ? height : gridHeight);
+    outputFormat->setInt32(KEY_I_FRAME_INTERVAL, 0);
+    outputFormat->setInt32(KEY_COLOR_FORMAT,
+            useGrid ? COLOR_FormatYUV420Flexible : COLOR_FormatSurface);
+    outputFormat->setInt32(KEY_FRAME_RATE, gridRows * gridCols);
+    // This only serves as a hint to encoder when encoding is not real-time.
+    outputFormat->setInt32(KEY_OPERATING_RATE, useGrid ? kGridOpRate : kNoGridOpRate);
+
+    res = mCodec->configure(outputFormat, nullptr /*nativeWindow*/,
+            nullptr /*crypto*/, CONFIGURE_FLAG_ENCODE);
+    if (res != OK) {
+        ALOGE("%s: Failed to configure codec: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+        return res;
+    }
+
+    mGridWidth = gridWidth;
+    mGridHeight = gridHeight;
+    mGridRows = gridRows;
+    mGridCols = gridCols;
+    mUseGrid = useGrid;
+    mOutputWidth = width;
+    mOutputHeight = height;
+    mAppSegmentMaxSize = calcAppSegmentMaxSize(cameraDevice->info());
+    // Worst case output: full YUV420 image plus the APP segment budget.
+    mMaxHeicBufferSize = mOutputWidth * mOutputHeight * 3 / 2 + mAppSegmentMaxSize;
+
+    return OK;
+}
+
+// Tear down the encoder and its loopers. The codec is stopped and released
+// before its loopers so no callbacks arrive on dead handlers.
+void HeicCompositeStream::deinitCodec() {
+    ALOGV("%s", __FUNCTION__);
+    if (mCodec != nullptr) {
+        mCodec->stop();
+        mCodec->release();
+        mCodec.clear();
+    }
+
+    if (mCodecLooper != nullptr) {
+        mCodecLooper->stop();
+        mCodecLooper.clear();
+    }
+
+    if (mCallbackLooper != nullptr) {
+        mCallbackLooper->stop();
+        mCallbackLooper.clear();
+    }
+
+    mAsyncNotify.clear();
+    mFormat.clear();
+}
+
+// Return the size of the complete list of app segment, 0 indicates failure
+//
+// Layout of 'appSegmentBuffer' (per the camera HAL JPEG_APP_SEGMENTS blob
+// convention): concatenated JPEG APPn segments at the front, and a CameraBlob
+// transport header in the last sizeof(CameraBlob) bytes carrying the total
+// payload size. The function cross-checks the declared size against the
+// actual marker structure and also reports the APP1 segment size (marker +
+// length-prefixed payload) through 'app1SegmentSize'.
+size_t HeicCompositeStream::findAppSegmentsSize(const uint8_t* appSegmentBuffer,
+        size_t maxSize, size_t *app1SegmentSize) {
+    if (appSegmentBuffer == nullptr || app1SegmentSize == nullptr) {
+        ALOGE("%s: Invalid input appSegmentBuffer %p, app1SegmentSize %p",
+                __FUNCTION__, appSegmentBuffer, app1SegmentSize);
+        return 0;
+    }
+
+    size_t expectedSize = 0;
+    // First check for EXIF transport header at the end of the buffer
+    const uint8_t *header = appSegmentBuffer + (maxSize - sizeof(struct CameraBlob));
+    const struct CameraBlob *blob = (const struct CameraBlob*)(header);
+    if (blob->blobId != CameraBlobId::JPEG_APP_SEGMENTS) {
+        ALOGE("%s: Invalid EXIF blobId %hu", __FUNCTION__, blob->blobId);
+        return 0;
+    }
+
+    expectedSize = blob->blobSize;
+    if (expectedSize == 0 || expectedSize > maxSize - sizeof(struct CameraBlob)) {
+        ALOGE("%s: Invalid blobSize %zu.", __FUNCTION__, expectedSize);
+        return 0;
+    }
+
+    uint32_t totalSize = 0;
+
+    // Verify APP1 marker (mandatory)
+    uint8_t app1Marker[] = {0xFF, 0xE1};
+    if (memcmp(appSegmentBuffer, app1Marker, sizeof(app1Marker))) {
+        ALOGE("%s: Invalid APP1 marker: %x, %x", __FUNCTION__,
+                appSegmentBuffer[0], appSegmentBuffer[1]);
+        return 0;
+    }
+    totalSize += sizeof(app1Marker);
+
+    // JPEG segment length is big-endian and includes the two length bytes,
+    // so 'marker size + app1Size' covers the whole segment.
+    uint16_t app1Size = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
+            appSegmentBuffer[totalSize+1];
+    totalSize += app1Size;
+
+    ALOGV("%s: Expected APP segments size %zu, APP1 segment size %u",
+            __FUNCTION__, expectedSize, app1Size);
+    // Walk the remaining APPn (0xFFE2..0xFFEF) segments, accumulating sizes.
+    while (totalSize < expectedSize) {
+        if (appSegmentBuffer[totalSize] != 0xFF ||
+                appSegmentBuffer[totalSize+1] <= 0xE1 ||
+                appSegmentBuffer[totalSize+1] > 0xEF) {
+            // Invalid APPn marker
+            ALOGE("%s: Invalid APPn marker: %x, %x", __FUNCTION__,
+                    appSegmentBuffer[totalSize], appSegmentBuffer[totalSize+1]);
+            return 0;
+        }
+        totalSize += 2;
+
+        uint16_t appnSize = (static_cast<uint16_t>(appSegmentBuffer[totalSize]) << 8) +
+                appSegmentBuffer[totalSize+1];
+        totalSize += appnSize;
+    }
+
+    // The walked structure must land exactly on the declared size.
+    if (totalSize != expectedSize) {
+        ALOGE("%s: Invalid JPEG APP segments: totalSize %u vs expected size %zu",
+                __FUNCTION__, totalSize, expectedSize);
+        return 0;
+    }
+
+    *app1SegmentSize = app1Size + sizeof(app1Marker);
+    return expectedSize;
+}
+
+// Map a codec timestamp (microseconds) back to the original camera timestamp
+// (nanoseconds) by scanning the frame-number map first, then the pending
+// frames. Returns -1 when no match is found. Caller must hold mMutex.
+int64_t HeicCompositeStream::findTimestampInNsLocked(int64_t timeInUs) {
+    for (const auto& entry : mFrameNumberMap) {
+        if (ns2us(entry.second) == timeInUs) {
+            return entry.second;
+        }
+    }
+    for (const auto& entry : mPendingInputFrames) {
+        if (ns2us(entry.first) == timeInUs) {
+            return entry.first;
+        }
+    }
+    return -1;
+}
+
+// Copy one (top, left, width, height) tile from the camera YUV buffer into a
+// codec input buffer, honoring both buffers' plane layouts. Uses fast
+// whole-row copies when source and destination chroma layouts match
+// (semiplanar->semiplanar or planar->planar), and falls back to a per-pixel
+// copy when converting between layouts. Chroma planes are assumed 4:2:0
+// subsampled (half-resolution rows/cols).
+status_t HeicCompositeStream::copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
+        const CpuConsumer::LockedBuffer& yuvBuffer,
+        size_t top, size_t left, size_t width, size_t height) {
+    ATRACE_CALL();
+
+    // Get stride information for codecBuffer
+    sp<ABuffer> imageData;
+    if (!codecBuffer->meta()->findBuffer("image-data", &imageData)) {
+        ALOGE("%s: Codec input buffer is not for image data!", __FUNCTION__);
+        return BAD_VALUE;
+    }
+    if (imageData->size() != sizeof(MediaImage2)) {
+        ALOGE("%s: Invalid codec input image size %zu, expected %zu",
+                __FUNCTION__, imageData->size(), sizeof(MediaImage2));
+        return BAD_VALUE;
+    }
+    // Only 8-bit 3-plane YUV input buffers are supported.
+    MediaImage2* imageInfo = reinterpret_cast<MediaImage2*>(imageData->data());
+    if (imageInfo->mType != MediaImage2::MEDIA_IMAGE_TYPE_YUV ||
+            imageInfo->mBitDepth != 8 ||
+            imageInfo->mBitDepthAllocated != 8 ||
+            imageInfo->mNumPlanes != 3) {
+        ALOGE("%s: Invalid codec input image info: mType %d, mBitDepth %d, "
+                "mBitDepthAllocated %d, mNumPlanes %d!", __FUNCTION__,
+                imageInfo->mType, imageInfo->mBitDepth,
+                imageInfo->mBitDepthAllocated, imageInfo->mNumPlanes);
+        return BAD_VALUE;
+    }
+
+    ALOGV("%s: yuvBuffer chromaStep %d, chromaStride %d",
+            __FUNCTION__, yuvBuffer.chromaStep, yuvBuffer.chromaStride);
+    ALOGV("%s: U offset %u, V offset %u, U rowInc %d, V rowInc %d, U colInc %d, V colInc %d",
+            __FUNCTION__, imageInfo->mPlane[MediaImage2::U].mOffset,
+            imageInfo->mPlane[MediaImage2::V].mOffset,
+            imageInfo->mPlane[MediaImage2::U].mRowInc,
+            imageInfo->mPlane[MediaImage2::V].mRowInc,
+            imageInfo->mPlane[MediaImage2::U].mColInc,
+            imageInfo->mPlane[MediaImage2::V].mColInc);
+
+    // Y
+    // Luma rows copy directly at full resolution.
+    for (auto row = top; row < top+height; row++) {
+        uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::Y].mOffset +
+                imageInfo->mPlane[MediaImage2::Y].mRowInc * (row - top);
+        memcpy(dst, yuvBuffer.data+row*yuvBuffer.stride+left, width);
+    }
+
+    // U is Cb, V is Cr
+    // Classify the codec buffer's chroma layout from plane offsets/strides:
+    // offset diff of 1 with colInc 2 => interleaved (semiplanar); offset diff
+    // of at least a half-height plane with colInc 1 => separate planes.
+    bool codecUPlaneFirst = imageInfo->mPlane[MediaImage2::V].mOffset >
+            imageInfo->mPlane[MediaImage2::U].mOffset;
+    uint32_t codecUvOffsetDiff = codecUPlaneFirst ?
+            imageInfo->mPlane[MediaImage2::V].mOffset - imageInfo->mPlane[MediaImage2::U].mOffset :
+            imageInfo->mPlane[MediaImage2::U].mOffset - imageInfo->mPlane[MediaImage2::V].mOffset;
+    bool isCodecUvSemiplannar = (codecUvOffsetDiff == 1) &&
+            (imageInfo->mPlane[MediaImage2::U].mRowInc ==
+            imageInfo->mPlane[MediaImage2::V].mRowInc) &&
+            (imageInfo->mPlane[MediaImage2::U].mColInc == 2) &&
+            (imageInfo->mPlane[MediaImage2::V].mColInc == 2);
+    bool isCodecUvPlannar =
+            ((codecUPlaneFirst && codecUvOffsetDiff >=
+                    imageInfo->mPlane[MediaImage2::U].mRowInc * imageInfo->mHeight/2) ||
+            ((!codecUPlaneFirst && codecUvOffsetDiff >=
+                    imageInfo->mPlane[MediaImage2::V].mRowInc * imageInfo->mHeight/2))) &&
+            imageInfo->mPlane[MediaImage2::U].mColInc == 1 &&
+            imageInfo->mPlane[MediaImage2::V].mColInc == 1;
+    bool cameraUPlaneFirst = yuvBuffer.dataCr > yuvBuffer.dataCb;
+
+    if (isCodecUvSemiplannar && yuvBuffer.chromaStep == 2 &&
+            (codecUPlaneFirst == cameraUPlaneFirst)) {
+        // UV semiplannar
+        // The chrome plane could be either Cb first, or Cr first. Take the
+        // smaller address.
+        uint8_t *src = std::min(yuvBuffer.dataCb, yuvBuffer.dataCr);
+        MediaImage2::PlaneIndex dstPlane = codecUvOffsetDiff > 0 ? MediaImage2::U : MediaImage2::V;
+        for (auto row = top/2; row < (top+height)/2; row++) {
+            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[dstPlane].mOffset +
+                    imageInfo->mPlane[dstPlane].mRowInc * (row - top/2);
+            memcpy(dst, src+row*yuvBuffer.chromaStride+left, width);
+        }
+    } else if (isCodecUvPlannar && yuvBuffer.chromaStep == 1) {
+        // U plane
+        for (auto row = top/2; row < (top+height)/2; row++) {
+            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::U].mOffset +
+                    imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2);
+            memcpy(dst, yuvBuffer.dataCb+row*yuvBuffer.chromaStride+left/2, width/2);
+        }
+
+        // V plane
+        for (auto row = top/2; row < (top+height)/2; row++) {
+            uint8_t *dst = codecBuffer->data() + imageInfo->mPlane[MediaImage2::V].mOffset +
+                    imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2);
+            memcpy(dst, yuvBuffer.dataCr+row*yuvBuffer.chromaStride+left/2, width/2);
+        }
+    } else {
+        // Convert between semiplannar and plannar
+        // Slow path: copy chroma sample by sample using each side's own
+        // stride/step so any combination of layouts is handled.
+        uint8_t *dst = codecBuffer->data();
+        for (auto row = top/2; row < (top+height)/2; row++) {
+            for (auto col = left/2; col < (left+width)/2; col++) {
+                // U/Cb
+                int32_t dstIndex = imageInfo->mPlane[MediaImage2::U].mOffset +
+                        imageInfo->mPlane[MediaImage2::U].mRowInc * (row - top/2) +
+                        imageInfo->mPlane[MediaImage2::U].mColInc * (col - left/2);
+                int32_t srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
+                dst[dstIndex] = yuvBuffer.dataCb[srcIndex];
+
+                // V/Cr
+                dstIndex = imageInfo->mPlane[MediaImage2::V].mOffset +
+                        imageInfo->mPlane[MediaImage2::V].mRowInc * (row - top/2) +
+                        imageInfo->mPlane[MediaImage2::V].mColInc * (col - left/2);
+                srcIndex = row * yuvBuffer.chromaStride + yuvBuffer.chromaStep * col;
+                dst[dstIndex] = yuvBuffer.dataCr[srcIndex];
+            }
+        }
+    }
+    return OK;
+}
+
+// Compute the worst-case byte budget for JPEG APP segments: the HAL-declared
+// segment count (clamped to [1, 16]) times the largest possible segment
+// (2-byte marker + 0xFFFF payload), plus the CameraBlob transport header.
+size_t HeicCompositeStream::calcAppSegmentMaxSize(const CameraMetadata& info) {
+    size_t maxAppsSegment = 1;
+    camera_metadata_ro_entry_t entry = info.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
+    if (entry.count > 0) {
+        maxAppsSegment = entry.data.u8[0];
+        if (maxAppsSegment < 1) {
+            maxAppsSegment = 1;
+        } else if (maxAppsSegment > 16) {
+            maxAppsSegment = 16;
+        }
+    }
+    return maxAppsSegment * (2 + 0xFFFF) + sizeof(struct CameraBlob);
+}
+
+// Worker loop: gather newly arrived buffers, pick the oldest frame with work
+// ready, process it outside the lock, and release completed/failed frames.
+// Fixes: restores '&currentTs' at the two call sites where the argument was
+// garbled by HTML-entity corruption ("&curren;" -> '¤'), and uses PRId64 for
+// the signed int64_t timestamp in the error log (was PRIu64).
+bool HeicCompositeStream::threadLoop() {
+    int64_t currentTs = INT64_MAX;
+    bool newInputAvailable = false;
+
+    {
+        Mutex::Autolock l(mMutex);
+        if (mErrorState) {
+            // In case we landed in error state, return any pending buffers and
+            // halt all further processing.
+            compilePendingInputLocked();
+            releaseInputFramesLocked(currentTs);
+            return false;
+        }
+
+        while (!newInputAvailable) {
+            compilePendingInputLocked();
+            newInputAvailable = getNextReadyInputLocked(&currentTs);
+
+            if (!newInputAvailable) {
+                auto failingFrameNumber = getNextFailingInputLocked(&currentTs);
+                if (failingFrameNumber >= 0) {
+                    // We cannot erase 'mPendingInputFrames[currentTs]' at this point because it is
+                    // possible for two internal stream buffers to fail. In such scenario the
+                    // composite stream should notify the client about a stream buffer error only
+                    // once and this information is kept within 'errorNotified'.
+                    // Any present failed input frames will be removed on a subsequent call to
+                    // 'releaseInputFramesLocked()'.
+                    releaseInputFrameLocked(&mPendingInputFrames[currentTs]);
+                    currentTs = INT64_MAX;
+                }
+
+                auto ret = mInputReadyCondition.waitRelative(mMutex, kWaitDuration);
+                if (ret == TIMED_OUT) {
+                    return true;
+                } else if (ret != OK) {
+                    ALOGE("%s: Timed wait on condition failed: %s (%d)", __FUNCTION__,
+                            strerror(-ret), ret);
+                    return false;
+                }
+            }
+        }
+    }
+
+    // Encoding/muxing can be slow; run it without holding the lock so buffer
+    // callbacks aren't blocked.
+    auto res = processInputFrame(currentTs, mPendingInputFrames[currentTs]);
+    Mutex::Autolock l(mMutex);
+    if (res != OK) {
+        ALOGE("%s: Failed processing frame with timestamp: %" PRId64 ": %s (%d)",
+                __FUNCTION__, currentTs, strerror(-res), res);
+        mPendingInputFrames[currentTs].error = true;
+    }
+
+    // Release this frame (and all older ones) once it has fully completed or
+    // failed.
+    if (mPendingInputFrames[currentTs].error ||
+            (mPendingInputFrames[currentTs].appSegmentWritten &&
+            mPendingInputFrames[currentTs].pendingOutputTiles == 0)) {
+        releaseInputFramesLocked(currentTs);
+    }
+
+    return true;
+}
+
+// Intercept buffer errors on the internal (APP segment / main image) streams.
+// Buffer errors concerning internal composite streams should not be directly
+// visible to camera clients; they must only receive a single buffer error
+// with the public composite stream id. Returns true when the error was
+// absorbed here.
+bool HeicCompositeStream::onStreamBufferError(const CaptureResultExtras& resultExtras) {
+    const bool internalStreamError =
+            (resultExtras.errorStreamId == mAppSegmentStreamId) ||
+            (resultExtras.errorStreamId == mMainImageStreamId);
+    if (internalStreamError) {
+        flagAnErrorFrameNumber(resultExtras.frameNumber);
+    }
+
+    return internalStreamError;
+}
+
+// Dispatch asynchronous MediaCodec callback messages (posted by the codec via
+// mAsyncNotify) to the owning HeicCompositeStream. Malformed messages are
+// logged and dropped; a dead parent silently discards the message.
+void HeicCompositeStream::CodecCallbackHandler::onMessageReceived(const sp<AMessage> &msg) {
+    sp<HeicCompositeStream> parent = mParent.promote();
+    if (parent == nullptr) return;
+
+    switch (msg->what()) {
+        case kWhatCallbackNotify: {
+            int32_t cbID;
+            if (!msg->findInt32("callbackID", &cbID)) {
+                ALOGE("kWhatCallbackNotify: callbackID is expected.");
+                break;
+            }
+
+            ALOGV("kWhatCallbackNotify: cbID = %d", cbID);
+
+            switch (cbID) {
+                // Encoder has a free input buffer ready to be filled.
+                case MediaCodec::CB_INPUT_AVAILABLE: {
+                    int32_t index;
+                    if (!msg->findInt32("index", &index)) {
+                        ALOGE("CB_INPUT_AVAILABLE: index is expected.");
+                        break;
+                    }
+                    parent->onHeicInputFrameAvailable(index);
+                    break;
+                }
+
+                // Encoder produced a bitstream buffer; forward its metadata.
+                case MediaCodec::CB_OUTPUT_AVAILABLE: {
+                    int32_t index;
+                    size_t offset;
+                    size_t size;
+                    int64_t timeUs;
+                    int32_t flags;
+
+                    if (!msg->findInt32("index", &index)) {
+                        ALOGE("CB_OUTPUT_AVAILABLE: index is expected.");
+                        break;
+                    }
+                    if (!msg->findSize("offset", &offset)) {
+                        ALOGE("CB_OUTPUT_AVAILABLE: offset is expected.");
+                        break;
+                    }
+                    if (!msg->findSize("size", &size)) {
+                        ALOGE("CB_OUTPUT_AVAILABLE: size is expected.");
+                        break;
+                    }
+                    if (!msg->findInt64("timeUs", &timeUs)) {
+                        ALOGE("CB_OUTPUT_AVAILABLE: timeUs is expected.");
+                        break;
+                    }
+                    if (!msg->findInt32("flags", &flags)) {
+                        ALOGE("CB_OUTPUT_AVAILABLE: flags is expected.");
+                        break;
+                    }
+
+                    CodecOutputBufferInfo bufferInfo = {
+                        index,
+                        (int32_t)offset,
+                        (int32_t)size,
+                        timeUs,
+                        (uint32_t)flags};
+
+                    parent->onHeicOutputFrameAvailable(bufferInfo);
+                    break;
+                }
+
+                // Encoder announced its output format (used as the muxer track
+                // format).
+                case MediaCodec::CB_OUTPUT_FORMAT_CHANGED: {
+                    sp<AMessage> format;
+                    if (!msg->findMessage("format", &format)) {
+                        ALOGE("CB_OUTPUT_FORMAT_CHANGED: format is expected.");
+                        break;
+                    }
+
+                    parent->onHeicFormatChanged(format);
+                    break;
+                }
+
+                case MediaCodec::CB_ERROR: {
+                    status_t err;
+                    int32_t actionCode;
+                    AString detail;
+                    if (!msg->findInt32("err", &err)) {
+                        ALOGE("CB_ERROR: err is expected.");
+                        break;
+                    }
+                    if (!msg->findInt32("action", &actionCode)) {
+                        ALOGE("CB_ERROR: action is expected.");
+                        break;
+                    }
+                    // "detail" is optional; log whatever is present.
+                    msg->findString("detail", &detail);
+                    ALOGE("Codec reported error(0x%x), actionCode(%d), detail(%s)",
+                            err, actionCode, detail.c_str());
+
+                    parent->onHeicCodecError();
+                    break;
+                }
+
+                default: {
+                    ALOGE("kWhatCallbackNotify: callbackID(%d) is unexpected.", cbID);
+                    break;
+                }
+            }
+            break;
+        }
+
+        default:
+            ALOGE("shouldn't be here");
+            break;
+    }
+}
+
+}; // namespace camera3
+}; // namespace android
diff --git a/services/camera/libcameraservice/api2/HeicCompositeStream.h b/services/camera/libcameraservice/api2/HeicCompositeStream.h
new file mode 100644
index 0000000..0a76256
--- /dev/null
+++ b/services/camera/libcameraservice/api2/HeicCompositeStream.h
@@ -0,0 +1,250 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
+#define ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
+
+#include <queue>
+
+#include <gui/IProducerListener.h>
+#include <gui/CpuConsumer.h>
+
+#include <media/hardware/VideoAPI.h>
+#include <media/MediaCodecBuffer.h>
+#include <media/stagefright/foundation/ALooper.h>
+#include <media/stagefright/MediaCodec.h>
+#include <media/stagefright/MediaMuxer.h>
+
+#include "CompositeStream.h"
+
+namespace android {
+namespace camera3 {
+
+// Composite stream that produces HEIC (HEIF container) still captures by
+// combining two internal camera streams: a JPEG_APP_SEGMENTS blob stream
+// (EXIF/APPn metadata) and a main-image stream that is either fed directly to
+// a hardware HEIC encoder or tiled in the framework and fed to an HEVC
+// encoder. The Thread base drives the muxing of both inputs into the client's
+// output surface.
+class HeicCompositeStream : public CompositeStream, public Thread,
+        public CpuConsumer::FrameAvailableListener {
+public:
+    HeicCompositeStream(wp<CameraDeviceBase> device,
+            wp<hardware::camera2::ICameraDeviceCallbacks> cb);
+    ~HeicCompositeStream() override;
+
+    // True if 'surface' has the pixel format + dataspace combination used by
+    // HEIC composite outputs.
+    static bool isHeicCompositeStream(const sp<Surface> &surface);
+
+    status_t createInternalStreams(const std::vector<sp<Surface>>& consumers,
+            bool hasDeferredConsumer, uint32_t width, uint32_t height, int format,
+            camera3_stream_rotation_t rotation, int *id, const String8& physicalCameraId,
+            std::vector<int> *surfaceIds, int streamSetId, bool isShared) override;
+
+    status_t deleteInternalStreams() override;
+
+    status_t configureStream() override;
+
+    status_t insertGbp(SurfaceMap* /*out*/outSurfaceMap, Vector<int32_t>* /*out*/outputStreamIds,
+            int32_t* /*out*/currentStreamId) override;
+
+    void onShutter(const CaptureResultExtras& resultExtras, nsecs_t timestamp) override;
+
+    int getStreamId() override { return mMainImageStreamId; }
+
+    // Use onShutter to keep track of frame number <-> timestamp mapping.
+    void onBufferReleased(const BufferInfo& bufferInfo) override;
+    void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
+            const CameraMetadata& settings) override;
+
+    // CpuConsumer listener implementation
+    void onFrameAvailable(const BufferItem& item) override;
+
+    // Return stream information about the internal camera streams
+    static status_t getCompositeStreamInfo(const OutputStreamInfo &streamInfo,
+            const CameraMetadata& ch, std::vector<OutputStreamInfo>* compositeOutput /*out*/);
+
+    // Thin wrappers over HeicEncoderInfoManager; used by CameraProviderManager
+    // when deriving the HEIC static metadata tags.
+    static bool isSizeSupportedByHeifEncoder(int32_t width, int32_t height,
+            bool* useHeic, bool* useGrid, int64_t* stall);
+    static bool isInMemoryTempFileSupported();
+protected:
+
+    bool threadLoop() override;
+    bool onStreamBufferError(const CaptureResultExtras& resultExtras) override;
+    void onResultError(const CaptureResultExtras& /*resultExtras*/) override {}
+
+private:
+    //
+    // HEIC/HEVC Codec related structures, utility functions, and callbacks
+    //
+
+    // One encoded output buffer reported by MediaCodec's async callback.
+    struct CodecOutputBufferInfo {
+        int32_t index;
+        int32_t offset;
+        int32_t size;
+        int64_t timeUs;
+        uint32_t flags;
+    };
+
+    // One free codec input buffer, tagged with the grid tile it should carry.
+    struct CodecInputBufferInfo {
+        int32_t index;
+        int64_t timeUs;
+        size_t tileIndex;
+    };
+
+    // Receives MediaCodec async callbacks on mCallbackLooper and forwards them
+    // to the (weakly referenced) owning stream.
+    class CodecCallbackHandler : public AHandler {
+    public:
+        explicit CodecCallbackHandler(wp<HeicCompositeStream> parent) {
+            mParent = parent;
+        }
+        virtual void onMessageReceived(const sp<AMessage> &msg);
+    private:
+        wp<HeicCompositeStream> mParent;
+    };
+
+    enum {
+        kWhatCallbackNotify,
+    };
+
+    bool mUseHeic;
+    sp<MediaCodec> mCodec;
+    sp<ALooper> mCodecLooper, mCallbackLooper;
+    sp<CodecCallbackHandler> mCodecCallbackHandler;
+    sp<AMessage> mAsyncNotify;
+    sp<AMessage> mFormat;
+    // Number of encoded output buffers that make up one complete image.
+    size_t mNumOutputTiles;
+
+    int32_t mOutputWidth, mOutputHeight;
+    size_t mMaxHeicBufferSize;
+    int32_t mGridWidth, mGridHeight;
+    size_t mGridRows, mGridCols;
+    bool mUseGrid; // Whether to use framework YUV frame tiling.
+
+    static const int64_t kNoFrameDropMaxPtsGap = -1000000;
+    static const int32_t kNoGridOpRate = 30;
+    static const int32_t kGridOpRate = 120;
+
+    void onHeicOutputFrameAvailable(const CodecOutputBufferInfo& bufferInfo);
+    void onHeicInputFrameAvailable(int32_t index);  // Only called for YUV input mode.
+    void onHeicFormatChanged(sp<AMessage>& newFormat);
+    void onHeicCodecError();
+
+    status_t initializeCodec(uint32_t width, uint32_t height,
+            const sp<CameraDeviceBase>& cameraDevice);
+    void deinitCodec();
+
+    //
+    // Composite stream related structures, utility functions and callbacks.
+    //
+
+    // All per-capture state, keyed by timestamp in mPendingInputFrames.
+    struct InputFrame {
+        int32_t orientation;
+        int32_t quality;
+
+        CpuConsumer::LockedBuffer appSegmentBuffer;
+        std::vector<CodecOutputBufferInfo> codecOutputBuffers;
+
+        // Fields that are only applicable to HEVC tiling.
+        CpuConsumer::LockedBuffer yuvBuffer;
+        std::vector<CodecInputBufferInfo> codecInputBuffers;
+
+        bool error;
+        bool errorNotified;
+        int64_t frameNumber;
+
+        sp<MediaMuxer> muxer;
+        int fenceFd;
+        int fileFd;
+        ssize_t trackIndex;
+        ANativeWindowBuffer *anb;
+
+        bool appSegmentWritten;
+        size_t pendingOutputTiles;
+        size_t codecInputCounter;
+
+        InputFrame() : orientation(0), quality(kDefaultJpegQuality), error(false),
+                errorNotified(false), frameNumber(-1), fenceFd(-1), fileFd(-1),
+                trackIndex(-1), anb(nullptr), appSegmentWritten(false),
+                pendingOutputTiles(0), codecInputCounter(0) { }
+    };
+
+    void compilePendingInputLocked();
+    // Find first complete and valid frame with smallest timestamp
+    bool getNextReadyInputLocked(int64_t *currentTs /*out*/);
+    // Find next failing frame number with smallest timestamp and return respective frame number
+    int64_t getNextFailingInputLocked(int64_t *currentTs /*out*/);
+
+    status_t processInputFrame(nsecs_t timestamp, InputFrame &inputFrame);
+    status_t processCodecInputFrame(InputFrame &inputFrame);
+    status_t startMuxerForInputFrame(nsecs_t timestamp, InputFrame &inputFrame);
+    status_t processAppSegment(nsecs_t timestamp, InputFrame &inputFrame);
+    status_t processOneCodecOutputFrame(nsecs_t timestamp, InputFrame &inputFrame);
+    status_t processCompletedInputFrame(nsecs_t timestamp, InputFrame &inputFrame);
+
+    void releaseInputFrameLocked(InputFrame *inputFrame /*out*/);
+    void releaseInputFramesLocked(int64_t currentTs);
+
+    size_t findAppSegmentsSize(const uint8_t* appSegmentBuffer, size_t maxSize,
+            size_t* app1SegmentSize);
+    int64_t findTimestampInNsLocked(int64_t timeInUs);
+    status_t copyOneYuvTile(sp<MediaCodecBuffer>& codecBuffer,
+            const CpuConsumer::LockedBuffer& yuvBuffer,
+            size_t top, size_t left, size_t width, size_t height);
+    static size_t calcAppSegmentMaxSize(const CameraMetadata& info);
+
+    static const nsecs_t kWaitDuration = 10000000; // 10 ms
+    static const int32_t kDefaultJpegQuality = 99;
+    static const auto kJpegDataSpace = HAL_DATASPACE_V0_JFIF;
+    static const android_dataspace kAppSegmentDataSpace =
+            static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS);
+    static const android_dataspace kHeifDataSpace =
+            static_cast<android_dataspace>(HAL_DATASPACE_HEIF);
+
+    // Internal APP-segment blob stream (EXIF and other JPEG APPn segments).
+    int mAppSegmentStreamId, mAppSegmentSurfaceId;
+    sp<CpuConsumer> mAppSegmentConsumer;
+    sp<Surface> mAppSegmentSurface;
+    bool mAppSegmentBufferAcquired;
+    size_t mAppSegmentMaxSize;
+
+    // Internal main-image stream (direct-to-HEIC or YUV for HEVC tiling).
+    int mMainImageStreamId, mMainImageSurfaceId;
+    sp<Surface> mMainImageSurface;
+    sp<CpuConsumer> mMainImageConsumer; // Only applicable for HEVC codec.
+    bool mYuvBufferAcquired; // Only applicable to HEVC codec
+
+    sp<Surface> mOutputSurface;
+    sp<ProducerListener> mProducerListener;
+
+
+    // Map from frame number to JPEG setting of orientation+quality
+    std::map<int64_t, std::pair<int32_t, int32_t>> mSettingsByFrameNumber;
+    // Map from timestamp to JPEG setting of orientation+quality
+    std::map<int64_t, std::pair<int32_t, int32_t>> mSettingsByTimestamp;
+
+    // Keep all incoming APP segment Blob buffer pending further processing.
+    std::vector<int64_t> mInputAppSegmentBuffers;
+
+    // Keep all incoming HEIC blob buffer pending further processing.
+    std::vector<CodecOutputBufferInfo> mCodecOutputBuffers;
+    std::queue<int64_t> mCodecOutputBufferTimestamps;
+    size_t mOutputBufferCounter;
+
+    // Keep all incoming Yuv buffer pending tiling and encoding (for HEVC YUV tiling only)
+    std::vector<int64_t> mInputYuvBuffers;
+    // Keep all codec input buffers ready to be filled out (for HEVC YUV tiling only)
+    std::vector<int32_t> mCodecInputBuffers;
+
+    // Artificial strictly incremental YUV grid timestamp to make encoder happy.
+    int64_t mGridTimestampUs;
+
+    // In most common use case, entries are accessed in order.
+    std::map<int64_t, InputFrame> mPendingInputFrames;
+};
+
+}; // namespace camera3
+}; // namespace android
+
+#endif //ANDROID_SERVERS_CAMERA_CAMERA3_HEIC_COMPOSITE_STREAM_H
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
new file mode 100644
index 0000000..ed9be6e
--- /dev/null
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.cpp
@@ -0,0 +1,294 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "HeicEncoderInfoManager"
+//#define LOG_NDEBUG 0
+
+#include <cstdint>
+#include <regex>
+
+#include <cutils/properties.h>
+#include <log/log_main.h>
+#include <system/graphics.h>
+
+#include <media/stagefright/MediaCodecList.h>
+#include <media/stagefright/foundation/MediaDefs.h>
+#include <media/stagefright/foundation/ABuffer.h>
+
+#include "HeicEncoderInfoManager.h"
+
+namespace android {
+namespace camera3 {
+
+// Queries the device's media codec list once at construction and caches the
+// results; all later isSizeSupported() calls answer from the cache. If
+// initialize() fails, mIsInited stays false and isSizeSupported() always
+// returns false.
+HeicEncoderInfoManager::HeicEncoderInfoManager() :
+        mIsInited(false),
+        mMinSizeHeic(0, 0),
+        mMaxSizeHeic(INT32_MAX, INT32_MAX),
+        mHasHEVC(false),
+        mHasHEIC(false),
+        mDisableGrid(false) {
+    if (initialize() == OK) {
+        mIsInited = true;
+    }
+}
+
+// Nothing to release: initialize() only caches values, it holds no resources.
+HeicEncoderInfoManager::~HeicEncoderInfoManager() {
+}
+
+// Decides whether a width x height still capture can be produced by the
+// device's HEIF pipeline, and if so, how.
+//
+// Outputs (only valid when the function returns true):
+//   *useHeic - true when a dedicated HEIC encoder covers the size directly.
+//   *useGrid - true when the framework must tile the image into
+//              kGridWidth x kGridHeight tiles and feed them to an HEVC
+//              encoder in CQ mode. Note that grid mode is considered usable
+//              even when the full size exceeds the HEVC encoder's size range,
+//              since only the tile size must be supported.
+//   *stall   - optional; estimated encoding stall in ns, derived from the
+//              encoder's "measured-frame-rate-WxH-range" entries when present,
+//              otherwise from a hardcoded per-tile cost.
+//
+// Fix over the previous revision: the early return taken when no measured
+// frame-rate entry matched left *useHeic/*useGrid unassigned, so callers read
+// indeterminate values. The outputs are now assigned before any stall
+// computation.
+bool HeicEncoderInfoManager::isSizeSupported(int32_t width, int32_t height, bool* useHeic,
+        bool* useGrid, int64_t* stall) const {
+    if (useHeic == nullptr || useGrid == nullptr) {
+        ALOGE("%s: invalid parameters: useHeic %p, useGrid %p",
+                __FUNCTION__, useHeic, useGrid);
+        return false;
+    }
+    if (!mIsInited) return false;
+
+    bool chooseHeic = false, enableGrid = true;
+    if (mHasHEIC && width >= mMinSizeHeic.first &&
+            height >= mMinSizeHeic.second && width <= mMaxSizeHeic.first &&
+            height <= mMaxSizeHeic.second) {
+        chooseHeic = true;
+        enableGrid = false;
+    } else if (mHasHEVC) {
+        bool fullSizeSupportedByHevc = (width >= mMinSizeHevc.first &&
+                height >= mMinSizeHevc.second &&
+                width <= mMaxSizeHevc.first &&
+                height <= mMaxSizeHevc.second);
+        if (fullSizeSupportedByHevc && (mDisableGrid ||
+                (width <= 1920 && height <= 1080))) {
+            enableGrid = false;
+        }
+    } else {
+        // No encoder available for the requested size.
+        return false;
+    }
+
+    // Publish the encoder choice before any early return below, so callers
+    // never observe uninitialized output parameters.
+    *useHeic = chooseHeic;
+    *useGrid = enableGrid;
+
+    if (stall != nullptr) {
+        // Find preferred encoder which advertise
+        // "measured-frame-rate-WIDTHxHEIGHT-range" key.
+        const FrameRateMaps& maps =
+                (chooseHeic && mHeicFrameRateMaps.size() > 0) ?
+                mHeicFrameRateMaps : mHevcFrameRateMaps;
+        const auto& closestSize = findClosestSize(maps, width, height);
+        if (closestSize == maps.end()) {
+            // The "measured-frame-rate-WIDTHxHEIGHT-range" key is optional.
+            // Hardcode to some default value (3.33ms * tile count) based on resolution.
+            *stall = 3333333LL * width * height / (kGridWidth * kGridHeight);
+            return true;
+        }
+
+        // Derive stall durations based on average fps of the closest size.
+        constexpr int64_t NSEC_PER_SEC = 1000000000LL;
+        int32_t avgFps = (closestSize->second.first + closestSize->second.second)/2;
+        float ratio = 1.0f * width * height /
+                (closestSize->first.first * closestSize->first.second);
+        *stall = ratio * NSEC_PER_SEC / avgFps;
+    }
+
+    return true;
+}
+
+// One-time discovery of the device's HEIF encoding capabilities.
+//
+// Populates mHasHEIC/mHasHEVC, the min/max size ranges, and the measured
+// frame-rate maps. Returns OK both on success and when HEIF is simply
+// unavailable (no codec list, no HEVC encoder, or no CQ bitrate mode) -- in
+// those cases both mHasHEIC and mHasHEVC remain false. Returns BAD_VALUE only
+// for inconsistent codec configurations.
+status_t HeicEncoderInfoManager::initialize() {
+    // Debug escape hatch: force full-frame HEVC encoding instead of tiling.
+    mDisableGrid = property_get_bool("camera.heic.disable_grid", false);
+    sp<IMediaCodecList> codecsList = MediaCodecList::getInstance();
+    if (codecsList == nullptr) {
+        // No media codec available.
+        return OK;
+    }
+
+    sp<AMessage> heicDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC);
+    sp<AMessage> hevcDetails = getCodecDetails(codecsList, MEDIA_MIMETYPE_VIDEO_HEVC);
+
+    if (hevcDetails == nullptr) {
+        // A HEIC encoder without an HEVC encoder is an invalid configuration:
+        // HEIC images are HEVC-coded.
+        if (heicDetails != nullptr) {
+            ALOGE("%s: Device must support HEVC codec if HEIC codec is available!",
+                    __FUNCTION__);
+            return BAD_VALUE;
+        }
+        return OK;
+    }
+
+    // Check CQ mode for HEVC codec
+    // (constant-quality bitrate mode is required for still-image encoding;
+    // without it HEIF support is not advertised at all).
+    {
+        AString bitrateModes;
+        auto hasItem = hevcDetails->findString("feature-bitrate-modes", &bitrateModes);
+        if (!hasItem) {
+            ALOGE("%s: Failed to query bitrate modes for HEVC codec", __FUNCTION__);
+            return BAD_VALUE;
+        }
+        ALOGV("%s: HEVC codec's feature-bitrate-modes value is %d, %s",
+                __FUNCTION__, hasItem, bitrateModes.c_str());
+        // Match "CQ" as a whole comma-separated token, case-insensitively.
+        std::regex pattern("(^|,)CQ($|,)", std::regex_constants::icase);
+        if (!std::regex_search(bitrateModes.c_str(), pattern)) {
+            return OK;
+        }
+    }
+
+    // HEIC size range
+    if (heicDetails != nullptr) {
+        auto res = getCodecSizeRange(MEDIA_MIMETYPE_IMAGE_ANDROID_HEIC,
+                heicDetails, &mMinSizeHeic, &mMaxSizeHeic, &mHeicFrameRateMaps);
+        if (res != OK) {
+            ALOGE("%s: Failed to get HEIC codec size range: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            return BAD_VALUE;
+        }
+        mHasHEIC = true;
+    }
+
+    // HEVC size range
+    {
+        auto res = getCodecSizeRange(MEDIA_MIMETYPE_VIDEO_HEVC,
+                hevcDetails, &mMinSizeHevc, &mMaxSizeHevc, &mHevcFrameRateMaps);
+        if (res != OK) {
+            ALOGE("%s: Failed to get HEVC codec size range: %s (%d)", __FUNCTION__,
+                    strerror(-res), res);
+            return BAD_VALUE;
+        }
+
+        mHasHEVC = true;
+    }
+
+    return OK;
+}
+
+// Scans a codec's detail message for optional
+// "measured-frame-rate-<W>x<H>-range" string entries of the form "min-max"
+// and records them in 'maps' as (W,H) -> (minFps, maxFps).
+//
+// Entries whose name doesn't match the pattern are skipped silently; an entry
+// whose name matches but whose value is malformed yields BAD_VALUE. A map
+// with no entries is a valid (empty) result, since the key is optional.
+status_t HeicEncoderInfoManager::getFrameRateMaps(sp<AMessage> details, FrameRateMaps* maps) {
+    if (details == nullptr || maps == nullptr) {
+        ALOGE("%s: Invalid input: details: %p, maps: %p", __FUNCTION__, details.get(), maps);
+        return BAD_VALUE;
+    }
+
+    for (size_t i = 0; i < details->countEntries(); i++) {
+        AMessage::Type type;
+        const char* entryName = details->getEntryNameAt(i, &type);
+        if (type != AMessage::kTypeString) continue;
+        // Size separator may be either '*' or 'x' depending on the codec.
+        std::regex frameRateNamePattern("measured-frame-rate-([0-9]+)[*x]([0-9]+)-range",
+                std::regex_constants::icase);
+        std::cmatch sizeMatch;
+        if (std::regex_match(entryName, sizeMatch, frameRateNamePattern) &&
+                sizeMatch.size() == 3) {
+            AMessage::ItemData item = details->getEntryAt(i);
+            AString fpsRangeStr;
+            if (item.find(&fpsRangeStr)) {
+                ALOGV("%s: %s", entryName, fpsRangeStr.c_str());
+                std::regex frameRatePattern("([0-9]+)-([0-9]+)");
+                std::cmatch fpsMatch;
+                if (std::regex_match(fpsRangeStr.c_str(), fpsMatch, frameRatePattern) &&
+                        fpsMatch.size() == 3) {
+                    maps->emplace(
+                            std::make_pair(stoi(sizeMatch[1]), stoi(sizeMatch[2])),
+                            std::make_pair(stoi(fpsMatch[1]), stoi(fpsMatch[2])));
+                } else {
+                    return BAD_VALUE;
+                }
+            }
+        }
+    }
+    return OK;
+}
+
+// Parses a codec's "size-range" detail entry ("MINWxMINH-MAXWxMAXH", with '*'
+// or 'x' as the dimension separator) into 'minSize'/'maxSize', then collects
+// the codec's measured frame-rate entries into 'frameRateMaps'.
+//
+// NOTE(review): if the entry is present but does not match the pattern at
+// all, regex_match fails and min/max are left at the caller-provided
+// defaults while OK is returned -- presumably intentional leniency, but
+// worth confirming.
+status_t HeicEncoderInfoManager::getCodecSizeRange(
+        const char* codecName,
+        sp<AMessage> details,
+        std::pair<int32_t, int32_t>* minSize,
+        std::pair<int32_t, int32_t>* maxSize,
+        FrameRateMaps* frameRateMaps) {
+    if (codecName == nullptr || minSize == nullptr || maxSize == nullptr ||
+            details == nullptr || frameRateMaps == nullptr) {
+        return BAD_VALUE;
+    }
+
+    AString sizeRange;
+    auto hasItem = details->findString("size-range", &sizeRange);
+    if (!hasItem) {
+        ALOGE("%s: Failed to query size range for codec %s", __FUNCTION__, codecName);
+        return BAD_VALUE;
+    }
+    ALOGV("%s: %s codec's size range is %s", __FUNCTION__, codecName, sizeRange.c_str());
+    std::regex pattern("([0-9]+)[*x]([0-9]+)-([0-9]+)[*x]([0-9]+)");
+    std::cmatch match;
+    if (std::regex_match(sizeRange.c_str(), match, pattern)) {
+        if (match.size() == 5) {
+            minSize->first = stoi(match[1]);
+            minSize->second = stoi(match[2]);
+            maxSize->first = stoi(match[3]);
+            maxSize->second = stoi(match[4]);
+            if (minSize->first > maxSize->first ||
+                    minSize->second > maxSize->second) {
+                // NOTE(review): message says "code size range"; likely meant
+                // "codec size range".
+                ALOGE("%s: Invalid %s code size range: %s",
+                        __FUNCTION__, codecName, sizeRange.c_str());
+                return BAD_VALUE;
+            }
+        } else {
+            return BAD_VALUE;
+        }
+    }
+
+    auto res = getFrameRateMaps(details, frameRateMaps);
+    if (res != OK) {
+        return res;
+    }
+
+    return OK;
+}
+
+// Returns the iterator of the map entry whose pixel area is closest to
+// width*height, or maps.end() when the map is empty (begin() == end()).
+HeicEncoderInfoManager::FrameRateMaps::const_iterator HeicEncoderInfoManager::findClosestSize(
+        const FrameRateMaps& maps, int32_t width, int32_t height) const {
+    int32_t minDiff = INT32_MAX;
+    FrameRateMaps::const_iterator closestIter = maps.begin();
+    for (auto iter = maps.begin(); iter != maps.end(); iter++) {
+        // Use area difference between the sizes to approximate size
+        // difference.
+        int32_t diff = abs(iter->first.first * iter->first.second - width * height);
+        if (diff < minDiff) {
+            closestIter = iter;
+            minDiff = diff;
+        }
+    }
+    return closestIter;
+}
+
+// Looks up the first encoder for MIME type 'name' and returns its
+// capabilities detail message, or nullptr if no such encoder exists or any
+// lookup step fails. Absence of the codec (idx < 0) is not logged since it is
+// a normal configuration, while failures on an existing codec are.
+sp<AMessage> HeicEncoderInfoManager::getCodecDetails(
+        sp<IMediaCodecList> codecsList, const char* name) {
+    ssize_t idx = codecsList->findCodecByType(name, true /*encoder*/);
+    if (idx < 0) {
+        return nullptr;
+    }
+
+    const sp<MediaCodecInfo> info = codecsList->getCodecInfo(idx);
+    if (info == nullptr) {
+        ALOGE("%s: Failed to get codec info for %s", __FUNCTION__, name);
+        return nullptr;
+    }
+    const sp<MediaCodecInfo::Capabilities> caps =
+            info->getCapabilitiesFor(name);
+    if (caps == nullptr) {
+        ALOGE("%s: Failed to get capabilities for codec %s", __FUNCTION__, name);
+        return nullptr;
+    }
+    const sp<AMessage> details = caps->getDetails();
+    if (details == nullptr) {
+        ALOGE("%s: Failed to get details for codec %s", __FUNCTION__, name);
+        return nullptr;
+    }
+
+    return details;
+}
+} //namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
new file mode 100644
index 0000000..fb0b914
--- /dev/null
+++ b/services/camera/libcameraservice/api2/HeicEncoderInfoManager.h
@@ -0,0 +1,77 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_HEICENCODER_INFO_MANAGER_H
+#define ANDROID_SERVERS_CAMERA_HEICENCODER_INFO_MANAGER_H
+
+#include <unordered_map>
+#include <utility>
+#include <utils/Errors.h>
+#include <utils/StrongPointer.h>
+
+#include <media/IMediaCodecList.h>
+#include <media/stagefright/foundation/AMessage.h>
+
+namespace android {
+namespace camera3 {
+
+// Process-wide singleton that caches the device's HEIF encoding capabilities
+// (HEIC vs. tiled-HEVC, supported size ranges, measured frame rates) queried
+// from MediaCodecList. Construction happens lazily on first getInstance().
+class HeicEncoderInfoManager {
+public:
+    static HeicEncoderInfoManager& getInstance() {
+        static HeicEncoderInfoManager instance;
+        return instance;
+    }
+
+    // See HeicEncoderInfoManager.cpp for the full output-parameter contract.
+    bool isSizeSupported(int32_t width, int32_t height,
+            bool* useHeic, bool* useGrid, int64_t* stall) const;
+
+    // Tile dimensions used for framework YUV tiling in HEVC grid mode.
+    static const auto kGridWidth = 512;
+    static const auto kGridHeight = 512;
+private:
+    struct SizePairHash {
+        std::size_t operator () (const std::pair<int32_t,int32_t> &p) const {
+            // NOTE(review): int32 arithmetic could overflow (UB) for huge
+            // dimensions; harmless for realistic image sizes but size_t math
+            // would be safer.
+            return p.first * 31 + p.second;
+        }
+    };
+
+    // (width, height) -> (minFps, maxFps) from the codec's optional
+    // "measured-frame-rate-WxH-range" entries.
+    typedef std::unordered_map<std::pair<int32_t, int32_t>,
+            std::pair<int32_t, int32_t>, SizePairHash> FrameRateMaps;
+
+    HeicEncoderInfoManager();
+    virtual ~HeicEncoderInfoManager();
+
+    status_t initialize();
+    status_t getFrameRateMaps(sp<AMessage> details, FrameRateMaps* maps);
+    status_t getCodecSizeRange(const char* codecName, sp<AMessage> details,
+            std::pair<int32_t, int32_t>* minSize, std::pair<int32_t, int32_t>* maxSize,
+            FrameRateMaps* frameRateMaps);
+    FrameRateMaps::const_iterator findClosestSize(const FrameRateMaps& maps,
+            int32_t width, int32_t height) const;
+    sp<AMessage> getCodecDetails(sp<IMediaCodecList> codecsList, const char* name);
+
+    bool mIsInited;
+    std::pair<int32_t, int32_t> mMinSizeHeic, mMaxSizeHeic;
+    std::pair<int32_t, int32_t> mMinSizeHevc, mMaxSizeHevc;
+    bool mHasHEVC, mHasHEIC;
+    FrameRateMaps mHeicFrameRateMaps, mHevcFrameRateMaps;
+    bool mDisableGrid;
+
+};
+
+} // namespace camera3
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_HEICENCODER_INFO_MANAGER_H
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.cpp b/services/camera/libcameraservice/common/CameraProviderManager.cpp
index 7c32e84..f35c66a 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.cpp
+++ b/services/camera/libcameraservice/common/CameraProviderManager.cpp
@@ -38,6 +38,8 @@
#include <hwbinder/IPCThreadState.h>
#include <utils/Trace.h>
+#include "api2/HeicCompositeStream.h"
+
namespace android {
using namespace ::android::hardware::camera;
@@ -499,6 +501,17 @@
}
}
+// A camera is hidden from public clients only when SECURE_IMAGE_DATA is its
+// one and only advertised capability.
+bool CameraProviderManager::ProviderInfo::DeviceInfo3::isPublicallyHiddenSecureCamera() {
+    camera_metadata_entry_t entryCap;
+    entryCap = mCameraCharacteristics.find(ANDROID_REQUEST_AVAILABLE_CAPABILITIES);
+    if (entryCap.count != 1) {
+        // Do NOT hide this camera device if the capabilities specify anything more
+        // than ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA.
+        return false;
+    }
+    return entryCap.data.u8[0] == ANDROID_REQUEST_AVAILABLE_CAPABILITIES_SECURE_IMAGE_DATA;
+}
+
void CameraProviderManager::ProviderInfo::DeviceInfo3::getSupportedSizes(
const CameraMetadata& ch, uint32_t tag, android_pixel_format_t format,
std::vector<std::tuple<size_t, size_t>> *sizes/*out*/) {
@@ -643,7 +656,7 @@
bool isDepthExclusivePresent = std::find(chTags.data.i32, chTags.data.i32 + chTags.count,
depthExclTag) != (chTags.data.i32 + chTags.count);
bool isDepthSizePresent = std::find(chTags.data.i32, chTags.data.i32 + chTags.count,
- depthExclTag) != (chTags.data.i32 + chTags.count);
+ depthSizesTag) != (chTags.data.i32 + chTags.count);
if (!(isDepthExclusivePresent && isDepthSizePresent)) {
// No depth support, nothing more to do.
return OK;
@@ -671,7 +684,6 @@
getSupportedDynamicDepthSizes(supportedBlobSizes, supportedDepthSizes,
&supportedDynamicDepthSizes, &internalDepthSizes);
if (supportedDynamicDepthSizes.empty()) {
- ALOGE("%s: No dynamic depth size matched!", __func__);
// Nothing more to do.
return OK;
}
@@ -864,6 +876,130 @@
return res;
}
+// Builds the HEIC stream configuration, min-frame-duration and stall-duration
+// lists from the HAL's scaler stream configurations.
+//
+// All three metadata tables use 4-element tuples (format, width, height,
+// value), hence the stride-4 loops. Each qualifying HAL size is emitted once
+// as a BLOB output entry; the dedup scan over 'outputs' handles sizes listed
+// under both IMPLEMENTATION_DEFINED and YCbCr_420_888.
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::fillHeicStreamCombinations(
+        std::vector<int32_t>* outputs,
+        std::vector<int64_t>* durations,
+        std::vector<int64_t>* stallDurations,
+        const camera_metadata_entry& halStreamConfigs,
+        const camera_metadata_entry& halStreamDurations) {
+    if (outputs == nullptr || durations == nullptr || stallDurations == nullptr) {
+        return BAD_VALUE;
+    }
+
+    // Evaluated once per process; the result cannot change at runtime.
+    static bool supportInMemoryTempFile =
+            camera3::HeicCompositeStream::isInMemoryTempFileSupported();
+    if (!supportInMemoryTempFile) {
+        ALOGI("%s: No HEIC support due to absence of in memory temp file support",
+                __FUNCTION__);
+        return OK;
+    }
+
+    for (size_t i = 0; i < halStreamConfigs.count; i += 4) {
+        int32_t format = halStreamConfigs.data.i32[i];
+        // Only IMPLEMENTATION_DEFINED and YUV_888 can be used to generate HEIC
+        // image.
+        if (format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED &&
+                format != HAL_PIXEL_FORMAT_YCBCR_420_888) {
+            continue;
+        }
+
+        // Skip sizes already emitted for the other source format.
+        bool sizeAvail = false;
+        for (size_t j = 0; j < outputs->size(); j+= 4) {
+            if ((*outputs)[j+1] == halStreamConfigs.data.i32[i+1] &&
+                    (*outputs)[j+2] == halStreamConfigs.data.i32[i+2]) {
+                sizeAvail = true;
+                break;
+            }
+        }
+        if (sizeAvail) continue;
+
+        int64_t stall = 0;
+        bool useHeic, useGrid;
+        if (camera3::HeicCompositeStream::isSizeSupportedByHeifEncoder(
+                halStreamConfigs.data.i32[i+1], halStreamConfigs.data.i32[i+2],
+                &useHeic, &useGrid, &stall)) {
+            // Grid (YUV tiling) mode must pair with the YUV source format;
+            // direct HEIC encoding pairs with IMPLEMENTATION_DEFINED.
+            if (useGrid != (format == HAL_PIXEL_FORMAT_YCBCR_420_888)) {
+                continue;
+            }
+
+            // HEIC configuration
+            int32_t config[] = {HAL_PIXEL_FORMAT_BLOB, halStreamConfigs.data.i32[i+1],
+                    halStreamConfigs.data.i32[i+2], 0 /*isInput*/};
+            outputs->insert(outputs->end(), config, config + 4);
+
+            // HEIC minFrameDuration: copy the HAL's duration for the matching
+            // source format/size, re-keyed to BLOB.
+            for (size_t j = 0; j < halStreamDurations.count; j += 4) {
+                if (halStreamDurations.data.i64[j] == format &&
+                        halStreamDurations.data.i64[j+1] == halStreamConfigs.data.i32[i+1] &&
+                        halStreamDurations.data.i64[j+2] == halStreamConfigs.data.i32[i+2]) {
+                    int64_t duration[] = {HAL_PIXEL_FORMAT_BLOB, halStreamConfigs.data.i32[i+1],
+                            halStreamConfigs.data.i32[i+2], halStreamDurations.data.i64[j+3]};
+                    durations->insert(durations->end(), duration, duration+4);
+                    break;
+                }
+            }
+
+            // HEIC stallDuration
+            int64_t stallDuration[] = {HAL_PIXEL_FORMAT_BLOB, halStreamConfigs.data.i32[i+1],
+                    halStreamConfigs.data.i32[i+2], stall};
+            stallDurations->insert(stallDurations->end(), stallDuration, stallDuration+4);
+        }
+    }
+    return OK;
+}
+
+// Derives the HEIC static metadata tags (stream configurations, min frame
+// durations, stall durations) for this device, based on the HAL-advertised
+// HEIC support flags and the scaler stream configurations. Returns OK when
+// the HAL simply doesn't opt in; BAD_VALUE for malformed HAL metadata.
+status_t CameraProviderManager::ProviderInfo::DeviceInfo3::deriveHeicTags() {
+    auto& c = mCameraCharacteristics;
+
+    camera_metadata_entry halHeicSupport = c.find(ANDROID_HEIC_INFO_SUPPORTED);
+    if (halHeicSupport.count > 1) {
+        ALOGE("%s: Invalid entry count %zu for ANDROID_HEIC_INFO_SUPPORTED",
+                __FUNCTION__, halHeicSupport.count);
+        return BAD_VALUE;
+    } else if (halHeicSupport.count == 0 ||
+            halHeicSupport.data.u8[0] == ANDROID_HEIC_INFO_SUPPORTED_FALSE) {
+        // Camera HAL doesn't support mandatory stream combinations for HEIC.
+        return OK;
+    }
+
+    camera_metadata_entry maxJpegAppsSegments =
+            c.find(ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT);
+    if (maxJpegAppsSegments.count != 1 || maxJpegAppsSegments.data.u8[0] == 0 ||
+            maxJpegAppsSegments.data.u8[0] > 16) {
+        ALOGE("%s: ANDROID_HEIC_INFO_MAX_JPEG_APP_SEGMENTS_COUNT must be within [1, 16]",
+                __FUNCTION__);
+        return BAD_VALUE;
+    }
+
+    // Populate HEIC output configurations and its related min frame duration
+    // and stall duration.
+    std::vector<int32_t> heicOutputs;
+    std::vector<int64_t> heicDurations;
+    std::vector<int64_t> heicStallDurations;
+
+    camera_metadata_entry halStreamConfigs =
+            c.find(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS);
+    camera_metadata_entry minFrameDurations =
+            c.find(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS);
+
+    status_t res = fillHeicStreamCombinations(&heicOutputs, &heicDurations, &heicStallDurations,
+            halStreamConfigs, minFrameDurations);
+    if (res != OK) {
+        ALOGE("%s: Failed to fill HEIC stream combinations: %s (%d)", __FUNCTION__,
+                strerror(-res), res);
+        return res;
+    }
+
+    // Note: updates happen even when the lists came back empty, leaving
+    // zero-count entries rather than absent tags.
+    c.update(ANDROID_HEIC_AVAILABLE_HEIC_STREAM_CONFIGURATIONS,
+           heicOutputs.data(), heicOutputs.size());
+    c.update(ANDROID_HEIC_AVAILABLE_HEIC_MIN_FRAME_DURATIONS,
+            heicDurations.data(), heicDurations.size());
+    c.update(ANDROID_HEIC_AVAILABLE_HEIC_STALL_DURATIONS,
+            heicStallDurations.data(), heicStallDurations.size());
+
+    return OK;
+}
+
bool CameraProviderManager::isLogicalCamera(const std::string& id,
std::vector<std::string>* physicalCameraIds) {
std::lock_guard<std::mutex> lock(mInterfaceMutex);
@@ -877,6 +1013,16 @@
return deviceInfo->mIsLogicalCamera;
}
+// Returns whether camera 'id' should be hidden from public clients (cached at
+// device-info creation time). Unknown ids are treated as not hidden.
+bool CameraProviderManager::isPublicallyHiddenSecureCamera(const std::string& id) {
+    std::lock_guard<std::mutex> lock(mInterfaceMutex);
+
+    auto deviceInfo = findDeviceInfoLocked(id);
+    if (deviceInfo == nullptr) {
+        return false;
+    }
+    return deviceInfo->mIsPublicallyHiddenSecureCamera;
+}
+
bool CameraProviderManager::isHiddenPhysicalCamera(const std::string& cameraId) {
for (auto& provider : mProviders) {
for (auto& deviceInfo : provider->mDevices) {
@@ -1704,18 +1850,26 @@
__FUNCTION__, id.c_str(), CameraProviderManager::statusToString(status), status);
return;
}
+
+ mIsPublicallyHiddenSecureCamera = isPublicallyHiddenSecureCamera();
+
status_t res = fixupMonochromeTags();
if (OK != res) {
ALOGE("%s: Unable to fix up monochrome tags based for older HAL version: %s (%d)",
__FUNCTION__, strerror(-res), res);
return;
}
- res = addDynamicDepthTags();
- if (OK != res) {
- ALOGE("%s: Failed appending dynamic depth tags: %s (%d)", __FUNCTION__, strerror(-res),
- res);
- return;
+ auto stat = addDynamicDepthTags();
+ if (OK != stat) {
+ ALOGE("%s: Failed appending dynamic depth tags: %s (%d)", __FUNCTION__, strerror(-stat),
+ stat);
}
+ res = deriveHeicTags();
+ if (OK != res) {
+ ALOGE("%s: Unable to derive HEIC tags based on camera and media capabilities: %s (%d)",
+ __FUNCTION__, strerror(-res), res);
+ }
+
camera_metadata_entry flashAvailable =
mCameraCharacteristics.find(ANDROID_FLASH_INFO_AVAILABLE);
if (flashAvailable.count == 1 &&
@@ -1726,6 +1880,7 @@
}
queryPhysicalCameraIds();
+
// Get physical camera characteristics if applicable
auto castResult = device::V3_5::ICameraDevice::castFrom(interface);
if (!castResult.isOk()) {
diff --git a/services/camera/libcameraservice/common/CameraProviderManager.h b/services/camera/libcameraservice/common/CameraProviderManager.h
index fbd7d2e..3173eda 100644
--- a/services/camera/libcameraservice/common/CameraProviderManager.h
+++ b/services/camera/libcameraservice/common/CameraProviderManager.h
@@ -264,6 +264,7 @@
*/
bool isLogicalCamera(const std::string& id, std::vector<std::string>* physicalCameraIds);
+ bool isPublicallyHiddenSecureCamera(const std::string& id);
bool isHiddenPhysicalCamera(const std::string& cameraId);
static const float kDepthARTolerance;
@@ -354,6 +355,7 @@
std::vector<std::string> mPhysicalIds;
hardware::CameraInfo mInfo;
sp<IBase> mSavedInterface;
+ bool mIsPublicallyHiddenSecureCamera = false;
const hardware::camera::common::V1_0::CameraResourceCost mResourceCost;
@@ -471,6 +473,7 @@
CameraMetadata mCameraCharacteristics;
std::unordered_map<std::string, CameraMetadata> mPhysicalCameraCharacteristics;
void queryPhysicalCameraIds();
+ bool isPublicallyHiddenSecureCamera();
status_t fixupMonochromeTags();
status_t addDynamicDepthTags();
static void getSupportedSizes(const CameraMetadata& ch, uint32_t tag,
@@ -491,6 +494,12 @@
std::vector<std::tuple<size_t, size_t>> *internalDepthSizes /*out*/);
status_t removeAvailableKeys(CameraMetadata& c, const std::vector<uint32_t>& keys,
uint32_t keyTag);
+ status_t fillHeicStreamCombinations(std::vector<int32_t>* outputs,
+ std::vector<int64_t>* durations,
+ std::vector<int64_t>* stallDurations,
+ const camera_metadata_entry& halStreamConfigs,
+ const camera_metadata_entry& halStreamDurations);
+ status_t deriveHeicTags();
};
private:
diff --git a/services/camera/libcameraservice/device3/Camera3Device.cpp b/services/camera/libcameraservice/device3/Camera3Device.cpp
index 82dfc0f..918dcf7 100644
--- a/services/camera/libcameraservice/device3/Camera3Device.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Device.cpp
@@ -1757,18 +1757,20 @@
if (format == HAL_PIXEL_FORMAT_BLOB) {
ssize_t blobBufferSize;
- if (dataSpace != HAL_DATASPACE_DEPTH) {
- blobBufferSize = getJpegBufferSize(width, height);
- if (blobBufferSize <= 0) {
- SET_ERR_L("Invalid jpeg buffer size %zd", blobBufferSize);
- return BAD_VALUE;
- }
- } else {
+ if (dataSpace == HAL_DATASPACE_DEPTH) {
blobBufferSize = getPointCloudBufferSize();
if (blobBufferSize <= 0) {
SET_ERR_L("Invalid point cloud buffer size %zd", blobBufferSize);
return BAD_VALUE;
}
+ } else if (dataSpace == static_cast<android_dataspace>(HAL_DATASPACE_JPEG_APP_SEGMENTS)) {
+ blobBufferSize = width * height;
+ } else {
+ blobBufferSize = getJpegBufferSize(width, height);
+ if (blobBufferSize <= 0) {
+ SET_ERR_L("Invalid jpeg buffer size %zd", blobBufferSize);
+ return BAD_VALUE;
+ }
}
newStream = new Camera3OutputStream(mNextStreamId, consumers[0],
width, height, blobBufferSize, format, dataSpace, rotation,
@@ -5473,8 +5475,22 @@
return TIMED_OUT;
}
}
- outputStream->fireBufferRequestForFrameNumber(
- captureRequest->mResultExtras.frameNumber);
+
+ {
+ sp<Camera3Device> parent = mParent.promote();
+ if (parent != nullptr) {
+ const String8& streamCameraId = outputStream->getPhysicalCameraId();
+ for (const auto& settings : captureRequest->mSettingsList) {
+ if ((streamCameraId.isEmpty() &&
+ parent->getId() == settings.cameraId.c_str()) ||
+ streamCameraId == settings.cameraId.c_str()) {
+ outputStream->fireBufferRequestForFrameNumber(
+ captureRequest->mResultExtras.frameNumber,
+ settings.metadata);
+ }
+ }
+ }
+ }
String8 physicalCameraId = outputStream->getPhysicalCameraId();
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.cpp b/services/camera/libcameraservice/device3/Camera3Stream.cpp
index b296513..d29e5c0 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.cpp
+++ b/services/camera/libcameraservice/device3/Camera3Stream.cpp
@@ -763,14 +763,15 @@
return getInputBufferProducerLocked(producer);
}
-void Camera3Stream::fireBufferRequestForFrameNumber(uint64_t frameNumber) {
+void Camera3Stream::fireBufferRequestForFrameNumber(uint64_t frameNumber,
+ const CameraMetadata& settings) {
ATRACE_CALL();
Mutex::Autolock l(mLock);
for (auto &it : mBufferListenerList) {
sp<Camera3StreamBufferListener> listener = it.promote();
if (listener.get() != nullptr) {
- listener->onBufferRequestForFrameNumber(frameNumber, getId());
+ listener->onBufferRequestForFrameNumber(frameNumber, getId(), settings);
}
}
}
diff --git a/services/camera/libcameraservice/device3/Camera3Stream.h b/services/camera/libcameraservice/device3/Camera3Stream.h
index 06deba9..5eb6a23 100644
--- a/services/camera/libcameraservice/device3/Camera3Stream.h
+++ b/services/camera/libcameraservice/device3/Camera3Stream.h
@@ -434,7 +434,8 @@
/**
* Notify buffer stream listeners about incoming request with particular frame number.
*/
- void fireBufferRequestForFrameNumber(uint64_t frameNumber) override;
+ void fireBufferRequestForFrameNumber(uint64_t frameNumber,
+ const CameraMetadata& settings) override;
protected:
const int mId;
diff --git a/services/camera/libcameraservice/device3/Camera3StreamBufferListener.h b/services/camera/libcameraservice/device3/Camera3StreamBufferListener.h
index 0e6104e..d0aee27 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamBufferListener.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamBufferListener.h
@@ -17,6 +17,7 @@
#ifndef ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H
#define ANDROID_SERVERS_CAMERA3_STREAMBUFFERLISTENER_H
+#include <camera/CameraMetadata.h>
#include <gui/Surface.h>
#include <utils/RefBase.h>
@@ -42,7 +43,8 @@
// Buffer was released by the HAL
virtual void onBufferReleased(const BufferInfo& bufferInfo) = 0;
// Notify about incoming buffer request frame number
- virtual void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId) = 0;
+ virtual void onBufferRequestForFrameNumber(uint64_t frameNumber, int streamId,
+ const CameraMetadata& settings) = 0;
};
}; //namespace camera3
diff --git a/services/camera/libcameraservice/device3/Camera3StreamInterface.h b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
index 7b80cbd..5cd11b7 100644
--- a/services/camera/libcameraservice/device3/Camera3StreamInterface.h
+++ b/services/camera/libcameraservice/device3/Camera3StreamInterface.h
@@ -18,6 +18,8 @@
#define ANDROID_SERVERS_CAMERA3_STREAM_INTERFACE_H
#include <utils/RefBase.h>
+
+#include <camera/CameraMetadata.h>
#include "Camera3StreamBufferListener.h"
#include "Camera3StreamBufferFreedListener.h"
@@ -346,7 +348,8 @@
/**
* Notify buffer stream listeners about incoming request with particular frame number.
*/
- virtual void fireBufferRequestForFrameNumber(uint64_t frameNumber) = 0;
+ virtual void fireBufferRequestForFrameNumber(uint64_t frameNumber,
+ const CameraMetadata& settings) = 0;
};
} // namespace camera3
diff --git a/services/camera/libcameraservice/hidl/HidlCameraService.cpp b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
index 48f1d37..74cfe42 100644
--- a/services/camera/libcameraservice/hidl/HidlCameraService.cpp
+++ b/services/camera/libcameraservice/hidl/HidlCameraService.cpp
@@ -182,7 +182,8 @@
}
}
std::vector<hardware::CameraStatus> cameraStatusAndIds{};
- binder::Status serviceRet = mAidlICameraService->addListener(csListener, &cameraStatusAndIds);
+ binder::Status serviceRet =
+ mAidlICameraService->addListenerHelper(csListener, &cameraStatusAndIds, true);
HStatus status = HStatus::NO_ERROR;
if (!serviceRet.isOk()) {
ALOGE("%s: Unable to add camera device status listener", __FUNCTION__);
diff --git a/services/camera/libcameraservice/utils/ExifUtils.cpp b/services/camera/libcameraservice/utils/ExifUtils.cpp
new file mode 100644
index 0000000..a4027cc
--- /dev/null
+++ b/services/camera/libcameraservice/utils/ExifUtils.cpp
@@ -0,0 +1,1046 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#define LOG_TAG "CameraServerExifUtils"
+#define ATRACE_TAG ATRACE_TAG_CAMERA
+//#define LOG_NDEBUG 0
+
+#include <cutils/log.h>
+
+#include <inttypes.h>
+#include <math.h>
+#include <stdint.h>
+#include <string>
+#include <vector>
+
+#include "ExifUtils.h"
+
+extern "C" {
+#include <libexif/exif-data.h>
+}
+
+namespace std {
+
+// Teach std::unique_ptr<ExifEntry> to release entries via libexif's
+// refcounting instead of operator delete.
+template <>
+struct default_delete<ExifEntry> {
+ inline void operator()(ExifEntry* entry) const { exif_entry_unref(entry); }
+};
+
+} // namespace std
+
+
+namespace android {
+namespace camera3 {
+
+
+// Concrete ExifUtils backed by libexif. Builds an EXIF APP1 segment by
+// setting individual tags, then serializing with generateApp1().
+class ExifUtilsImpl : public ExifUtils {
+public:
+ ExifUtilsImpl();
+
+ virtual ~ExifUtilsImpl();
+
+ // Initialize() can be called multiple times. The setting of Exif tags will be
+ // cleared.
+ virtual bool initialize(const unsigned char *app1Segment, size_t app1SegmentSize);
+
+ // set all known fields from a metadata structure
+ virtual bool setFromMetadata(const CameraMetadata& metadata,
+ const size_t imageWidth,
+ const size_t imageHeight);
+
+ // sets the lens aperture.
+ // Returns false if memory allocation fails.
+ virtual bool setAperture(uint32_t numerator, uint32_t denominator);
+
+ // sets the value of brightness.
+ // Returns false if memory allocation fails.
+ virtual bool setBrightness(int32_t numerator, int32_t denominator);
+
+ // sets the color space.
+ // Returns false if memory allocation fails.
+ virtual bool setColorSpace(uint16_t color_space);
+
+ // sets the information to compressed data.
+ // Returns false if memory allocation fails.
+ virtual bool setComponentsConfiguration(const std::string& components_configuration);
+
+ // sets the compression scheme used for the image data.
+ // Returns false if memory allocation fails.
+ virtual bool setCompression(uint16_t compression);
+
+ // sets image contrast.
+ // Returns false if memory allocation fails.
+ virtual bool setContrast(uint16_t contrast);
+
+ // sets the date and time of image last modified. It takes local time. The
+ // name of the tag is DateTime in IFD0.
+ // Returns false if memory allocation fails.
+ virtual bool setDateTime(const struct tm& t);
+
+ // sets the image description.
+ // Returns false if memory allocation fails.
+ virtual bool setDescription(const std::string& description);
+
+ // sets the digital zoom ratio. If the numerator is 0, it means digital zoom
+ // was not used.
+ // Returns false if memory allocation fails.
+ virtual bool setDigitalZoomRatio(uint32_t numerator, uint32_t denominator);
+
+ // sets the exposure bias.
+ // Returns false if memory allocation fails.
+ virtual bool setExposureBias(int32_t numerator, int32_t denominator);
+
+ // sets the exposure mode set when the image was shot.
+ // Returns false if memory allocation fails.
+ virtual bool setExposureMode(uint16_t exposure_mode);
+
+ // sets the program used by the camera to set exposure when the picture is
+ // taken.
+ // Returns false if memory allocation fails.
+ virtual bool setExposureProgram(uint16_t exposure_program);
+
+ // sets the exposure time, given in seconds.
+ // Returns false if memory allocation fails.
+ virtual bool setExposureTime(uint32_t numerator, uint32_t denominator);
+
+ // sets the status of flash.
+ // Returns false if memory allocation fails.
+ virtual bool setFlash(uint16_t flash);
+
+ // sets the F number.
+ // Returns false if memory allocation fails.
+ virtual bool setFNumber(uint32_t numerator, uint32_t denominator);
+
+ // sets the focal length of lens used to take the image in millimeters.
+ // Returns false if memory allocation fails.
+ virtual bool setFocalLength(uint32_t numerator, uint32_t denominator);
+
+ // sets the degree of overall image gain adjustment.
+ // Returns false if memory allocation fails.
+ virtual bool setGainControl(uint16_t gain_control);
+
+ // sets the altitude in meters.
+ // Returns false if memory allocation fails.
+ virtual bool setGpsAltitude(double altitude);
+
+ // sets the latitude with degrees minutes seconds format.
+ // Returns false if memory allocation fails.
+ virtual bool setGpsLatitude(double latitude);
+
+ // sets the longitude with degrees minutes seconds format.
+ // Returns false if memory allocation fails.
+ virtual bool setGpsLongitude(double longitude);
+
+ // sets GPS processing method.
+ // Returns false if memory allocation fails.
+ virtual bool setGpsProcessingMethod(const std::string& method);
+
+ // sets GPS date stamp and time stamp (atomic clock). It takes UTC time.
+ // Returns false if memory allocation fails.
+ virtual bool setGpsTimestamp(const struct tm& t);
+
+ // sets the length (number of rows) of main image.
+ // Returns false if memory allocation fails.
+ virtual bool setImageHeight(uint32_t length);
+
+ // sets the width (number of columns) of main image.
+ // Returns false if memory allocation fails.
+ virtual bool setImageWidth(uint32_t width);
+
+ // sets the ISO speed.
+ // Returns false if memory allocation fails.
+ virtual bool setIsoSpeedRating(uint16_t iso_speed_ratings);
+
+ // sets the kind of light source.
+ // Returns false if memory allocation fails.
+ virtual bool setLightSource(uint16_t light_source);
+
+ // sets the smallest F number of the lens.
+ // Returns false if memory allocation fails.
+ virtual bool setMaxAperture(uint32_t numerator, uint32_t denominator);
+
+ // sets the metering mode.
+ // Returns false if memory allocation fails.
+ virtual bool setMeteringMode(uint16_t metering_mode);
+
+ // sets image orientation.
+ // Returns false if memory allocation fails.
+ virtual bool setOrientation(uint16_t orientation);
+
+ // sets the unit for measuring XResolution and YResolution.
+ // Returns false if memory allocation fails.
+ virtual bool setResolutionUnit(uint16_t resolution_unit);
+
+ // sets image saturation.
+ // Returns false if memory allocation fails.
+ virtual bool setSaturation(uint16_t saturation);
+
+ // sets the type of scene that was shot.
+ // Returns false if memory allocation fails.
+ virtual bool setSceneCaptureType(uint16_t type);
+
+ // sets image sharpness.
+ // Returns false if memory allocation fails.
+ virtual bool setSharpness(uint16_t sharpness);
+
+ // sets the shutter speed.
+ // Returns false if memory allocation fails.
+ virtual bool setShutterSpeed(int32_t numerator, int32_t denominator);
+
+ // sets the distance to the subject, given in meters.
+ // Returns false if memory allocation fails.
+ virtual bool setSubjectDistance(uint32_t numerator, uint32_t denominator);
+
+ // sets the fractions of seconds for the <DateTime> tag.
+ // Returns false if memory allocation fails.
+ virtual bool setSubsecTime(const std::string& subsec_time);
+
+ // sets the white balance mode set when the image was shot.
+ // Returns false if memory allocation fails.
+ virtual bool setWhiteBalance(uint16_t white_balance);
+
+ // sets the number of pixels per resolution unit in the image width.
+ // Returns false if memory allocation fails.
+ virtual bool setXResolution(uint32_t numerator, uint32_t denominator);
+
+ // sets the position of chrominance components in relation to the luminance
+ // component.
+ // Returns false if memory allocation fails.
+ virtual bool setYCbCrPositioning(uint16_t ycbcr_positioning);
+
+ // sets the number of pixels per resolution unit in the image length.
+ // Returns false if memory allocation fails.
+ virtual bool setYResolution(uint32_t numerator, uint32_t denominator);
+
+ // sets the manufacturer of camera.
+ // Returns false if memory allocation fails.
+ virtual bool setMake(const std::string& make);
+
+ // sets the model number of camera.
+ // Returns false if memory allocation fails.
+ virtual bool setModel(const std::string& model);
+
+ // Generates APP1 segment.
+ // Returns false if generating APP1 segment fails.
+ virtual bool generateApp1();
+
+ // Gets buffer of APP1 segment. This method must be called only after calling
+ // GenerateAPP1().
+ virtual const uint8_t* getApp1Buffer();
+
+ // Gets length of APP1 segment. This method must be called only after calling
+ // GenerateAPP1().
+ virtual unsigned int getApp1Length();
+
+ protected:
+ // sets the version of this standard supported.
+ // Returns false if memory allocation fails.
+ virtual bool setExifVersion(const std::string& exif_version);
+
+ // Resets the pointers and memories.
+ virtual void reset();
+
+ // Adds a variable length tag to |exif_data_|. It will remove the original one
+ // if the tag exists.
+ // Returns the entry of the tag. The reference count of returned ExifEntry is
+ // two.
+ virtual std::unique_ptr<ExifEntry> addVariableLengthEntry(ExifIfd ifd,
+ ExifTag tag, ExifFormat format, uint64_t components, unsigned int size);
+
+ // Adds an entry of |tag| in |exif_data_|. It won't remove the original one if
+ // the tag exists.
+ // Returns the entry of the tag. It adds one reference count to returned
+ // ExifEntry.
+ virtual std::unique_ptr<ExifEntry> addEntry(ExifIfd ifd, ExifTag tag);
+
+ // Helper functions to add exif data with different types.
+ virtual bool setShort(ExifIfd ifd, ExifTag tag, uint16_t value, const std::string& msg);
+
+ virtual bool setLong(ExifIfd ifd, ExifTag tag, uint32_t value, const std::string& msg);
+
+ virtual bool setRational(ExifIfd ifd, ExifTag tag, uint32_t numerator,
+ uint32_t denominator, const std::string& msg);
+
+ virtual bool setSRational(ExifIfd ifd, ExifTag tag, int32_t numerator,
+ int32_t denominator, const std::string& msg);
+
+ virtual bool setString(ExifIfd ifd, ExifTag tag, ExifFormat format,
+ const std::string& buffer, const std::string& msg);
+
+ // Destroys the buffer of APP1 segment if exists.
+ virtual void destroyApp1();
+
+ // The Exif data (APP1). Owned by this class.
+ ExifData* exif_data_;
+ // The raw data of APP1 segment. It's allocated by ExifMem in |exif_data_| but
+ // owned by this class.
+ uint8_t* app1_buffer_;
+ // The length of |app1_buffer_|.
+ unsigned int app1_length_;
+
+};
+
+// Convenience macros used inside the setXxx() member functions: each writes
+// one tag via the typed helper and makes the enclosing function return false
+// on failure. The stringized tag name (#tag) is passed for error logging.
+#define SET_SHORT(ifd, tag, value) \
+ do { \
+ if (setShort(ifd, tag, value, #tag) == false) \
+ return false; \
+ } while (0);
+
+#define SET_LONG(ifd, tag, value) \
+ do { \
+ if (setLong(ifd, tag, value, #tag) == false) \
+ return false; \
+ } while (0);
+
+#define SET_RATIONAL(ifd, tag, numerator, denominator) \
+ do { \
+ if (setRational(ifd, tag, numerator, denominator, #tag) == false) \
+ return false; \
+ } while (0);
+
+#define SET_SRATIONAL(ifd, tag, numerator, denominator) \
+ do { \
+ if (setSRational(ifd, tag, numerator, denominator, #tag) == false) \
+ return false; \
+ } while (0);
+
+#define SET_STRING(ifd, tag, format, buffer) \
+ do { \
+ if (setString(ifd, tag, format, buffer, #tag) == false) \
+ return false; \
+ } while (0);
+
+// This comes from the Exif Version 2.2 standard table 6.
+// "ASCII\0\0\0" prefix required before GPSProcessingMethod text.
+const char gExifAsciiPrefix[] = {0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0};
+
+// Encodes |num| (degrees as a double) into three EXIF rationals at |data|:
+// degrees/1, minutes/1, and the remaining fraction expressed in units of
+// 1/1000000 second. |data| must have room for 3 ExifRational values.
+static void setLatitudeOrLongitudeData(unsigned char* data, double num) {
+ // Take the integer part of |num|.
+ ExifLong degrees = static_cast<ExifLong>(num);
+ ExifLong minutes = static_cast<ExifLong>(60 * (num - degrees));
+ ExifLong microseconds =
+ static_cast<ExifLong>(3600000000u * (num - degrees - minutes / 60.0));
+ exif_set_rational(data, EXIF_BYTE_ORDER_INTEL, {degrees, 1});
+ exif_set_rational(data + sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL, {minutes, 1});
+ exif_set_rational(data + 2 * sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL,
+ {microseconds, 1000000});
+}
+
+// Factory: callers own the returned object and must delete it.
+ExifUtils *ExifUtils::create() {
+ return new ExifUtilsImpl();
+}
+
+ExifUtils::~ExifUtils() {
+}
+
+// Members start empty; initialize() must be called before setting tags.
+ExifUtilsImpl::ExifUtilsImpl()
+ : exif_data_(nullptr), app1_buffer_(nullptr), app1_length_(0) {}
+
+ExifUtilsImpl::~ExifUtilsImpl() {
+ reset();
+}
+
+
+// (Re)creates |exif_data_| from an existing APP1 segment, discarding any
+// previously set tags, and configures it for compressed (JPEG) output with
+// Intel byte order and EXIF version 2.2. Returns false on allocation failure.
+bool ExifUtilsImpl::initialize(const unsigned char *app1Segment, size_t app1SegmentSize) {
+ reset();
+ exif_data_ = exif_data_new_from_data(app1Segment, app1SegmentSize);
+ if (exif_data_ == nullptr) {
+ ALOGE("%s: allocate memory for exif_data_ failed", __FUNCTION__);
+ return false;
+ }
+ // set the image options.
+ exif_data_set_option(exif_data_, EXIF_DATA_OPTION_FOLLOW_SPECIFICATION);
+ exif_data_set_data_type(exif_data_, EXIF_DATA_TYPE_COMPRESSED);
+ exif_data_set_byte_order(exif_data_, EXIF_BYTE_ORDER_INTEL);
+
+ // set exif version to 2.2.
+ if (!setExifVersion("0220")) {
+ return false;
+ }
+
+ return true;
+}
+
+// Single-tag setters: each writes exactly one EXIF field via the SET_*
+// macros (which return false from the setter on failure) and returns true
+// on success. See the class declaration for per-field documentation.
+bool ExifUtilsImpl::setAperture(uint32_t numerator, uint32_t denominator) {
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_APERTURE_VALUE, numerator, denominator);
+ return true;
+}
+
+bool ExifUtilsImpl::setBrightness(int32_t numerator, int32_t denominator) {
+ SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_BRIGHTNESS_VALUE, numerator, denominator);
+ return true;
+}
+
+bool ExifUtilsImpl::setColorSpace(uint16_t color_space) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_COLOR_SPACE, color_space);
+ return true;
+}
+
+bool ExifUtilsImpl::setComponentsConfiguration(
+ const std::string& components_configuration) {
+ SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_COMPONENTS_CONFIGURATION,
+ EXIF_FORMAT_UNDEFINED, components_configuration);
+ return true;
+}
+
+bool ExifUtilsImpl::setCompression(uint16_t compression) {
+ SET_SHORT(EXIF_IFD_0, EXIF_TAG_COMPRESSION, compression);
+ return true;
+}
+
+bool ExifUtilsImpl::setContrast(uint16_t contrast) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_CONTRAST, contrast);
+ return true;
+}
+
+// Writes the same "YYYY:MM:DD HH:MM:SS" string to DateTime (IFD0),
+// DateTimeOriginal, and DateTimeDigitized (EXIF IFD).
+bool ExifUtilsImpl::setDateTime(const struct tm& t) {
+ // The length is 20 bytes including NULL for termination in Exif standard.
+ char str[20];
+ int result = snprintf(str, sizeof(str), "%04i:%02i:%02i %02i:%02i:%02i",
+ t.tm_year + 1900, t.tm_mon + 1, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec);
+ // snprintf returns chars that would be written (excl. NUL); anything but
+ // exactly 19 means the input time didn't format to the fixed-width layout.
+ if (result != sizeof(str) - 1) {
+ ALOGW("%s: Input time is invalid", __FUNCTION__);
+ return false;
+ }
+ std::string buffer(str);
+ SET_STRING(EXIF_IFD_0, EXIF_TAG_DATE_TIME, EXIF_FORMAT_ASCII, buffer);
+ SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_DATE_TIME_ORIGINAL, EXIF_FORMAT_ASCII, buffer);
+ SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_DATE_TIME_DIGITIZED, EXIF_FORMAT_ASCII, buffer);
+ return true;
+}
+
+bool ExifUtilsImpl::setDescription(const std::string& description) {
+ SET_STRING(EXIF_IFD_0, EXIF_TAG_IMAGE_DESCRIPTION, EXIF_FORMAT_ASCII, description);
+ return true;
+}
+
+bool ExifUtilsImpl::setDigitalZoomRatio(uint32_t numerator, uint32_t denominator) {
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_DIGITAL_ZOOM_RATIO, numerator, denominator);
+ return true;
+}
+
+bool ExifUtilsImpl::setExposureBias(int32_t numerator, int32_t denominator) {
+ SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_BIAS_VALUE, numerator, denominator);
+ return true;
+}
+
+bool ExifUtilsImpl::setExposureMode(uint16_t exposure_mode) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_MODE, exposure_mode);
+ return true;
+}
+
+bool ExifUtilsImpl::setExposureProgram(uint16_t exposure_program) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_PROGRAM, exposure_program);
+ return true;
+}
+
+bool ExifUtilsImpl::setExposureTime(uint32_t numerator, uint32_t denominator) {
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_EXPOSURE_TIME, numerator, denominator);
+ return true;
+}
+
+bool ExifUtilsImpl::setFlash(uint16_t flash) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_FLASH, flash);
+ return true;
+}
+
+bool ExifUtilsImpl::setFNumber(uint32_t numerator, uint32_t denominator) {
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_FNUMBER, numerator, denominator);
+ return true;
+}
+
+bool ExifUtilsImpl::setFocalLength(uint32_t numerator, uint32_t denominator) {
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_FOCAL_LENGTH, numerator, denominator);
+ return true;
+}
+
+bool ExifUtilsImpl::setGainControl(uint16_t gain_control) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_GAIN_CONTROL, gain_control);
+ return true;
+}
+
+// Writes GPSAltitudeRef (0 = above sea level, 1 = below) and GPSAltitude
+// (absolute value, millimeter precision as |altitude*1000|/1000). If the
+// value entry cannot be added, the ref entry is rolled back.
+bool ExifUtilsImpl::setGpsAltitude(double altitude) {
+ ExifTag refTag = static_cast<ExifTag>(EXIF_TAG_GPS_ALTITUDE_REF);
+ std::unique_ptr<ExifEntry> refEntry =
+ addVariableLengthEntry(EXIF_IFD_GPS, refTag, EXIF_FORMAT_BYTE, 1, 1);
+ if (!refEntry) {
+ ALOGE("%s: Adding GPSAltitudeRef exif entry failed", __FUNCTION__);
+ return false;
+ }
+ if (altitude >= 0) {
+ *refEntry->data = 0;
+ } else {
+ *refEntry->data = 1;
+ altitude *= -1;
+ }
+
+ ExifTag tag = static_cast<ExifTag>(EXIF_TAG_GPS_ALTITUDE);
+ std::unique_ptr<ExifEntry> entry = addVariableLengthEntry(
+ EXIF_IFD_GPS, tag, EXIF_FORMAT_RATIONAL, 1, sizeof(ExifRational));
+ if (!entry) {
+ // Roll back the ref entry so the IFD is not left half-populated.
+ exif_content_remove_entry(exif_data_->ifd[EXIF_IFD_GPS], refEntry.get());
+ ALOGE("%s: Adding GPSAltitude exif entry failed", __FUNCTION__);
+ return false;
+ }
+ exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL,
+ {static_cast<ExifLong>(altitude * 1000), 1000});
+
+ return true;
+}
+
+// Writes GPSLatitudeRef ("N"/"S") and GPSLatitude as three rationals
+// (degrees/minutes/fractional seconds) via setLatitudeOrLongitudeData().
+// Rolls back the ref entry if the value entry cannot be added.
+bool ExifUtilsImpl::setGpsLatitude(double latitude) {
+ const ExifTag refTag = static_cast<ExifTag>(EXIF_TAG_GPS_LATITUDE_REF);
+ std::unique_ptr<ExifEntry> refEntry =
+ addVariableLengthEntry(EXIF_IFD_GPS, refTag, EXIF_FORMAT_ASCII, 2, 2);
+ if (!refEntry) {
+ ALOGE("%s: Adding GPSLatitudeRef exif entry failed", __FUNCTION__);
+ return false;
+ }
+ if (latitude >= 0) {
+ memcpy(refEntry->data, "N", sizeof("N"));
+ } else {
+ memcpy(refEntry->data, "S", sizeof("S"));
+ latitude *= -1;
+ }
+
+ const ExifTag tag = static_cast<ExifTag>(EXIF_TAG_GPS_LATITUDE);
+ std::unique_ptr<ExifEntry> entry = addVariableLengthEntry(
+ EXIF_IFD_GPS, tag, EXIF_FORMAT_RATIONAL, 3, 3 * sizeof(ExifRational));
+ if (!entry) {
+ exif_content_remove_entry(exif_data_->ifd[EXIF_IFD_GPS], refEntry.get());
+ ALOGE("%s: Adding GPSLatitude exif entry failed", __FUNCTION__);
+ return false;
+ }
+ setLatitudeOrLongitudeData(entry->data, latitude);
+
+ return true;
+}
+
+// Writes GPSLongitudeRef ("E"/"W") and GPSLongitude as three rationals,
+// mirroring setGpsLatitude().
+bool ExifUtilsImpl::setGpsLongitude(double longitude) {
+ ExifTag refTag = static_cast<ExifTag>(EXIF_TAG_GPS_LONGITUDE_REF);
+ std::unique_ptr<ExifEntry> refEntry =
+ addVariableLengthEntry(EXIF_IFD_GPS, refTag, EXIF_FORMAT_ASCII, 2, 2);
+ if (!refEntry) {
+ ALOGE("%s: Adding GPSLongitudeRef exif entry failed", __FUNCTION__);
+ return false;
+ }
+ if (longitude >= 0) {
+ memcpy(refEntry->data, "E", sizeof("E"));
+ } else {
+ memcpy(refEntry->data, "W", sizeof("W"));
+ longitude *= -1;
+ }
+
+ ExifTag tag = static_cast<ExifTag>(EXIF_TAG_GPS_LONGITUDE);
+ std::unique_ptr<ExifEntry> entry = addVariableLengthEntry(
+ EXIF_IFD_GPS, tag, EXIF_FORMAT_RATIONAL, 3, 3 * sizeof(ExifRational));
+ if (!entry) {
+ exif_content_remove_entry(exif_data_->ifd[EXIF_IFD_GPS], refEntry.get());
+ ALOGE("%s: Adding GPSLongitude exif entry failed", __FUNCTION__);
+ return false;
+ }
+ setLatitudeOrLongitudeData(entry->data, longitude);
+
+ return true;
+}
+
+// Writes GPSProcessingMethod, prepending the mandatory "ASCII\0\0\0"
+// character-code prefix required by the EXIF spec for this tag.
+bool ExifUtilsImpl::setGpsProcessingMethod(const std::string& method) {
+ std::string buffer =
+ std::string(gExifAsciiPrefix, sizeof(gExifAsciiPrefix)) + method;
+ SET_STRING(EXIF_IFD_GPS, static_cast<ExifTag>(EXIF_TAG_GPS_PROCESSING_METHOD),
+ EXIF_FORMAT_UNDEFINED, buffer);
+ return true;
+}
+
+// Writes GPSDateStamp ("YYYY:MM:DD") and GPSTimeStamp (hour/min/sec as three
+// rationals) from UTC time |t|. Note: if the time-stamp entry fails, the
+// already-written date-stamp entry is left in place (no rollback here).
+bool ExifUtilsImpl::setGpsTimestamp(const struct tm& t) {
+ const ExifTag dateTag = static_cast<ExifTag>(EXIF_TAG_GPS_DATE_STAMP);
+ const size_t kGpsDateStampSize = 11;
+ std::unique_ptr<ExifEntry> entry = addVariableLengthEntry(EXIF_IFD_GPS,
+ dateTag, EXIF_FORMAT_ASCII, kGpsDateStampSize, kGpsDateStampSize);
+ if (!entry) {
+ ALOGE("%s: Adding GPSDateStamp exif entry failed", __FUNCTION__);
+ return false;
+ }
+ int result = snprintf(reinterpret_cast<char*>(entry->data), kGpsDateStampSize,
+ "%04i:%02i:%02i", t.tm_year + 1900, t.tm_mon + 1, t.tm_mday);
+ // NOTE(review): signed/unsigned comparison — |result| (int) is converted to
+ // size_t here; a negative snprintf error still compares unequal, so the
+ // check behaves correctly, but it may trigger -Wsign-compare.
+ if (result != kGpsDateStampSize - 1) {
+ ALOGW("%s: Input time is invalid", __FUNCTION__);
+ return false;
+ }
+
+ const ExifTag timeTag = static_cast<ExifTag>(EXIF_TAG_GPS_TIME_STAMP);
+ entry = addVariableLengthEntry(EXIF_IFD_GPS, timeTag, EXIF_FORMAT_RATIONAL, 3,
+ 3 * sizeof(ExifRational));
+ if (!entry) {
+ ALOGE("%s: Adding GPSTimeStamp exif entry failed", __FUNCTION__);
+ return false;
+ }
+ exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL,
+ {static_cast<ExifLong>(t.tm_hour), 1});
+ exif_set_rational(entry->data + sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL,
+ {static_cast<ExifLong>(t.tm_min), 1});
+ exif_set_rational(entry->data + 2 * sizeof(ExifRational), EXIF_BYTE_ORDER_INTEL,
+ {static_cast<ExifLong>(t.tm_sec), 1});
+
+ return true;
+}
+
+// More single-tag setters (see the class declaration for per-field docs).
+// Image dimensions are mirrored into both IFD0 and the EXIF IFD.
+bool ExifUtilsImpl::setImageHeight(uint32_t length) {
+ SET_LONG(EXIF_IFD_0, EXIF_TAG_IMAGE_LENGTH, length);
+ SET_LONG(EXIF_IFD_EXIF, EXIF_TAG_PIXEL_Y_DIMENSION, length);
+ return true;
+}
+
+bool ExifUtilsImpl::setImageWidth(uint32_t width) {
+ SET_LONG(EXIF_IFD_0, EXIF_TAG_IMAGE_WIDTH, width);
+ SET_LONG(EXIF_IFD_EXIF, EXIF_TAG_PIXEL_X_DIMENSION, width);
+ return true;
+}
+
+bool ExifUtilsImpl::setIsoSpeedRating(uint16_t iso_speed_ratings) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_ISO_SPEED_RATINGS, iso_speed_ratings);
+ return true;
+}
+
+bool ExifUtilsImpl::setLightSource(uint16_t light_source) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_LIGHT_SOURCE, light_source);
+ return true;
+}
+
+bool ExifUtilsImpl::setMaxAperture(uint32_t numerator, uint32_t denominator) {
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_MAX_APERTURE_VALUE, numerator, denominator);
+ return true;
+}
+
+bool ExifUtilsImpl::setMeteringMode(uint16_t metering_mode) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_METERING_MODE, metering_mode);
+ return true;
+}
+
+// Maps a rotation in degrees (0/90/180/270) to the EXIF Orientation code;
+// any other input falls back to 1 (normal orientation).
+bool ExifUtilsImpl::setOrientation(uint16_t orientation) {
+ /*
+ * Orientation value:
+ * 1 2 3 4 5 6 7 8
+ *
+ * 888888 888888 88 88 8888888888 88 88 8888888888
+ * 88 88 88 88 88 88 88 88 88 88 88 88
+ * 8888 8888 8888 8888 88 8888888888 8888888888 88
+ * 88 88 88 88
+ * 88 88 888888 888888
+ */
+ int value = 1;
+ switch (orientation) {
+ case 90:
+ value = 6;
+ break;
+ case 180:
+ value = 3;
+ break;
+ case 270:
+ value = 8;
+ break;
+ default:
+ break;
+ }
+ SET_SHORT(EXIF_IFD_0, EXIF_TAG_ORIENTATION, value);
+ return true;
+}
+
+bool ExifUtilsImpl::setResolutionUnit(uint16_t resolution_unit) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_RESOLUTION_UNIT, resolution_unit);
+ return true;
+}
+
+bool ExifUtilsImpl::setSaturation(uint16_t saturation) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SATURATION, saturation);
+ return true;
+}
+
+bool ExifUtilsImpl::setSceneCaptureType(uint16_t type) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SCENE_CAPTURE_TYPE, type);
+ return true;
+}
+
+bool ExifUtilsImpl::setSharpness(uint16_t sharpness) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_SHARPNESS, sharpness);
+ return true;
+}
+
+bool ExifUtilsImpl::setShutterSpeed(int32_t numerator, int32_t denominator) {
+ SET_SRATIONAL(EXIF_IFD_EXIF, EXIF_TAG_SHUTTER_SPEED_VALUE, numerator, denominator);
+ return true;
+}
+
+bool ExifUtilsImpl::setSubjectDistance(uint32_t numerator, uint32_t denominator) {
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_SUBJECT_DISTANCE, numerator, denominator);
+ return true;
+}
+
+// The same sub-second string is written for DateTime, DateTimeOriginal,
+// and DateTimeDigitized.
+bool ExifUtilsImpl::setSubsecTime(const std::string& subsec_time) {
+ SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME, EXIF_FORMAT_ASCII, subsec_time);
+ SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_ORIGINAL, EXIF_FORMAT_ASCII, subsec_time);
+ SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_SUB_SEC_TIME_DIGITIZED, EXIF_FORMAT_ASCII, subsec_time);
+ return true;
+}
+
+bool ExifUtilsImpl::setWhiteBalance(uint16_t white_balance) {
+ SET_SHORT(EXIF_IFD_EXIF, EXIF_TAG_WHITE_BALANCE, white_balance);
+ return true;
+}
+
+bool ExifUtilsImpl::setXResolution(uint32_t numerator, uint32_t denominator) {
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_X_RESOLUTION, numerator, denominator);
+ return true;
+}
+
+bool ExifUtilsImpl::setYCbCrPositioning(uint16_t ycbcr_positioning) {
+ SET_SHORT(EXIF_IFD_0, EXIF_TAG_YCBCR_POSITIONING, ycbcr_positioning);
+ return true;
+}
+
+bool ExifUtilsImpl::setYResolution(uint32_t numerator, uint32_t denominator) {
+ SET_RATIONAL(EXIF_IFD_EXIF, EXIF_TAG_Y_RESOLUTION, numerator, denominator);
+ return true;
+}
+}
+
+// Serializes |exif_data_| into |app1_buffer_|/|app1_length_|, replacing any
+// previous serialization. Fails (and frees the buffer) if nothing was
+// produced or the segment exceeds the JPEG APP1 size limit.
+bool ExifUtilsImpl::generateApp1() {
+ destroyApp1();
+ // Save the result into |app1_buffer_|.
+ exif_data_save_data(exif_data_, &app1_buffer_, &app1_length_);
+ if (!app1_length_) {
+ ALOGE("%s: Allocate memory for app1_buffer_ failed", __FUNCTION__);
+ return false;
+ }
+ /*
+ * The JPEG segment size is 16 bits in spec. The size of APP1 segment should
+ * be smaller than 65533 because there are two bytes for segment size field.
+ */
+ if (app1_length_ > 65533) {
+ destroyApp1();
+ ALOGE("%s: The size of APP1 segment is too large", __FUNCTION__);
+ return false;
+ }
+ return true;
+}
+
+// Accessors for the serialized APP1 segment; valid only after a successful
+// generateApp1() call.
+const uint8_t* ExifUtilsImpl::getApp1Buffer() {
+ return app1_buffer_;
+}
+
+unsigned int ExifUtilsImpl::getApp1Length() {
+ return app1_length_;
+}
+
+// ExifVersion is a 4-byte UNDEFINED field, e.g. "0220" for EXIF 2.2.
+bool ExifUtilsImpl::setExifVersion(const std::string& exif_version) {
+ SET_STRING(EXIF_IFD_EXIF, EXIF_TAG_EXIF_VERSION, EXIF_FORMAT_UNDEFINED, exif_version);
+ return true;
+}
+
+bool ExifUtilsImpl::setMake(const std::string& make) {
+ SET_STRING(EXIF_IFD_0, EXIF_TAG_MAKE, EXIF_FORMAT_ASCII, make);
+ return true;
+}
+
+bool ExifUtilsImpl::setModel(const std::string& model) {
+ SET_STRING(EXIF_IFD_0, EXIF_TAG_MODEL, EXIF_FORMAT_ASCII, model);
+ return true;
+}
+}
+
+// Frees the APP1 buffer and the ExifData, detaching the embedded thumbnail
+// pointer first so libexif does not free memory this class never allocated.
+void ExifUtilsImpl::reset() {
+ destroyApp1();
+ if (exif_data_) {
+ /*
+ * Since we decided to ignore the original APP1, we are sure that there is
+ * no thumbnail allocated by libexif. |exif_data_->data| is actually
+ * allocated by JpegCompressor. sets |exif_data_->data| to nullptr to
+ * prevent exif_data_unref() destroy it incorrectly.
+ */
+ exif_data_->data = nullptr;
+ exif_data_->size = 0;
+ exif_data_unref(exif_data_);
+ exif_data_ = nullptr;
+ }
+}
+
+// Creates a fresh entry with a |size|-byte data buffer (replacing any
+// existing entry for |tag|), adds it to the IFD, and returns it. On return
+// the entry is referenced both by |exif_data_| and by the unique_ptr
+// (refcount two, matching the declaration comment); the ExifMem allocator
+// itself is unref'd once the entry holds its own reference. Returns nullptr
+// on allocation failure.
+std::unique_ptr<ExifEntry> ExifUtilsImpl::addVariableLengthEntry(ExifIfd ifd,
+ ExifTag tag, ExifFormat format, uint64_t components, unsigned int size) {
+ // Remove old entry if exists.
+ exif_content_remove_entry(exif_data_->ifd[ifd],
+ exif_content_get_entry(exif_data_->ifd[ifd], tag));
+ ExifMem* mem = exif_mem_new_default();
+ if (!mem) {
+ ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__);
+ return nullptr;
+ }
+ std::unique_ptr<ExifEntry> entry(exif_entry_new_mem(mem));
+ if (!entry) {
+ ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__);
+ exif_mem_unref(mem);
+ return nullptr;
+ }
+ void* tmpBuffer = exif_mem_alloc(mem, size);
+ if (!tmpBuffer) {
+ ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__);
+ exif_mem_unref(mem);
+ return nullptr;
+ }
+
+ entry->data = static_cast<unsigned char*>(tmpBuffer);
+ entry->tag = tag;
+ entry->format = format;
+ entry->components = components;
+ entry->size = size;
+
+ exif_content_add_entry(exif_data_->ifd[ifd], entry.get());
+ exif_mem_unref(mem);
+
+ return entry;
+}
+
+// Returns the existing entry for |tag| (taking an extra reference so the
+// unique_ptr's unref is balanced), or creates, registers, and initializes a
+// new one. Returns nullptr on allocation failure.
+std::unique_ptr<ExifEntry> ExifUtilsImpl::addEntry(ExifIfd ifd, ExifTag tag) {
+ std::unique_ptr<ExifEntry> entry(exif_content_get_entry(exif_data_->ifd[ifd], tag));
+ if (entry) {
+ // exif_content_get_entry() won't ref the entry, so we ref here.
+ exif_entry_ref(entry.get());
+ return entry;
+ }
+ entry.reset(exif_entry_new());
+ if (!entry) {
+ ALOGE("%s: Allocate memory for exif entry failed", __FUNCTION__);
+ return nullptr;
+ }
+ entry->tag = tag;
+ exif_content_add_entry(exif_data_->ifd[ifd], entry.get());
+ exif_entry_initialize(entry.get(), tag);
+ return entry;
+}
+
+// Stores a 16-bit |value| under |tag| in |ifd| using Intel (little-endian)
+// byte order. |msg| is only used for the failure log. Returns false if the
+// entry cannot be created.
+bool ExifUtilsImpl::setShort(ExifIfd ifd, ExifTag tag, uint16_t value, const std::string& msg) {
+ std::unique_ptr<ExifEntry> entry = addEntry(ifd, tag);
+ if (!entry) {
+ ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str());
+ return false;
+ }
+ exif_set_short(entry->data, EXIF_BYTE_ORDER_INTEL, value);
+ return true;
+}
+
+// Stores a 32-bit |value| under |tag| in |ifd| using Intel (little-endian)
+// byte order. |msg| is only used for the failure log. Returns false if the
+// entry cannot be created.
+bool ExifUtilsImpl::setLong(ExifIfd ifd, ExifTag tag, uint32_t value, const std::string& msg) {
+ std::unique_ptr<ExifEntry> entry = addEntry(ifd, tag);
+ if (!entry) {
+ ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str());
+ return false;
+ }
+ exif_set_long(entry->data, EXIF_BYTE_ORDER_INTEL, value);
+ return true;
+}
+
+// Stores an unsigned rational (numerator/denominator) under |tag| in |ifd|
+// using Intel byte order. |msg| is only used for the failure log. Returns
+// false if the entry cannot be created.
+bool ExifUtilsImpl::setRational(ExifIfd ifd, ExifTag tag, uint32_t numerator,
+ uint32_t denominator, const std::string& msg) {
+ std::unique_ptr<ExifEntry> entry = addEntry(ifd, tag);
+ if (!entry) {
+ ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str());
+ return false;
+ }
+ exif_set_rational(entry->data, EXIF_BYTE_ORDER_INTEL, {numerator, denominator});
+ return true;
+}
+
+// Stores a signed rational (numerator/denominator) under |tag| in |ifd|
+// using Intel byte order. |msg| is only used for the failure log. Returns
+// false if the entry cannot be created.
+bool ExifUtilsImpl::setSRational(ExifIfd ifd, ExifTag tag, int32_t numerator,
+ int32_t denominator, const std::string& msg) {
+ std::unique_ptr<ExifEntry> entry = addEntry(ifd, tag);
+ if (!entry) {
+ ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str());
+ return false;
+ }
+ exif_set_srational(entry->data, EXIF_BYTE_ORDER_INTEL, {numerator, denominator});
+ return true;
+}
+
+// Stores |buffer| under |tag| in |ifd|. For EXIF_FORMAT_ASCII one extra byte
+// is reserved so the NUL terminator from c_str() is copied too; for other
+// formats (e.g. UNDEFINED) exactly buffer.length() bytes are written.
+// Returns false if the entry cannot be created.
+bool ExifUtilsImpl::setString(ExifIfd ifd, ExifTag tag, ExifFormat format,
+ const std::string& buffer, const std::string& msg) {
+ size_t entry_size = buffer.length();
+ // Since the exif format is undefined, NULL termination is not necessary.
+ if (format == EXIF_FORMAT_ASCII) {
+ entry_size++;
+ }
+ std::unique_ptr<ExifEntry> entry =
+ addVariableLengthEntry(ifd, tag, format, entry_size, entry_size);
+ if (!entry) {
+ ALOGE("%s: Adding '%s' entry failed", __FUNCTION__, msg.c_str());
+ return false;
+ }
+ memcpy(entry->data, buffer.c_str(), entry_size);
+ return true;
+}
+
+// Frees the APP1 segment buffer produced by generateApp1() and resets the
+// cached length. Safe to call when no buffer exists (free(nullptr) is a no-op).
+void ExifUtilsImpl::destroyApp1() {
+ /*
+ * Since there is no API to access ExifMem in ExifData->priv, we use free
+ * here, which is the default free function in libexif. See
+ * exif_data_save_data() for detail.
+ */
+ free(app1_buffer_);
+ app1_buffer_ = nullptr;
+ app1_length_ = 0;
+}
+
+// Populates the EXIF tags from a capture-result |metadata| plus the final
+// image dimensions. Optional metadata entries are skipped when absent;
+// returns false as soon as any present field fails to be written.
+bool ExifUtilsImpl::setFromMetadata(const CameraMetadata& metadata,
+ const size_t imageWidth, const size_t imageHeight) {
+ // How precise the float-to-rational conversion for EXIF tags would be.
+ constexpr int kRationalPrecision = 10000;
+ if (!setImageWidth(imageWidth) ||
+ !setImageHeight(imageHeight)) {
+ ALOGE("%s: setting image resolution failed.", __FUNCTION__);
+ return false;
+ }
+
+ // Capture wall-clock time for DateTime/SubsecTime. time_available gates the
+ // fields derived from |tp|; time_info is still filled (possibly from an
+ // uninitialized tp on clock failure) for the mandatory DateTime tag.
+ struct timespec tp;
+ struct tm time_info;
+ bool time_available = clock_gettime(CLOCK_REALTIME, &tp) != -1;
+ localtime_r(&tp.tv_sec, &time_info);
+ if (!setDateTime(time_info)) {
+ ALOGE("%s: setting data time failed.", __FUNCTION__);
+ return false;
+ }
+
+ float focal_length;
+ camera_metadata_ro_entry entry = metadata.find(ANDROID_LENS_FOCAL_LENGTH);
+ if (entry.count) {
+ focal_length = entry.data.f[0];
+
+ // Convert float millimeters to a rational with fixed precision.
+ if (!setFocalLength(
+ static_cast<uint32_t>(focal_length * kRationalPrecision), kRationalPrecision)) {
+ ALOGE("%s: setting focal length failed.", __FUNCTION__);
+ return false;
+ }
+ } else {
+ ALOGV("%s: Cannot find focal length in metadata.", __FUNCTION__);
+ }
+
+ // GPS coordinates are [latitude, longitude, altitude] doubles.
+ if (metadata.exists(ANDROID_JPEG_GPS_COORDINATES)) {
+ entry = metadata.find(ANDROID_JPEG_GPS_COORDINATES);
+ if (entry.count < 3) {
+ ALOGE("%s: Gps coordinates in metadata is not complete.", __FUNCTION__);
+ return false;
+ }
+ if (!setGpsLatitude(entry.data.d[0])) {
+ ALOGE("%s: setting gps latitude failed.", __FUNCTION__);
+ return false;
+ }
+ if (!setGpsLongitude(entry.data.d[1])) {
+ ALOGE("%s: setting gps longitude failed.", __FUNCTION__);
+ return false;
+ }
+ if (!setGpsAltitude(entry.data.d[2])) {
+ ALOGE("%s: setting gps altitude failed.", __FUNCTION__);
+ return false;
+ }
+ }
+
+ if (metadata.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
+ entry = metadata.find(ANDROID_JPEG_GPS_PROCESSING_METHOD);
+ // NOTE(review): assumes the u8 blob is NUL-terminated — confirm against
+ // the metadata producer before relying on this.
+ std::string method_str(reinterpret_cast<const char*>(entry.data.u8));
+ if (!setGpsProcessingMethod(method_str)) {
+ ALOGE("%s: setting gps processing method failed.", __FUNCTION__);
+ return false;
+ }
+ }
+
+ if (time_available && metadata.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
+ entry = metadata.find(ANDROID_JPEG_GPS_TIMESTAMP);
+ time_t timestamp = static_cast<time_t>(entry.data.i64[0]);
+ // GPS timestamp is UTC, hence gmtime_r rather than localtime_r.
+ if (gmtime_r(&timestamp, &time_info)) {
+ if (!setGpsTimestamp(time_info)) {
+ ALOGE("%s: setting gps timestamp failed.", __FUNCTION__);
+ return false;
+ }
+ } else {
+ ALOGE("%s: Time tranformation failed.", __FUNCTION__);
+ return false;
+ }
+ }
+
+ if (metadata.exists(ANDROID_JPEG_ORIENTATION)) {
+ entry = metadata.find(ANDROID_JPEG_ORIENTATION);
+ if (!setOrientation(entry.data.i32[0])) {
+ ALOGE("%s: setting orientation failed.", __FUNCTION__);
+ return false;
+ }
+ }
+
+ if (metadata.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
+ entry = metadata.find(ANDROID_SENSOR_EXPOSURE_TIME);
+ // int64_t of nanoseconds; narrowed to uint32_t numerator over 1e9 —
+ // exposures of 4.29s or longer would overflow the numerator.
+ if (!setExposureTime(entry.data.i64[0],1000000000u)) {
+ ALOGE("%s: setting exposure time failed.", __FUNCTION__);
+ return false;
+ }
+ }
+
+ if (metadata.exists(ANDROID_LENS_APERTURE)) {
+ const int kAperturePrecision = 10000;
+ entry = metadata.find(ANDROID_LENS_APERTURE);
+ if (!setFNumber(entry.data.f[0] * kAperturePrecision, kAperturePrecision)) {
+ ALOGE("%s: setting F number failed.", __FUNCTION__);
+ return false;
+ }
+ }
+
+ if (metadata.exists(ANDROID_FLASH_INFO_AVAILABLE)) {
+ entry = metadata.find(ANDROID_FLASH_INFO_AVAILABLE);
+ if (entry.data.u8[0] == ANDROID_FLASH_INFO_AVAILABLE_FALSE) {
+ // EXIF Flash bit 5 set = "No flash function".
+ const uint32_t kNoFlashFunction = 0x20;
+ if (!setFlash(kNoFlashFunction)) {
+ ALOGE("%s: setting flash failed.", __FUNCTION__);
+ return false;
+ }
+ } else {
+ ALOGE("%s: Unsupported flash info: %d",__FUNCTION__, entry.data.u8[0]);
+ return false;
+ }
+ }
+
+ if (metadata.exists(ANDROID_CONTROL_AWB_MODE)) {
+ entry = metadata.find(ANDROID_CONTROL_AWB_MODE);
+ if (entry.data.u8[0] == ANDROID_CONTROL_AWB_MODE_AUTO) {
+ const uint16_t kAutoWhiteBalance = 0;
+ if (!setWhiteBalance(kAutoWhiteBalance)) {
+ ALOGE("%s: setting white balance failed.", __FUNCTION__);
+ return false;
+ }
+ } else {
+ ALOGE("%s: Unsupported awb mode: %d", __FUNCTION__, entry.data.u8[0]);
+ return false;
+ }
+ }
+
+ if (time_available) {
+ // Milliseconds-of-second as a 3-digit string ("000".."999").
+ char str[4];
+ if (snprintf(str, sizeof(str), "%03ld", tp.tv_nsec / 1000000) < 0) {
+ ALOGE("%s: Subsec is invalid: %ld", __FUNCTION__, tp.tv_nsec);
+ return false;
+ }
+ if (!setSubsecTime(std::string(str))) {
+ ALOGE("%s: setting subsec time failed.", __FUNCTION__);
+ return false;
+ }
+ }
+
+ return true;
+}
+
+} // namespace camera3
+} // namespace android
diff --git a/services/camera/libcameraservice/utils/ExifUtils.h b/services/camera/libcameraservice/utils/ExifUtils.h
new file mode 100644
index 0000000..8ccdd8f
--- /dev/null
+++ b/services/camera/libcameraservice/utils/ExifUtils.h
@@ -0,0 +1,245 @@
+/*
+ * Copyright (C) 2019 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef ANDROID_SERVERS_CAMERA_EXIF_UTILS_H
+#define ANDROID_SERVERS_CAMERA_EXIF_UTILS_H
+
+#include "CameraMetadata.h"
+
+namespace android {
+namespace camera3 {
+
+// This is based on the camera HIDL shim implementation, which was in turn
+// based on the original ChromeOS ARC implementation of a V4L2 HAL
+
+// ExifUtils can override APP1 segment with tags which caller set. ExifUtils can
+// also add a thumbnail in the APP1 segment if thumbnail size is specified.
+// ExifUtils can be reused with different images by calling initialize().
+//
+// Example of using this class :
+// std::unique_ptr<ExifUtils> utils(ExifUtils::Create());
+// utils->initialize(app1Segment, app1SegmentSize);
+// ...
+// // Call ExifUtils functions to set Exif tags.
+// ...
+// utils->GenerateApp1();
+// unsigned int app1Length = utils->GetApp1Length();
+// uint8_t* app1Buffer = new uint8_t[app1Length];
+// memcpy(app1Buffer, utils->GetApp1Buffer(), app1Length);
+class ExifUtils {
+
+public:
+ virtual ~ExifUtils();
+
+ // Factory: returns a heap-allocated concrete implementation (ExifUtilsImpl);
+ // caller owns the returned object.
+ static ExifUtils* create();
+
+ // initialize() can be called multiple times. The setting of Exif tags will be
+ // cleared.
+ virtual bool initialize(const unsigned char *app1Segment, size_t app1SegmentSize) = 0;
+
+ // Set all known fields from a metadata structure
+ virtual bool setFromMetadata(const CameraMetadata& metadata,
+ const size_t imageWidth, const size_t imageHeight) = 0;
+
+ // Sets the lens aperture.
+ // Returns false if memory allocation fails.
+ virtual bool setAperture(uint32_t numerator, uint32_t denominator) = 0;
+
+ // Sets the value of brightness.
+ // Returns false if memory allocation fails.
+ virtual bool setBrightness(int32_t numerator, int32_t denominator) = 0;
+
+ // Sets the color space.
+ // Returns false if memory allocation fails.
+ virtual bool setColorSpace(uint16_t color_space) = 0;
+
+ // Sets the information to compressed data.
+ // Returns false if memory allocation fails.
+ virtual bool setComponentsConfiguration(const std::string& components_configuration) = 0;
+
+ // Sets the compression scheme used for the image data.
+ // Returns false if memory allocation fails.
+ virtual bool setCompression(uint16_t compression) = 0;
+
+ // Sets image contrast.
+ // Returns false if memory allocation fails.
+ virtual bool setContrast(uint16_t contrast) = 0;
+
+ // Sets the date and time of image last modified. It takes local time. The
+ // name of the tag is DateTime in IFD0.
+ // Returns false if memory allocation fails.
+ virtual bool setDateTime(const struct tm& t) = 0;
+
+ // Sets the image description.
+ // Returns false if memory allocation fails.
+ virtual bool setDescription(const std::string& description) = 0;
+
+ // Sets the digital zoom ratio. If the numerator is 0, it means digital zoom
+ // was not used.
+ // Returns false if memory allocation fails.
+ virtual bool setDigitalZoomRatio(uint32_t numerator, uint32_t denominator) = 0;
+
+ // Sets the exposure bias.
+ // Returns false if memory allocation fails.
+ virtual bool setExposureBias(int32_t numerator, int32_t denominator) = 0;
+
+ // Sets the exposure mode set when the image was shot.
+ // Returns false if memory allocation fails.
+ virtual bool setExposureMode(uint16_t exposure_mode) = 0;
+
+ // Sets the program used by the camera to set exposure when the picture is
+ // taken.
+ // Returns false if memory allocation fails.
+ virtual bool setExposureProgram(uint16_t exposure_program) = 0;
+
+ // Sets the exposure time, given in seconds.
+ // Returns false if memory allocation fails.
+ virtual bool setExposureTime(uint32_t numerator, uint32_t denominator) = 0;
+
+ // Sets the status of flash.
+ // Returns false if memory allocation fails.
+ virtual bool setFlash(uint16_t flash) = 0;
+
+ // Sets the F number.
+ // Returns false if memory allocation fails.
+ virtual bool setFNumber(uint32_t numerator, uint32_t denominator) = 0;
+
+ // Sets the focal length of lens used to take the image in millimeters.
+ // Returns false if memory allocation fails.
+ virtual bool setFocalLength(uint32_t numerator, uint32_t denominator) = 0;
+
+ // Sets the degree of overall image gain adjustment.
+ // Returns false if memory allocation fails.
+ virtual bool setGainControl(uint16_t gain_control) = 0;
+
+ // Sets the altitude in meters.
+ // Returns false if memory allocation fails.
+ virtual bool setGpsAltitude(double altitude) = 0;
+
+ // Sets the latitude with degrees minutes seconds format.
+ // Returns false if memory allocation fails.
+ virtual bool setGpsLatitude(double latitude) = 0;
+
+ // Sets the longitude with degrees minutes seconds format.
+ // Returns false if memory allocation fails.
+ virtual bool setGpsLongitude(double longitude) = 0;
+
+ // Sets GPS processing method.
+ // Returns false if memory allocation fails.
+ virtual bool setGpsProcessingMethod(const std::string& method) = 0;
+
+ // Sets GPS date stamp and time stamp (atomic clock). It takes UTC time.
+ // Returns false if memory allocation fails.
+ virtual bool setGpsTimestamp(const struct tm& t) = 0;
+
+ // Sets the height (number of rows) of main image.
+ // Returns false if memory allocation fails.
+ virtual bool setImageHeight(uint32_t length) = 0;
+
+ // Sets the width (number of columns) of main image.
+ // Returns false if memory allocation fails.
+ virtual bool setImageWidth(uint32_t width) = 0;
+
+ // Sets the ISO speed.
+ // Returns false if memory allocation fails.
+ virtual bool setIsoSpeedRating(uint16_t iso_speed_ratings) = 0;
+
+ // Sets the kind of light source.
+ // Returns false if memory allocation fails.
+ virtual bool setLightSource(uint16_t light_source) = 0;
+
+ // Sets the smallest F number of the lens.
+ // Returns false if memory allocation fails.
+ virtual bool setMaxAperture(uint32_t numerator, uint32_t denominator) = 0;
+
+ // Sets the metering mode.
+ // Returns false if memory allocation fails.
+ virtual bool setMeteringMode(uint16_t metering_mode) = 0;
+
+ // Sets image orientation.
+ // Returns false if memory allocation fails.
+ virtual bool setOrientation(uint16_t orientation) = 0;
+
+ // Sets the unit for measuring XResolution and YResolution.
+ // Returns false if memory allocation fails.
+ virtual bool setResolutionUnit(uint16_t resolution_unit) = 0;
+
+ // Sets image saturation.
+ // Returns false if memory allocation fails.
+ virtual bool setSaturation(uint16_t saturation) = 0;
+
+ // Sets the type of scene that was shot.
+ // Returns false if memory allocation fails.
+ virtual bool setSceneCaptureType(uint16_t type) = 0;
+
+ // Sets image sharpness.
+ // Returns false if memory allocation fails.
+ virtual bool setSharpness(uint16_t sharpness) = 0;
+
+ // Sets the shutter speed.
+ // Returns false if memory allocation fails.
+ virtual bool setShutterSpeed(int32_t numerator, int32_t denominator) = 0;
+
+ // Sets the distance to the subject, given in meters.
+ // Returns false if memory allocation fails.
+ virtual bool setSubjectDistance(uint32_t numerator, uint32_t denominator) = 0;
+
+ // Sets the fractions of seconds for the <DateTime> tag.
+ // Returns false if memory allocation fails.
+ virtual bool setSubsecTime(const std::string& subsec_time) = 0;
+
+ // Sets the white balance mode set when the image was shot.
+ // Returns false if memory allocation fails.
+ virtual bool setWhiteBalance(uint16_t white_balance) = 0;
+
+ // Sets the number of pixels per resolution unit in the image width.
+ // Returns false if memory allocation fails.
+ virtual bool setXResolution(uint32_t numerator, uint32_t denominator) = 0;
+
+ // Sets the position of chrominance components in relation to the luminance
+ // component.
+ // Returns false if memory allocation fails.
+ virtual bool setYCbCrPositioning(uint16_t ycbcr_positioning) = 0;
+
+ // Sets the number of pixels per resolution unit in the image length.
+ // Returns false if memory allocation fails.
+ virtual bool setYResolution(uint32_t numerator, uint32_t denominator) = 0;
+
+ // Sets the manufacturer of camera.
+ // Returns false if memory allocation fails.
+ virtual bool setMake(const std::string& make) = 0;
+
+ // Sets the model number of camera.
+ // Returns false if memory allocation fails.
+ virtual bool setModel(const std::string& model) = 0;
+
+ // Generates APP1 segment.
+ // Returns false if generating APP1 segment fails.
+ virtual bool generateApp1() = 0;
+
+ // Gets buffer of APP1 segment. This method must be called only after calling
+ // generateApp1().
+ virtual const uint8_t* getApp1Buffer() = 0;
+
+ // Gets length of APP1 segment. This method must be called only after calling
+ // generateApp1().
+ virtual unsigned int getApp1Length() = 0;
+};
+
+} // namespace camera3
+} // namespace android
+
+#endif // ANDROID_SERVERS_CAMERA_EXIF_UTILS_H
diff --git a/services/mediacodec/Android.mk b/services/mediacodec/Android.mk
index 6a71d7d..f78c671 100644
--- a/services/mediacodec/Android.mk
+++ b/services/mediacodec/Android.mk
@@ -108,6 +108,9 @@
libutils \
libziparchive \
+LOCAL_HEADER_LIBRARIES := \
+ libnativeloader-dummy-headers \
+
LOCAL_MODULE := mediaswcodec
LOCAL_INIT_RC := mediaswcodec.rc
LOCAL_SANITIZE := scudo
diff --git a/services/mediacodec/MediaCodecUpdateService.cpp b/services/mediacodec/MediaCodecUpdateService.cpp
index 0e6892d..50ccbce 100644
--- a/services/mediacodec/MediaCodecUpdateService.cpp
+++ b/services/mediacodec/MediaCodecUpdateService.cpp
@@ -20,28 +20,12 @@
#include <android/dlext.h>
#include <dlfcn.h>
#include <media/CodecServiceRegistrant.h>
+#include <nativeloader/dlext_namespaces.h>
#include <utils/Log.h>
#include <utils/String8.h>
#include "MediaCodecUpdateService.h"
-// Copied from GraphicsEnv.cpp
-// TODO(b/37049319) Get this from a header once one exists
-extern "C" {
- android_namespace_t* android_create_namespace(const char* name,
- const char* ld_library_path,
- const char* default_library_path,
- uint64_t type,
- const char* permitted_when_isolated_path,
- android_namespace_t* parent);
- bool android_link_namespaces(android_namespace_t* from,
- android_namespace_t* to,
- const char* shared_libs_sonames);
- enum {
- ANDROID_NAMESPACE_TYPE_ISOLATED = 1,
- };
-}
-
namespace android {
void loadFromApex(const char *libDirPath) {