Camera: HIDLized camera HALs, first set

* Common camera definitions (camera.common/1.0)
  * Basic status codes, shared types
* Provider 2.4 API (camera.provider/2.4), including vendor tag APIs
  * Enumerate and acquire camera device instances
  * Mostly equivalent to legacy camera module v2.4.
* Device 1.0 API (camera.device/1.0)
  * Mostly equivalent to legacy camera HAL v1.0.
* Device 3.2 API (camera.device/3.2)
  * Mostly equivalent to legacy camera HAL v3.2.
* Metadata 3.2 API (camera.metadata/3.2)
  * Definitions for valid metadata fields for device 3.2

Only the key initial interfaces are added; default implementations are
in a later CL. Other interfaces that will likely need to be added:

* Other provider minor versions
* Other device 3.x minor versions

Test: make -j32
Bug: 30985004
Bug: 32991603
Change-Id: I1c6a9a269bf45276055707bbc58cfc50d29fa919
diff --git a/camera/device/3.2/Android.bp b/camera/device/3.2/Android.bp
new file mode 100644
index 0000000..2791faf
--- /dev/null
+++ b/camera/device/3.2/Android.bp
@@ -0,0 +1,76 @@
+// This file is autogenerated by hidl-gen. Do not edit manually.
+
+genrule {
+    name: "android.hardware.camera.device@3.2_genc++",
+    tools: ["hidl-gen"],
+    cmd: "$(location hidl-gen) -o $(genDir) -Lc++ -randroid.hardware:hardware/interfaces android.hardware.camera.device@3.2",
+    srcs: [
+        "types.hal",
+        "ICameraDevice.hal",
+        "ICameraDeviceCallback.hal",
+        "ICameraDeviceSession.hal",
+    ],
+    out: [
+        "android/hardware/camera/device/3.2/types.cpp",
+        "android/hardware/camera/device/3.2/CameraDeviceAll.cpp",
+        "android/hardware/camera/device/3.2/CameraDeviceCallbackAll.cpp",
+        "android/hardware/camera/device/3.2/CameraDeviceSessionAll.cpp",
+    ],
+}
+
+genrule {
+    name: "android.hardware.camera.device@3.2_genc++_headers",
+    tools: ["hidl-gen"],
+    cmd: "$(location hidl-gen) -o $(genDir) -Lc++ -randroid.hardware:hardware/interfaces android.hardware.camera.device@3.2",
+    srcs: [
+        "types.hal",
+        "ICameraDevice.hal",
+        "ICameraDeviceCallback.hal",
+        "ICameraDeviceSession.hal",
+    ],
+    out: [
+        "android/hardware/camera/device/3.2/types.h",
+        "android/hardware/camera/device/3.2/ICameraDevice.h",
+        "android/hardware/camera/device/3.2/IHwCameraDevice.h",
+        "android/hardware/camera/device/3.2/BnCameraDevice.h",
+        "android/hardware/camera/device/3.2/BpCameraDevice.h",
+        "android/hardware/camera/device/3.2/BsCameraDevice.h",
+        "android/hardware/camera/device/3.2/ICameraDeviceCallback.h",
+        "android/hardware/camera/device/3.2/IHwCameraDeviceCallback.h",
+        "android/hardware/camera/device/3.2/BnCameraDeviceCallback.h",
+        "android/hardware/camera/device/3.2/BpCameraDeviceCallback.h",
+        "android/hardware/camera/device/3.2/BsCameraDeviceCallback.h",
+        "android/hardware/camera/device/3.2/ICameraDeviceSession.h",
+        "android/hardware/camera/device/3.2/IHwCameraDeviceSession.h",
+        "android/hardware/camera/device/3.2/BnCameraDeviceSession.h",
+        "android/hardware/camera/device/3.2/BpCameraDeviceSession.h",
+        "android/hardware/camera/device/3.2/BsCameraDeviceSession.h",
+    ],
+}
+
+cc_library_shared {
+    name: "android.hardware.camera.device@3.2",
+    generated_sources: ["android.hardware.camera.device@3.2_genc++"],
+    generated_headers: ["android.hardware.camera.device@3.2_genc++_headers"],
+    export_generated_headers: ["android.hardware.camera.device@3.2_genc++_headers"],
+    shared_libs: [
+        "libhidlbase",
+        "libhidltransport",
+        "libhwbinder",
+        "liblog",
+        "libutils",
+        "libcutils",
+        "android.hardware.camera.common@1.0",
+        "android.hardware.graphics.allocator@2.0",
+        "android.hardware.graphics.common@1.0",
+    ],
+    export_shared_lib_headers: [
+        "libhidlbase",
+        "libhidltransport",
+        "libhwbinder",
+        "libutils",
+        "android.hardware.camera.common@1.0",
+        "android.hardware.graphics.allocator@2.0",
+        "android.hardware.graphics.common@1.0",
+    ],
+}
diff --git a/camera/device/3.2/ICameraDevice.hal b/camera/device/3.2/ICameraDevice.hal
new file mode 100644
index 0000000..6e66bf3
--- /dev/null
+++ b/camera/device/3.2/ICameraDevice.hal
@@ -0,0 +1,199 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera.device@3.2;
+
+import android.hardware.camera.common@1.0::types;
+import ICameraDeviceSession;
+import ICameraDeviceCallback;
+
+/**
+ * Camera device HAL, first modern version
+ *
+ * Supports the android.hardware.Camera API, and the android.hardware.camera2
+ * API at LIMITED or better hardware level.
+ *
+ */
+interface ICameraDevice {
+
+    /**
+     * Get camera device resource cost information.
+     *
+     * @return status Status code for the operation, one of:
+     *     OK:
+     *         On success
+     *     INTERNAL_ERROR:
+     *         An unexpected internal camera HAL error occurred, and the
+     *         resource cost is not available.
+     *     CAMERA_DISCONNECTED:
+     *         An external camera device has been disconnected, and is no longer
+     *         available. This camera device interface is now stale, and a new
+     *         instance must be acquired if the device is reconnected. All
+     *         subsequent calls on this interface must return
+     *         CAMERA_DISCONNECTED.
+     * @return resourceCost
+     *     The resources required to open this camera device, or unspecified
+     *     values if status is not OK.
+     */
+    getResourceCost() generates (Status status, CameraResourceCost resourceCost);
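+
+    /*
+     * A minimal client-side sketch of calling this method through the C++
+     * proxy that hidl-gen produces (not part of the interface; the "device"
+     * handle is an assumption for illustration):
+     *
+     *   device->getResourceCost([](Status s, const CameraResourceCost& cost) {
+     *       if (s == Status::OK) {
+     *           // Inspect cost (resource cost value, conflicting devices).
+     *       }
+     *   });
+     */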
+
+    /**
+     * getCameraCharacteristics:
+     *
+     * Return the static camera information for this camera device. This
+     * information must not change between consecutive calls.
+     *
+     * When an external camera is disconnected, its camera id becomes
+     * invalid. Calling this method with this invalid camera id must result in
+     * ILLEGAL_ARGUMENT; this may happen even before the device status callback
+     * is invoked by the HAL.
+     *
+     * @return status Status code for the operation, one of:
+     *     OK:
+     *         On a successful open of the camera device.
+     *     INTERNAL_ERROR:
+     *         The camera device cannot be opened due to an internal
+     *         error.
+     *     CAMERA_DISCONNECTED:
+     *         An external camera device has been disconnected, and is no longer
+     *         available. This camera device interface is now stale, and a new
+     *         instance must be acquired if the device is reconnected. All
+     *         subsequent calls on this interface must return
+     *         CAMERA_DISCONNECTED.
+     *
+     * @return cameraCharacteristics
+     *     The static metadata for this camera device, or an empty metadata
+     *     structure if status is not OK.
+     *
+     */
+    getCameraCharacteristics() generates
+            (Status status, CameraMetadata cameraCharacteristics);
+
+    /**
+     * setTorchMode:
+     *
+     * Turn on or off the torch mode of the flash unit associated with this
+     * camera device. If the operation is successful, HAL must notify the
+     * framework torch state by invoking
+     * ICameraProviderCallback::torchModeStatusChange() with the new state.
+     *
+     * An active camera session has a higher priority accessing the flash
+     * unit. When there are any resource conflicts, such as when open() is
+     * called to fully activate a camera device, the provider must notify the
+     * framework through ICameraProviderCallback::torchModeStatusChange() that
+     * the torch mode has been turned off and the torch mode state has become
+     * TORCH_MODE_STATUS_NOT_AVAILABLE. When resources to turn on torch mode
+     * become available again, the provider must notify the framework through
+     * ICameraProviderCallback::torchModeStatusChange() that the torch mode
+     * state has become TORCH_MODE_STATUS_AVAILABLE_OFF so that setTorchMode()
+     * can be called.
+     *
+     * When the client calls setTorchMode() to turn on the torch mode of a flash
+     * unit, if the HAL cannot keep multiple torch modes on simultaneously, the
+     * HAL must turn off the torch mode(s) that were turned on by previous
+     * setTorchMode() calls and notify the framework that the torch mode state
+     * of those flash unit(s) has become TORCH_MODE_STATUS_AVAILABLE_OFF.
+     *
+     * @param torchMode The new mode to set the device flash unit to.
+     *
+     * @return status Status code for the operation, one of:
+     *     OK:
+     *         On a successful change to the torch state
+     *     INTERNAL_ERROR:
+     *         The flash unit cannot be operated due to an unexpected internal
+     *         error.
+     *     ILLEGAL_ARGUMENT:
+     *         The camera ID is unknown.
+     *     CAMERA_IN_USE:
+     *         This camera device has been opened, so the torch cannot be
+     *         controlled until it is closed.
+     *     MAX_CAMERAS_IN_USE:
+     *         Due to other camera devices being open, or due to other
+     *         resource constraints, the torch cannot be controlled currently.
+     *     METHOD_NOT_SUPPORTED:
+     *         This provider does not support direct operation of flashlight
+     *         torch mode. The framework must open the camera device and turn
+     *         the torch on through the device interface.
+     *     OPERATION_NOT_SUPPORTED:
+     *         This camera device does not have a flash unit. This can
+     *         be returned if and only if android.flash.info.available is
+     *         false.
+     *     CAMERA_DISCONNECTED:
+     *         An external camera device has been disconnected, and is no longer
+     *         available. This camera device interface is now stale, and a new
+     *         instance must be acquired if the device is reconnected. All
+     *         subsequent calls on this interface must return
+     *         CAMERA_DISCONNECTED.
+     *
+     */
+    setTorchMode(TorchMode mode) generates (Status status);
+
+    /**
+     * open:
+     *
+     * Power on and initialize this camera device for active use, returning a
+     * session handle for active operations.
+     *
+     * @param callback Interface to invoke by the HAL for device asynchronous
+     *     events.
+     * @return status Status code for the operation, one of:
+     *     OK:
+     *         On a successful open of the camera device.
+     *     INTERNAL_ERROR:
+     *         The camera device cannot be opened due to an internal
+     *         error.
+     *     ILLEGAL_ARGUMENT:
+     *         The callback handle is invalid (for example, it is null).
+     *     CAMERA_IN_USE:
+     *         This camera device is already open.
+     *     MAX_CAMERAS_IN_USE:
+     *         The maximum number of camera devices that can be
+     *         opened concurrently has already been reached.
+     *     CAMERA_DISCONNECTED:
+     *         This external camera device has been disconnected, and is no
+     *         longer available. This interface is now stale, and a new instance
+     *         must be acquired if the device is reconnected. All subsequent
+     *         calls on this interface must return CAMERA_DISCONNECTED.
+     * @return session The interface to the newly-opened camera session,
+     *     or null if status is not OK.
+     */
+    open(ICameraDeviceCallback callback) generates
+            (Status status, ICameraDeviceSession session);
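+
+    /*
+     * A minimal framework-side sketch of opening a session via the generated
+     * C++ proxy (illustrative only; "FrameworkCallback" is a hypothetical
+     * ICameraDeviceCallback implementation, such as the skeleton sketched
+     * alongside ICameraDeviceCallback):
+     *
+     *   sp<ICameraDeviceCallback> cb = new FrameworkCallback();
+     *   sp<ICameraDeviceSession> session;
+     *   device->open(cb, [&](Status s, const sp<ICameraDeviceSession>& newSession) {
+     *       if (s == Status::OK) {
+     *           session = newSession;
+     *       }
+     *   });
+     *   // session is now ready for configureStreams() and
+     *   // processCaptureRequest().
+     */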
+
+    /**
+     * dumpState:
+     *
+     * Print out debugging state for the camera device. This may be called by
+     * the framework when the camera service is asked for a debug dump, which
+     * happens when using the dumpsys tool, or when capturing a bugreport.
+     *
+     * The passed-in file descriptor can be used to write debugging text using
+     * dprintf() or write(). The text must be in ASCII encoding only.
+     *
+     * In case this camera device has been disconnected, the dump must not fail,
+     * but may simply print out 'Device disconnected' or equivalent.
+     *
+     * Performance requirements:
+     *
+     * This must be a non-blocking call. The HAL should return from this call
+     * in 1ms, and must return from this call in 10ms. This call must avoid
+     * deadlocks, as it may be called at any point during camera operation.
+     * Any synchronization primitives used (such as mutex locks or semaphores)
+     * must be acquired with a timeout.
+     */
+    dumpState(handle fd);
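+
+    /*
+     * A possible HAL-side implementation sketch against the generated C++
+     * signature (illustrative; error handling kept minimal):
+     *
+     *   Return<void> CameraDevice::dumpState(const hidl_handle& fd) {
+     *       const native_handle_t* h = fd.getNativeHandle();
+     *       if (h != nullptr && h->numFds == 1) {
+     *           dprintf(h->data[0], "Camera device: no active session\n");
+     *       }
+     *       return Void();
+     *   }
+     */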
+
+};
diff --git a/camera/device/3.2/ICameraDeviceCallback.hal b/camera/device/3.2/ICameraDeviceCallback.hal
new file mode 100644
index 0000000..753d085
--- /dev/null
+++ b/camera/device/3.2/ICameraDeviceCallback.hal
@@ -0,0 +1,126 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera.device@3.2;
+
+import android.hardware.camera.common@1.0::types;
+
+/**
+ *
+ * Callback methods for the HAL to call into the framework.
+ *
+ * These methods are used to return metadata and image buffers for completed
+ * or failed captures, and to notify the framework of asynchronous events such
+ * as errors.
+ *
+ * The framework must not call back into the HAL from within these callbacks,
+ * and these calls must not block for extended periods.
+ *
+ */
+interface ICameraDeviceCallback {
+
+    /**
+     * processCaptureResult:
+     *
+     * Send results from a completed capture to the framework.
+     * processCaptureResult() may be invoked multiple times by the HAL in
+     * response to a single capture request. This allows, for example, the
+     * metadata and low-resolution buffers to be returned in one call, and
+     * post-processed JPEG buffers in a later call, once it is available. Each
+     * call must include the frame number of the request it is returning
+     * metadata or buffers for.
+     *
+     * A component (buffer or metadata) of the complete result may only be
+     * included in one processCaptureResult() call. A buffer for each stream,
+     * and the result metadata, must be returned by the HAL for each request in
+     * one of the processCaptureResult() calls, even in case of errors producing
+     * some of the output. A call to processCaptureResult() with neither
+     * output buffers nor result metadata is not allowed.
+     *
+     * The order of returning metadata and buffers for a single result does not
+     * matter, but buffers for a given stream must be returned in FIFO order. So
+     * the buffer for request 5 for stream A must always be returned before the
+     * buffer for request 6 for stream A. This also applies to the result
+     * metadata; the metadata for request 5 must be returned before the metadata
+     * for request 6.
+     *
+     * However, different streams are independent of each other, so it is
+     * acceptable and expected that the buffer for request 5 for stream A may be
+     * returned after the buffer for request 6 for stream B is. And it is
+     * acceptable that the result metadata for request 6 for stream B is
+     * returned before the buffer for request 5 for stream A is.
+     *
+     * The HAL retains ownership of the result structure, which only needs to be
+     * valid to access during this call. The framework must copy whatever it
+     * needs before this call returns.
+     *
+     * The output buffers do not need to be filled yet; the framework must wait
+     * on the stream buffer release sync fence before reading the buffer
+     * data. Therefore, this method should be called by the HAL as soon as
+     * possible, even if some or all of the output buffers are still being
+     * filled. The HAL must include a valid release sync fence in each output
+     * stream buffer entry, or -1 if that stream buffer is already filled.
+     *
+     * If the result buffer cannot be constructed for a request, the HAL must
+     * return an empty metadata buffer, but still provide the output buffers and
+     * their sync fences. In addition, notify() must be called with an
+     * ERROR_RESULT message.
+     *
+     * If an output buffer cannot be filled, its status field must be set to
+     * BufferStatus::ERROR. In addition, notify() must be called with an
+     * ERROR_BUFFER message.
+     *
+     * If the entire capture has failed, then this method still needs to be
+     * called to return the output buffers to the framework. All the buffer
+     * statuses must be BufferStatus::ERROR, and the result metadata must be an
+     * empty buffer. In addition, notify() must be called with an ERROR_REQUEST
+     * message. In this case, individual ERROR_RESULT/ERROR_BUFFER messages
+     * must not be sent.
+     *
+     * Performance requirements:
+     *
+     * This is a non-blocking call. The framework must return from this call in 5ms.
+     *
+     * The pipeline latency (see S7 for definition) should be less than or equal to
+     * 4 frame intervals, and must be less than or equal to 8 frame intervals.
+     *
+     */
+    processCaptureResult(CaptureResult result);
+
+    /**
+     * notify:
+     *
+     * Asynchronous notification callback from the HAL, fired for various
+     * reasons. Used only for information that is independent of frame capture,
+     * or that requires specific timing.
+     *
+     * Multiple threads may call notify() simultaneously.
+     *
+     * Buffers delivered to the framework must not be dispatched to the
+     * application layer until a start of exposure timestamp (or input image's
+     * start of exposure timestamp for a reprocess request) has been received
+     * via a SHUTTER notify() call. It is highly recommended to dispatch this
+     * call as early as possible.
+     *
+     * ------------------------------------------------------------------------
+     * Performance requirements:
+     *
+     * This is a non-blocking call. The framework must return from this call in 5ms.
+     */
+    notify(NotifyMsg msg);
+
+};
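+
+/*
+ * A skeletal framework-side implementation of this callback interface, as a
+ * sketch of the C++ classes hidl-gen produces (the class name is illustrative):
+ *
+ *   struct FrameworkCallback : public ICameraDeviceCallback {
+ *       Return<void> processCaptureResult(const CaptureResult& result) override {
+ *           // Copy whatever is needed before returning; the HAL retains
+ *           // ownership of the result structure.
+ *           return Void();
+ *       }
+ *       Return<void> notify(const NotifyMsg& msg) override {
+ *           // Dispatch SHUTTER/ERROR messages; must not call back into the HAL.
+ *           return Void();
+ *       }
+ *   };
+ */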
diff --git a/camera/device/3.2/ICameraDeviceSession.hal b/camera/device/3.2/ICameraDeviceSession.hal
new file mode 100644
index 0000000..c8cc246
--- /dev/null
+++ b/camera/device/3.2/ICameraDeviceSession.hal
@@ -0,0 +1,360 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera.device@3.2;
+
+import android.hardware.camera.common@1.0::types;
+
+/**
+ * Camera device active session interface.
+ *
+ * Obtained via ICameraDevice::open(), this interface contains the methods to
+ * configure and request captures from an active camera device.
+ *
+ */
+interface ICameraDeviceSession {
+
+    /**
+     * constructDefaultRequestSettings:
+     *
+     * Create capture settings for standard camera use cases.
+     *
+     * The device must return a settings buffer that is configured to meet the
+     * requested use case, which must be one of the RequestTemplate enum
+     * values. All request control fields must be included.
+     *
+     * Performance requirements:
+     *
+     * This must be a non-blocking call. The HAL should return from this call
+     * in 1ms, and must return from this call in 5ms.
+     *
+     * Return values:
+     * @return status Status code for the operation, one of:
+     *     OK:
+     *         On a successful construction of default settings.
+     *     INTERNAL_ERROR:
+     *         An unexpected internal error occurred, and the default settings
+     *         are not available.
+     *     CAMERA_DISCONNECTED:
+     *         An external camera device has been disconnected, and is no longer
+     *         available. This camera device interface is now stale, and a new
+     *         instance must be acquired if the device is reconnected. All
+     *         subsequent calls on this interface must return
+     *         CAMERA_DISCONNECTED.
+     * @return requestTemplate The default capture request settings for the
+     *     requested use case, or an empty metadata structure if status is not OK.
+     *
+     */
+    constructDefaultRequestSettings(RequestTemplate type) generates
+            (Status status, CameraMetadata requestTemplate);
+
+    /**
+     * configureStreams:
+     *
+     * Reset the HAL camera device processing pipeline and set up new input and
+     * output streams. This call replaces any existing stream configuration with
+     * the streams defined in the streamList. This method must be called at
+     * least once before a request is submitted with processCaptureRequest().
+     *
+     * The streamList must contain at least one output-capable stream, and must
+     * not contain more than one input-capable stream.
+     *
+     * The streamList may contain streams that are also in the currently-active
+     * set of streams (from the previous call to configureStreams()). These
+     * streams must already have valid values for usage, maxBuffers, and the
+     * private pointer.
+     *
+     * If the HAL needs to change the stream configuration for an existing
+     * stream due to the new configuration, it may rewrite the values of usage
+     * and/or maxBuffers during the configure call.
+     *
+     * The framework must detect such a change, and may then reallocate the
+     * stream buffers before using buffers from that stream in a request.
+     *
+     * If a currently-active stream is not included in streamList, the HAL may
+     * safely remove any references to that stream. It must not be reused in a
+     * later configureStreams() call by the framework, and all the gralloc
+     * buffers for it must be freed after the configureStreams() call returns.
+     *
+     * If the stream is new, the maxBuffers field of the stream structure must be
+     * set to 0. The usage must be set to the consumer usage flags. The HAL
+     * device must set these fields in the configureStreams() return values.
+     * These fields are then used by the framework and the platform gralloc
+     * module to allocate the gralloc buffers for each stream.
+     *
+     * Newly allocated buffers may be included in a capture request at any time
+     * by the framework. Once a gralloc buffer is returned to the framework
+     * with processCaptureResult (and its respective releaseFence has been
+     * signaled) the framework may free or reuse it at any time.
+     *
+     * ------------------------------------------------------------------------
+     *
+     * Preconditions:
+     *
+     * The framework must only call this method when no captures are being
+     * processed. That is, all results have been returned to the framework, and
+     * all in-flight input and output buffers have been returned and their
+     * release sync fences have been signaled by the HAL. The framework must not
+     * submit new requests for capture while the configureStreams() call is
+     * underway.
+     *
+     * Postconditions:
+     *
+     * The HAL device must configure itself to provide maximum possible output
+     * frame rate given the sizes and formats of the output streams, as
+     * documented in the camera device's static metadata.
+     *
+     * Performance requirements:
+     *
+     * This call is expected to be heavyweight and possibly take several hundred
+     * milliseconds to complete, since it may require resetting and
+     * reconfiguring the image sensor and the camera processing pipeline.
+     * Nevertheless, the HAL device should attempt to minimize the
+     * reconfiguration delay to minimize the user-visible pauses during
+     * application operational mode changes (such as switching from still
+     * capture to video recording).
+     *
+     * The HAL should return from this call in 500ms, and must return from this
+     * call in 1000ms.
+     *
+     * @return status Status code for the operation, one of:
+     *     OK:
+     *          On successful stream configuration.
+     *     INTERNAL_ERROR:
+     *         If there has been a fatal error and the device is no longer
+     *         operational. Only close() can be called successfully by the
+     *         framework after this error is returned.
+     *     ILLEGAL_ARGUMENT:
+     *         If the requested stream configuration is invalid. Some examples
+     *         of invalid stream configurations include:
+     *           - Including more than 1 INPUT stream
+     *           - Not including any OUTPUT streams
+     *           - Including streams with unsupported formats, or an unsupported
+     *             size for that format.
+     *           - Including too many output streams of a certain format.
+     *           - Unsupported rotation configuration
+     *           - Stream sizes/formats don't satisfy the
+     *             StreamConfiguration::operationMode requirements
+     *             for non-NORMAL mode, or the requested operationMode is not
+     *             supported by the HAL.
+     *         The camera service cannot filter out all possible illegal stream
+     *         configurations, since some devices may support more simultaneous
+     *         streams or larger stream resolutions than the minimum required
+     *         for a given camera device hardware level. The HAL must return an
+     *         ILLEGAL_ARGUMENT for any unsupported stream set, and then be
+     *         ready to accept a future valid stream configuration in a later
+     *         configureStreams call.
+     * @return halConfiguration The stream parameters desired by the HAL for
+     *     each stream, including maximum buffers, the usage flags, and the
+     *     override format.
+     *
+     */
+    configureStreams(StreamConfiguration requestedConfiguration)
+            generates (Status status,
+                    HalStreamConfiguration halConfiguration);
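+
+    /*
+     * A framework-side sketch of a single-preview-stream configuration using
+     * the types defined in types.hal (the size, format, and dataspace choices
+     * are illustrative assumptions):
+     *
+     *   Stream preview{};
+     *   preview.id = 0;
+     *   preview.streamType = StreamType::OUTPUT;
+     *   preview.width = 1280;
+     *   preview.height = 720;
+     *   preview.format = PixelFormat::IMPLEMENTATION_DEFINED;
+     *   preview.dataSpace = Dataspace::UNKNOWN;
+     *   preview.rotation = StreamRotation::ROTATION_0;
+     *   // usage carries the consumer's flags; maxBuffers starts at 0 and is
+     *   // filled in by the HAL in the returned HalStreamConfiguration.
+     *
+     *   StreamConfiguration config{};
+     *   config.streams.resize(1);
+     *   config.streams[0] = preview;
+     *   config.operationMode = StreamConfigurationMode::NORMAL_MODE;
+     *
+     *   session->configureStreams(config,
+     *           [](Status s, const HalStreamConfiguration& halConfig) {
+     *       // On OK, halConfig.streams carries the HAL's usage, maxBuffers,
+     *       // and format overrides for each requested stream.
+     *   });
+     */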
+
+    /**
+     * processCaptureRequest:
+     *
+     * Send a new capture request to the HAL. The HAL must not return from
+     * this call until it is ready to accept the next request to process. Only
+     * one call to processCaptureRequest() must be made at a time by the
+     * framework, and the calls must all be from the same thread. The next call
+     * to processCaptureRequest() must be made as soon as a new request and
+     * its associated buffers are available. In a normal preview scenario, this
+     * means the function is generally called again by the framework almost
+     * instantly.
+     *
+     * The actual request processing is asynchronous, with the results of
+     * capture being returned by the HAL through the processCaptureResult()
+     * call. This call requires the result metadata to be available, but output
+     * buffers may simply provide sync fences to wait on. Multiple requests are
+     * expected to be in flight at once, to maintain full output frame rate.
+     *
+     * The framework retains ownership of the request structure. It is only
+     * guaranteed to be valid during this call. The HAL device must make copies
+     * of the information it needs to retain for the capture processing. The HAL
+     * is responsible for waiting on and closing the buffers' fences and
+     * returning the buffer handles to the framework.
+     *
+     * The HAL must write the file descriptor for the input buffer's release
+     * sync fence into the input buffer's releaseFence field, if the input
+     * buffer is valid. If the HAL returns -1 for the release sync fence, the
+     * framework is free to immediately reuse the input buffer. Otherwise, the
+     * framework must wait on the sync fence before refilling and reusing the
+     * input buffer.
+     *
+     * The input/output buffers provided by the framework in each request
+     * may be brand new (never before seen by the HAL).
+     *
+     * ------------------------------------------------------------------------
+     * Performance considerations:
+     *
+     * Handling a new buffer should be extremely lightweight and there must be
+     * no frame rate degradation or frame jitter introduced.
+     *
+     * This call must return fast enough to ensure that the requested frame
+     * rate can be sustained, especially for streaming cases (post-processing
+     * quality settings set to FAST). The HAL should return from this call in 1
+     * frame interval, and must return from this call in 4 frame intervals.
+     *
+     * @return status Status code for the operation, one of:
+     *     OK:
+     *         On a successful start to processing the capture request
+     *     ILLEGAL_ARGUMENT:
+     *         If the input is malformed (the settings are empty when not
+     *         allowed, there are 0 output buffers, etc) and capture processing
+     *         cannot start. Failures during request processing must be
+     *         handled by calling ICameraDeviceCallback::notify(). In case of
+     *         this error, the framework retains responsibility for the
+     *         stream buffers' fences and the buffer handles; the HAL must not
+     *         close the fences or return these buffers with
+     *         ICameraDeviceCallback::processCaptureResult().
+     *     INTERNAL_ERROR:
+     *         If the camera device has encountered a serious error. After this
+     *         error is returned, only the close() method can be successfully
+     *         called by the framework.
+     *
+     */
+    processCaptureRequest(CaptureRequest request)
+            generates (Status status);
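+
+    /*
+     * A minimal submission sketch; building the CaptureRequest itself
+     * (settings and output buffers) is omitted and the helper name is
+     * hypothetical:
+     *
+     *   CaptureRequest request = buildNextRequest();  // hypothetical helper
+     *   Return<Status> ret = session->processCaptureRequest(request);
+     *   if (!ret.isOk()) {
+     *       // Transport error (e.g. the HAL process died).
+     *   } else if (static_cast<Status>(ret) != Status::OK) {
+     *       // Request rejected; the framework keeps the buffers and fences.
+     *   }
+     */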
+
+    /**
+     * flush:
+     *
+     * Flush all currently in-process captures and all buffers in the pipeline
+     * on the given device. Generally, this method is used to dump all state as
+     * quickly as possible in order to prepare for a configureStreams() call.
+     *
+     * No buffers are required to be successfully returned, so every buffer
+     * held at the time of flush() (whether successfully filled or not) may be
+     * returned with BufferStatus::ERROR. Note the HAL is still allowed
+     * to return valid (BufferStatus::OK) buffers during this call,
+     * provided they are successfully filled.
+     *
+     * All requests currently in the HAL are expected to be returned as soon as
+     * possible. Not-in-process requests must return errors immediately. Any
+     * interruptible hardware blocks must be stopped, and any uninterruptible
+     * blocks must be waited on.
+     *
+     * flush() may be called concurrently with processCaptureRequest(), with the
+     * expectation that processCaptureRequest() returns quickly and the
+     * request submitted in that processCaptureRequest call is treated like
+     * all other in-flight requests. Due to concurrency issues, it is possible
+     * that from the HAL's point of view, a processCaptureRequest() call may
+     * be started after flush has been invoked but has not returned yet. If such
+     * a call happens before flush() returns, the HAL must treat the new
+     * capture request like other in-flight pending requests (see #4 below).
+     *
+     * More specifically, the HAL must follow the requirements below for the
+     * various cases:
+     *
+     * 1. For captures that are too late for the HAL to cancel/stop, the capture
+     *    must be completed normally by the HAL; i.e., the HAL can send the
+     *    shutter notify() and return metadata and buffers with
+     *    processCaptureResult() as normal.
+     *
+     * 2. For pending requests that have not done any processing, the HAL must
+     *    call notify() with ErrorCode::ERROR_REQUEST, and return all the output
+     *    buffers with processCaptureResult() in the error state
+     *    (BufferStatus::ERROR). The HAL must not place the release
+     *    fence into an error state; instead, the release fences must be set to
+     *    the acquire fences passed by the framework, or -1 if they have already
+     *    been waited on by the HAL. This is also the path to follow for any
+     *    captures for which the HAL already called notify() with
+     *    MsgType::SHUTTER but won't be producing any metadata/valid buffers
+     *    for. After ErrorCode::ERROR_REQUEST, for a given frame, only
+     *    processCaptureResult() calls with buffers in BufferStatus::ERROR
+     *    are allowed. No further notify() calls or processCaptureResult() calls
+     *    with non-empty metadata are allowed.
+     *
+     * 3. For partially completed pending requests that do not have all the
+     *    output buffers or are missing some metadata, the HAL must do the
+     *    following:
+     *
+     *    3.1. Call notify() with ErrorCode::ERROR_RESULT if some of the expected
+     *         result metadata (i.e. one or more partial metadata) won't be
+     *         available for the capture.
+     *
+     *    3.2. Call notify() with ErrorCode::ERROR_BUFFER for every buffer that
+     *         won't be produced for the capture.
+     *
+     *    3.3. Call notify() with MsgType::SHUTTER with the capture timestamp
+     *         before any buffers/metadata are returned with
+     *         processCaptureResult().
+     *
+     *    3.4. For captures that will produce some results, the HAL must not
+     *         call notify() with ErrorCode::ERROR_REQUEST, since that indicates
+     *         complete failure.
+     *
+     *    3.5. Valid buffers/metadata must be passed to the framework as
+     *         normal.
+     *
+     *    3.6. Failed buffers must be returned to the framework as described
+     *         for case 2. But failed buffers do not have to follow the strict
+     *         ordering valid buffers do, and may be out-of-order with respect
+     *         to valid buffers. For example, if buffers A, B, C, D, E are sent,
+     *         D and E are failed, then A, E, B, D, C is an acceptable return
+     *         order.
+     *
+     *    3.7. For fully-missing metadata, notifying ErrorCode::ERROR_RESULT is
+     *         sufficient; there is no need to call processCaptureResult() with
+     *         empty metadata or equivalent.
+     *
+     * 4. If a flush() is invoked while a processCaptureRequest() invocation
+     *    is active, that process call must return as soon as possible. In
+     *    addition, if a processCaptureRequest() call is made after flush()
+     *    has been invoked but before flush() has returned, the capture request
+     *    provided by the late processCaptureRequest call must be treated
+     *    like a pending request in case #2 above.
+     *
+     * flush() must only return when there are no more outstanding buffers or
+     * requests left in the HAL. The framework may call configureStreams() (as
+     * the HAL state is now quiesced) or may issue new requests.
+     *
+     * Note that it's sufficient to only support fully-succeeded and
+     * fully-failed result cases. However, it is highly desirable to support
+     * the partial failure cases as well, as doing so can improve the overall
+     * performance of the flush call.
+     *
+     * Performance requirements:
+     *
+     * The HAL should return from this call in 100ms, and must return from this
+     * call in 1000ms. This call must not be blocked longer than the pipeline
+     * latency (see S7 for definition).
+     *
+     * @return status Status code for the operation, one of:
+     *     OK:
+     *         On a successful flush of the camera HAL.
+     *     INTERNAL_ERROR:
+     *         If the camera device has encountered a serious error. After this
+     *         error is returned, only the close() method can be successfully
+     *         called by the framework.
+     */
+    flush() generates (Status status);
+
+    /**
+     * close:
+     *
+     * Shut down the camera device.
+     *
+     * After this call, all calls to this session instance must return
+     * INTERNAL_ERROR.
+     *
+     * This method must always succeed, even if the device has encountered a
+     * serious error.
+     */
+    close();
+};
diff --git a/camera/device/3.2/types.hal b/camera/device/3.2/types.hal
new file mode 100644
index 0000000..3ce5037
--- /dev/null
+++ b/camera/device/3.2/types.hal
@@ -0,0 +1,920 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package android.hardware.camera.device@3.2;
+
+import android.hardware.graphics.allocator@2.0::types;
+import android.hardware.graphics.common@1.0::types;
+
+typedef vec<uint8_t> CameraMetadata;
+
+/**
+ * StreamType:
+ *
+ * The type of the camera stream, which defines whether the camera HAL device is
+ * the producer or the consumer for that stream, and how the buffers of the
+ * stream relate to the other streams.
+ */
+enum StreamType : uint32_t {
+    /**
+     * This stream is an output stream; the camera HAL device must fill buffers
+     * from this stream with newly captured or reprocessed image data.
+     */
+    OUTPUT = 0,
+
+    /**
+     * This stream is an input stream; the camera HAL device must read buffers
+     * from this stream and send them through the camera processing pipeline,
+     * as if the buffer was a newly captured image from the imager.
+     *
+     * The pixel format for an input stream can be any format reported by
+     * android.scaler.availableInputOutputFormatsMap. The pixel format of the
+     * output stream that is used to produce the reprocessing data may be any
+     * format reported by android.scaler.availableStreamConfigurations. The
+     * supported input/output stream combinations depend on the camera device's
+     * capabilities; see android.scaler.availableInputOutputFormatsMap for
+     * stream map details.
+     *
+     * This kind of stream is generally used to reprocess data into higher
+     * quality images (that otherwise would cause a frame rate performance
+     * loss), or to do off-line reprocessing.
+     *
+     * The typical use cases are OPAQUE (typically ZSL) and YUV reprocessing,
+     * see S8.2, S8.3 and S10 for more details.
+     */
+    INPUT = 1
+
+};
+
+/**
+ * StreamRotation:
+ *
+ * The required counterclockwise rotation of camera stream.
+ */
+enum StreamRotation : uint32_t  {
+    /* No rotation */
+    ROTATION_0 = 0,
+
+    /* Rotate by 90 degrees counterclockwise */
+    ROTATION_90 = 1,
+
+    /* Rotate by 180 degrees counterclockwise */
+    ROTATION_180 = 2,
+
+    /* Rotate by 270 degrees counterclockwise */
+    ROTATION_270 = 3
+
+};
+
+/**
+ * StreamConfigurationMode:
+ *
+ * This defines the general operation mode for the HAL (for a given stream
+ * configuration) where modes besides NORMAL have different semantics, and
+ * usually limit the generality of the API in exchange for higher performance in
+ * some particular area.
+ */
+enum StreamConfigurationMode : uint32_t {
+    /**
+     * Normal stream configuration operation mode. This is the default camera
+     * operation mode, where all semantics of HAL APIs and metadata controls
+     * apply.
+     */
+    NORMAL_MODE = 0,
+
+    /**
+     * Special constrained high speed operation mode for devices that cannot
+     * support high speed output in NORMAL mode. All streams in this
+     * configuration are operating at high speed mode and have different
+     * characteristics and limitations to achieve high speed output. The NORMAL
+     * mode can still be used for high speed output if the HAL can support high
+     * speed output while satisfying all the semantics of HAL APIs and metadata
+     * controls. It is recommended for the HAL to support high speed output in
+     * NORMAL mode (by advertising the high speed FPS ranges in
+     * android.control.aeAvailableTargetFpsRanges) if possible.
+     *
+     * This mode has below limitations/requirements:
+     *
+     *   1. The HAL must support up to 2 streams with sizes reported by
+     *      android.control.availableHighSpeedVideoConfigurations.
+     *   2. In this mode, the HAL is expected to output up to 120fps or
+     *      higher. This mode must support the targeted FPS range and size
+     *      configurations reported by
+     *      android.control.availableHighSpeedVideoConfigurations.
+     *   3. The HAL must support IMPLEMENTATION_DEFINED output
+     *      stream format.
+     *   4. To achieve efficient high speed streaming, the HAL may have to
+     *      aggregate multiple frames together and send them to the camera
+     *      device for processing, where the request controls are the same for
+     *      all the frames in the batch (batch mode). The HAL must support the
+     *      max batch size, and the max batch size requirements are defined by
+     *      android.control.availableHighSpeedVideoConfigurations.
+     *   5. In this mode, the HAL must override aeMode, awbMode, and afMode to
+     *      ON, ON, and CONTINUOUS_VIDEO, respectively. All post-processing
+     *      block mode controls must be overridden to be FAST. Therefore, no
+     *      manual control of capture and post-processing parameters is
+     *      possible. All other controls operate the same as when
+     *      android.control.mode == AUTO. This means that all other
+     *      android.control.* fields must continue to work, such as
+     *
+     *      android.control.aeTargetFpsRange
+     *      android.control.aeExposureCompensation
+     *      android.control.aeLock
+     *      android.control.awbLock
+     *      android.control.effectMode
+     *      android.control.aeRegions
+     *      android.control.afRegions
+     *      android.control.awbRegions
+     *      android.control.afTrigger
+     *      android.control.aePrecaptureTrigger
+     *
+     *      Outside of android.control.*, the following controls must work:
+     *
+     *      android.flash.mode (TORCH mode only, automatic flash for still
+     *          capture must not work since aeMode is ON)
+     *      android.lens.opticalStabilizationMode (if it is supported)
+     *      android.scaler.cropRegion
+     *      android.statistics.faceDetectMode (if it is supported)
+     *
+     * For more details about high speed stream requirements, see
+     * android.control.availableHighSpeedVideoConfigurations and
+     * CONSTRAINED_HIGH_SPEED_VIDEO capability defined in
+     * android.request.availableCapabilities.
+     *
+     * This mode only needs to be supported by HALs that include
+     * CONSTRAINED_HIGH_SPEED_VIDEO in the android.request.availableCapabilities
+     * static metadata.
+     */
+    CONSTRAINED_HIGH_SPEED_MODE = 1
+
+};
+
+/**
+ * Stream:
+ *
+ * A descriptor for a single camera input or output stream. A stream is defined
+ * by the framework by its buffer resolution and format, and additionally by the
+ * HAL with the gralloc usage flags and the maximum in-flight buffer count.
+ *
+ * If a configureStreams() call returns a non-fatal error, all active streams
+ * remain valid as if configureStreams() had not been called.
+ *
+ */
+struct Stream {
+    /**
+     * Stream ID - a nonnegative integer identifier for a stream.
+     *
+     * The identical stream ID must reference the same stream, with the same
+     * width/height/format, across consecutive calls to configureStreams.
+     *
+     * If a previously-used stream ID is not used in a new call to
+     * configureStreams, then that stream is no longer active. Such a stream ID
+     * may be reused in a future configureStreams with a new
+     * width/height/format.
+     *
+     */
+    int32_t id;
+
+    /**
+     * The type of the stream (input vs output, etc).
+     */
+    StreamType streamType;
+
+    /**
+     * The width in pixels of the buffers in this stream
+     */
+    uint32_t width;
+
+    /**
+     * The height in pixels of the buffers in this stream
+     */
+    uint32_t height;
+
+    /**
+     * The pixel format for the buffers in this stream.
+     *
+     * If IMPLEMENTATION_DEFINED is used, then the platform
+     * gralloc module must select a format based on the usage flags provided by
+     * the camera device and the other endpoint of the stream.
+     *
+     */
+    android.hardware.graphics.common@1.0::PixelFormat format;
+
+    /**
+     * The gralloc usage flags for this stream, as needed by the consumer of
+     * the stream.
+     *
+     * The usage flags from the producer and the consumer must be combined
+     * together and then passed to the platform gralloc HAL module for
+     * allocating the gralloc buffers for each stream.
+     *
+     * For streamType OUTPUT, when passed via
+     * configureStreams(), the initial value of this is the consumer's usage
+     * flags. The HAL may use these consumer flags to decide stream
+     * configuration. For streamType INPUT, when passed via
+     * configureStreams(), the initial value of this is 0. For all streams
+     * passed via configureStreams(), the HAL must set its desired producer
+     * usage flags in the final stream configuration.
+     */
+    ConsumerUsage usage;
+
+    /**
+     * The maximum number of buffers the HAL device may need to have dequeued at
+     * the same time. The HAL device must not have more buffers in-flight from
+     * this stream than this value. For all streams passed via
+     * configureStreams(), the HAL must set its desired max buffer count in the
+     * final stream configuration.
+     */
+    uint32_t maxBuffers;
+
+    /**
+     * A field that describes the contents of the buffer. The format and buffer
+     * dimensions define the memory layout and structure of the stream buffers,
+     * while dataSpace defines the meaning of the data within the buffer.
+     *
+     * For most formats, dataSpace defines the color space of the image data.
+     * In addition, for some formats, dataSpace indicates whether image- or
+     * depth-based data is requested. See
+     * android.hardware.graphics.common@1.0::types for details of formats and
+     * valid dataSpace values for each format.
+     *
+     * The HAL must use this dataSpace to configure the stream to the correct
+     * colorspace, or to select between color and depth outputs if
+     * supported. The dataspace values are set using the V0 dataspace
+     * definitions.
+     */
+    Dataspace dataSpace;
+
+    /**
+     * The required output rotation of the stream.
+     *
+     * This must be inspected by the HAL along with the stream width and height.
+     * For example, if the rotation is 90 degrees and the stream width and height
+     * are 720 and 1280 respectively, the camera service must supply buffers of
+     * size 720x1280, and the HAL must capture a 1280x720 image and rotate it by
+     * 90 degrees counterclockwise. The rotation field must be ignored when the
+     * stream type is input.
+     *
+     * The HAL must inspect this field during stream configuration and return
+     * ILLEGAL_ARGUMENT if the HAL cannot perform such a rotation. The HAL must
+     * always support ROTATION_0, so a configureStreams() call must not fail for
+     * an unsupported rotation if the rotation field of all streams is ROTATION_0.
+     *
+     */
+    StreamRotation rotation;
+
+};
+
+/**
+ * StreamConfiguration:
+ *
+ * A structure of stream definitions, used by configureStreams(). This
+ * structure defines all the output streams and the reprocessing input
+ * stream for the current camera use case.
+ */
+struct StreamConfiguration {
+    /**
+     * An array of camera stream pointers, defining the input/output
+     * configuration for the camera HAL device.
+     *
+     * At most one input-capable stream may be defined.
+     * At least one output-capable stream must be defined.
+     */
+    vec<Stream> streams;
+
+    /**
+     * The operation mode of streams in this configuration. The HAL can use this
+     * mode as an indicator to set the stream property (e.g.,
+     * HalStream::maxBuffers) appropriately. For example, if the
+     * configuration is
+     * CONSTRAINED_HIGH_SPEED_MODE, the HAL may
+     * want to set aside more buffers for batch mode operation (see
+     * android.control.availableHighSpeedVideoConfigurations for batch mode
+     * definition).
+     *
+     */
+    StreamConfigurationMode operationMode;
+
+};
+
+/**
+ * HalStream:
+ *
+ * The camera HAL's response to each requested stream configuration.
+ *
+ * The HAL may specify the desired format, maximum buffers, and
+ * usage flags for each stream.
+ *
+ */
+struct HalStream {
+    /**
+     * Stream ID - a nonnegative integer identifier for a stream.
+     *
+     * The ID must be one of the stream IDs passed into configureStreams.
+     */
+    int32_t id;
+
+    /**
+     * The pixel format for the buffers in this stream.
+     *
+     * If IMPLEMENTATION_DEFINED is used, then the platform
+     * gralloc module must select a format based on the usage flags provided by
+     * the camera device and the other endpoint of the stream.
+     *
+     * The HAL must respect the requested format in Stream unless it is
+     * IMPLEMENTATION_DEFINED, in which case the override format here must be
+     * used instead. This allows cross-platform HALs to use a standard format
+     * since IMPLEMENTATION_DEFINED formats often require device-specific
+     * information. In all other cases, the overrideFormat must match the
+     * requested format.
+     */
+    android.hardware.graphics.common@1.0::PixelFormat overrideFormat;
+
+    /**
+     * The gralloc usage flags for this stream, as needed by the HAL.
+     *
+     * For output streams, these are the HAL's producer usage flags. For input
+     * streams, these are the HAL's consumer usage flags. The usage flags from
+     * the producer and the consumer must be combined together and then passed
+     * to the platform graphics allocator HAL for allocating the gralloc buffers
+     * for each stream.
+     *
+     * If the stream's type is INPUT, then producerUsage must be 0, and
+     * consumerUsage must be set. For other types, producerUsage must be set,
+     * and consumerUsage must be 0.
+     */
+    ProducerUsage producerUsage;
+    ConsumerUsage consumerUsage;
+
+    /**
+     * The maximum number of buffers the HAL device may need to have dequeued at
+     * the same time. The HAL device must not have more buffers in-flight from
+     * this stream than this value.
+     */
+    uint32_t maxBuffers;
+
+};
+
+/**
+ * HalStreamConfiguration:
+ *
+ * A structure of stream definitions, returned by configureStreams(). This
+ * structure defines the HAL's desired parameters for each stream.
+ *
+ * All streams that were defined in the input to configureStreams() must have a
+ * corresponding entry in this structure when returned by configureStreams().
+ */
+struct HalStreamConfiguration {
+    vec<HalStream> streams;
+};
+
+/**
+ * BufferStatus:
+ *
+ * The current status of a single stream buffer.
+ */
+enum BufferStatus : uint32_t {
+    /**
+     * The buffer is in a normal state, and can be used after waiting on its
+     * sync fence.
+     */
+    OK = 0,
+
+    /**
+     * The buffer does not contain valid data, and the data in it must not be
+     * used. The sync fence must still be waited on before reusing the buffer.
+     */
+    ERROR = 1
+};
+
+/**
+ * StreamBuffer:
+ *
+ * A single buffer from a camera stream. It includes a handle to its parent
+ * stream, the handle to the gralloc buffer itself, and its sync fences.
+ *
+ * The buffer does not specify whether it is to be used for input or output;
+ * that is determined by its parent stream type and how the buffer is passed to
+ * the HAL device.
+ */
+struct StreamBuffer {
+    /**
+     * The ID of the stream this buffer is associated with
+     */
+    int32_t streamId;
+
+    /**
+     * The graphics buffer handle to the buffer
+     */
+    handle buffer;
+
+    /**
+     * Current state of the buffer. The framework must not pass buffers to the
+     * HAL that are in an error state. In case a buffer could not be filled by
+     * the HAL, it must have its status set to ERROR when returned to the
+     * framework with processCaptureResult().
+     */
+    BufferStatus status;
+
+    /**
+     * The acquire sync fence for this buffer. The HAL must wait on this fence
+     * fd before attempting to read from or write to this buffer.
+     *
+     * The framework may set this to -1 to indicate that no waiting is necessary
+     * for this buffer.
+     *
+     * When the HAL returns an output buffer to the framework with
+     * processCaptureResult(), the acquireFence must be set to -1. If the HAL
+     * never waits on the acquireFence due to an error in filling a buffer,
+     * when calling processCaptureResult() the HAL must set the releaseFence
+     * of the buffer to be the acquireFence passed to it by the framework. This
+     * allows the framework to wait on the fence before reusing the buffer.
+     *
+     * For input buffers, the HAL must not change the acquireFence field during
+     * the processCaptureRequest() call.
+     *
+     * When the HAL returns an input buffer to the framework with
+     * processCaptureResult(), the acquireFence must be set to -1. If the HAL
+     * never waits on input buffer acquire fence due to an error, the sync
+     * fences must be handled similarly to the way they are handled for output
+     * buffers.
+     */
+    handle acquireFence;
+
+    /**
+     * The release sync fence for this buffer. The HAL must set this fence when
+     * returning buffers to the framework, or write -1 to indicate that no
+     * waiting is required for this buffer.
+     *
+     * For the output buffers, the fences must be set in the outputBuffers
+     * array passed to processCaptureResult().
+     *
+     * For the input buffer, the fences must be set in the inputBuffer
+     * passed to processCaptureResult().
+     *
+     * After signaling the releaseFence for this buffer, the HAL
+     * must not make any further attempts to access this buffer as the
+     * ownership has been fully transferred back to the framework.
+     *
+     * If a fence of -1 was specified then the ownership of this buffer
+     * is transferred back immediately upon the call of processCaptureResult.
+     */
+    handle releaseFence;
+
+};
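+
+/*
+ * A HAL-side sketch of the fence handling described above, assuming libsync's
+ * sync_wait() and a buffer that is filled synchronously (so no release fence
+ * is needed); the surrounding code is illustrative:
+ *
+ *   const native_handle_t* acquire = buf.acquireFence.getNativeHandle();
+ *   if (acquire != nullptr && acquire->numFds == 1) {
+ *       sync_wait(acquire->data[0], 3000);  // 3 second timeout
+ *   }
+ *   // ... fill buf.buffer ...
+ *   buf.status = BufferStatus::OK;
+ *   buf.acquireFence = nullptr;  // must not be returned to the framework
+ *   buf.releaseFence = nullptr;  // empty/-1: no waiting needed before reuse
+ */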
+
+/**
+ * CameraBlob:
+ *
+ * Transport header for camera blob types; generally compressed JPEG buffers in
+ * output streams.
+ *
+ * To capture JPEG images, a stream is created using the pixel format
+ * HAL_PIXEL_FORMAT_BLOB and dataspace HAL_DATASPACE_V0_JFIF. The buffer size
+ * for the stream is calculated by the framework, based on the static metadata
+ * field android.jpeg.maxSize. Since compressed JPEG images are of variable
+ * size, the HAL needs to include the final size of the compressed image using
+ * this structure inside the output stream buffer. The camera blob ID field must
+ * be set to CameraBlobId::JPEG.
+ *
+ * The transport header must be at the end of the JPEG output stream
+ * buffer. That means the blobId must start at byte[buffer_size -
+ * sizeof(CameraBlob)], where buffer_size is the size of the gralloc
+ * buffer. Any HAL using this transport header must account for it in
+ * android.jpeg.maxSize. The JPEG data itself starts at the beginning of the
+ * buffer and must be blobSize bytes long.
+ */
+enum CameraBlobId : uint16_t {
+    JPEG = 0x00FF,
+};
+
+struct CameraBlob {
+    CameraBlobId blobId;
+
+    uint32_t blobSize;
+};
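+
+/*
+ * Example (non-normative): a minimal sketch of how a HAL might append the
+ * CameraBlob transport header after JPEG encoding. It assumes the generated
+ * C++ type for CameraBlob matches the transport layout, and that 'mapped' is
+ * the CPU-mapped BLOB gralloc buffer of 'bufferSize' bytes (equal to
+ * android.jpeg.maxSize). How the gralloc handle is mapped to a CPU address is
+ * device-specific and not shown.
+ *
+ *     #include <cstddef>
+ *     #include <cstdint>
+ *     #include <cstring>
+ *     #include <android/hardware/camera/device/3.2/types.h>
+ *
+ *     void appendJpegBlobHeader(uint8_t* mapped, size_t bufferSize,
+ *                               uint32_t jpegSize) {
+ *         CameraBlob header{};
+ *         header.blobId = CameraBlobId::JPEG;
+ *         header.blobSize = jpegSize;   // actual compressed size in bytes
+ *         // The header occupies the last sizeof(CameraBlob) bytes of the
+ *         // buffer; the JPEG data itself starts at mapped[0].
+ *         std::memcpy(mapped + bufferSize - sizeof(CameraBlob),
+ *                     &header, sizeof(CameraBlob));
+ *     }
+ */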
+
+/**
+ * MsgType:
+ *
+ * Indicates the type of message sent, which specifies which member of the
+ * message union is valid.
+ *
+ */
+enum MsgType : uint32_t {
+    /**
+     * An error has occurred. NotifyMsg::Message::Error contains the
+     * error information.
+     */
+    ERROR = 1,
+
+    /**
+     * The exposure of a given request or processing a reprocess request has
+     * begun. NotifyMsg::Message::Shutter contains the information for
+     * the capture.
+     */
+    SHUTTER = 2
+};
+
+/**
+ * Defined error codes for MsgType::ERROR
+ */
+enum ErrorCode : uint32_t {
+    /**
+     * A serious failure occurred. No further frames or buffer streams must
+     * be produced by the device. Device must be treated as closed. The
+     * client must reopen the device to use it again. The frameNumber field
+     * is unused.
+     */
+    ERROR_DEVICE = 1,
+
+    /**
+     * An error has occurred in processing a request. No output (metadata or
+     * buffers) must be produced for this request. The frameNumber field
+     * specifies which request has been dropped. Subsequent requests are
+     * unaffected, and the device remains operational.
+     */
+    ERROR_REQUEST = 2,
+
+    /**
+     * An error has occurred in producing an output result metadata buffer
+     * for a request, but output stream buffers for it must still be
+     * available. Subsequent requests are unaffected, and the device remains
+     * operational. The frameNumber field specifies the request for which
+     * result metadata won't be available.
+     */
+    ERROR_RESULT = 3,
+
+    /**
+     * An error has occurred in placing an output buffer into a stream for a
+     * request. The frame metadata and other buffers may still be
+     * available. Subsequent requests are unaffected, and the device remains
+     * operational. The frameNumber field specifies the request for which the
+     * buffer was dropped, and errorStreamId indicates the stream
+     * that dropped the frame.
+     */
+    ERROR_BUFFER = 4,
+};
+
+/**
+ * ErrorMsg:
+ *
+ * Message contents for MsgType::ERROR
+ */
+struct ErrorMsg {
+    /**
+     * Frame number of the request the error applies to. 0 if the frame number
+     * isn't applicable to the error.
+     */
+    uint32_t frameNumber;
+
+    /**
+     * The ID of the stream that had a failure, or -1 if a stream isn't
+     * applicable to the error.
+     */
+    int32_t errorStreamId;
+
+    /**
+     * The code for this error.
+     */
+    ErrorCode errorCode;
+
+};
+
+/**
+ * ShutterMsg:
+ *
+ * Message contents for MsgType::SHUTTER
+ */
+struct ShutterMsg {
+    /**
+     * Frame number of the request that has begun exposure or reprocessing.
+     */
+    uint32_t frameNumber;
+
+    /**
+     * Timestamp for the start of capture. For a reprocess request, this must
+     * be the input image's start of capture. This must match the capture result
+     * metadata's sensor exposure start timestamp.
+     */
+    uint64_t timestamp;
+
+};
+
+/**
+ * NotifyMsg:
+ *
+ * The message structure sent to ICameraDeviceCallback::notify()
+ */
+struct NotifyMsg {
+    /**
+     * The message type.
+     */
+    MsgType type;
+
+    union Message {
+        /**
+         * Error message contents. Valid if type is MsgType::ERROR
+         */
+        ErrorMsg error;
+
+        /**
+         * Shutter message contents. Valid if type is MsgType::SHUTTER
+         */
+        ShutterMsg shutter;
+    } msg;
+
+};
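+
+/*
+ * Example (non-normative): filling NotifyMsg for the two message types. A
+ * minimal C++ sketch assuming the generated types from this package and the
+ * union member name 'msg' declared above; the helper function names are
+ * illustrative only.
+ *
+ *     #include <android/hardware/camera/device/3.2/types.h>
+ *
+ *     NotifyMsg makeShutterMsg(uint32_t frameNumber, uint64_t timestamp) {
+ *         NotifyMsg m;
+ *         m.type = MsgType::SHUTTER;
+ *         m.msg.shutter.frameNumber = frameNumber;
+ *         m.msg.shutter.timestamp = timestamp;  // start of capture; must match
+ *                                               // the result metadata timestamp
+ *         return m;
+ *     }
+ *
+ *     NotifyMsg makeBufferErrorMsg(uint32_t frameNumber, int32_t streamId) {
+ *         NotifyMsg m;
+ *         m.type = MsgType::ERROR;
+ *         m.msg.error.frameNumber = frameNumber;
+ *         m.msg.error.errorStreamId = streamId;  // stream that dropped the buffer
+ *         m.msg.error.errorCode = ErrorCode::ERROR_BUFFER;
+ *         return m;
+ *     }
+ */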
+
+/**
+ * RequestTemplate:
+ *
+ * Available template types for
+ * ICameraDevice::constructDefaultRequestSettings()
+ */
+enum RequestTemplate : uint32_t {
+    /**
+     * Standard camera preview operation with 3A on auto.
+     */
+    PREVIEW = 1,
+
+    /**
+     * Standard camera high-quality still capture with 3A and flash on auto.
+     */
+    STILL_CAPTURE = 2,
+
+    /**
+     * Standard video recording plus preview with 3A on auto, torch off.
+     */
+    VIDEO_RECORD = 3,
+
+    /**
+     * High-quality still capture while recording video. Applications typically
+     * include preview, video record, and full-resolution YUV or JPEG streams in
+     * the request. Must not cause stuttering on the video stream. 3A on auto.
+     */
+    VIDEO_SNAPSHOT = 4,
+
+    /**
+     * Zero-shutter-lag mode. Applications typically request preview and
+     * full-resolution data for each frame, and reprocess it into a JPEG when
+     * a still image is requested by the user. Settings must provide the
+     * highest-quality full-resolution images without compromising preview
+     * frame rate. 3A on auto.
+     */
+    ZERO_SHUTTER_LAG = 5,
+
+    /**
+     * A basic template for direct application control of capture
+     * parameters. All automatic control is disabled (auto-exposure, auto-white
+     * balance, auto-focus), and post-processing parameters are set to preview
+     * quality. The manual capture parameters (exposure, sensitivity, etc.)
+     * are set to reasonable defaults, but may be overridden by the
+     * application depending on the intended use case.
+     */
+    MANUAL = 6,
+
+    /**
+     * First value for vendor-defined request templates
+     */
+    VENDOR_TEMPLATE_START = 0x40000000,
+
+};
+
+/**
+ * CaptureRequest:
+ *
+ * A single request for image capture/buffer reprocessing, sent to the Camera
+ * HAL device by the framework in processCaptureRequest().
+ *
+ * The request contains the settings to be used for this capture, and the set of
+ * output buffers to write the resulting image data into. It may optionally
+ * contain an input buffer, in which case the request is for reprocessing that
+ * input buffer instead of capturing a new image with the camera sensor. The
+ * capture is identified by the frameNumber.
+ *
+ * In response, the camera HAL device must send a CaptureResult
+ * structure asynchronously to the framework, using the processCaptureResult()
+ * callback.
+ */
+struct CaptureRequest {
+    /**
+     * The frame number is an incrementing integer set by the framework to
+     * uniquely identify this capture. It needs to be returned in the result
+     * call, and is also used to identify the request in asynchronous
+     * notifications sent to ICameraDeviceCallback::notify().
+     */
+    uint32_t frameNumber;
+
+    /**
+     * The settings buffer contains the capture and processing parameters for
+     * the request. As a special case, an empty settings buffer indicates that
+     * the settings are identical to the most-recently submitted capture
+     * request. An empty buffer cannot be used as the first submitted request
+     * after a configureStreams() call.
+     */
+    CameraMetadata settings;
+
+    /**
+     * The input stream buffer to use for this request, if any.
+     *
+     * If inputBuffer is invalid, then the request is for a new capture from the
+     * imager. If inputBuffer is valid, the request is for reprocessing the
+     * image contained in inputBuffer.
+     *
+     * In the latter case, the HAL must set the releaseFence of the
+     * inputBuffer to a valid sync fence, or to -1 if the HAL does not support
+     * sync, before processCaptureRequest() returns.
+     *
+     * The HAL is required to wait on the acquire sync fence of the input buffer
+     * before accessing it.
+     *
+     */
+    StreamBuffer inputBuffer;
+
+    /**
+     * An array of at least one stream buffer, to be filled with image
+     * data from this capture/reprocess. The HAL must wait on the acquire fences
+     * of each stream buffer before writing to them.
+     *
+     * The HAL takes ownership of the actual buffer_handle_t entries in
+     * outputBuffers; the framework must not access them until they are
+     * returned in a CaptureResult.
+     *
+     * Any or all of the buffers included here may be brand new in this
+     * request (having never before been seen by the HAL).
+     */
+    vec<StreamBuffer> outputBuffers;
+
+};
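+
+/*
+ * Example (non-normative): a minimal sketch of the "empty settings means
+ * repeat the last settings" rule described above, as a HAL might apply it
+ * while validating a request. 'lastSettings' is hypothetical per-session
+ * state; returning bool instead of the real status type keeps the sketch
+ * self-contained.
+ *
+ *     #include <android/hardware/camera/device/3.2/types.h>
+ *
+ *     // Returns false if the request must be rejected as an illegal argument.
+ *     bool resolveSettings(const CaptureRequest& request,
+ *                          CameraMetadata& lastSettings) {
+ *         if (request.settings.size() != 0) {
+ *             lastSettings = request.settings;  // new settings: remember them
+ *             return true;
+ *         }
+ *         // Empty settings: reuse lastSettings, but this is not allowed as
+ *         // the first request after configureStreams().
+ *         return lastSettings.size() != 0;
+ *     }
+ */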
+
+/**
+ * CaptureResult:
+ *
+ * The result of a single capture/reprocess by the camera HAL device. This is
+ * sent to the framework asynchronously with processCaptureResult(), in
+ * response to a single capture request sent to the HAL with
+ * processCaptureRequest(). Multiple processCaptureResult() calls may be
+ * performed by the HAL for each request.
+ *
+ * These calls all carry the same frame
+ * number, and each may contain some subset of the output buffers and/or the
+ * result metadata.
+ *
+ * The result structure contains the output metadata from this capture, and the
+ * set of output buffers that have been/will be filled for this capture. Each
+ * output buffer may come with a release sync fence that the framework must wait
+ * on before reading, in case the buffer has not yet been filled by the HAL.
+ *
+ * The metadata may be provided multiple times for a single frame number. The
+ * framework must accumulate together the final result set by combining each
+ * partial result together into the total result set.
+ *
+ * If an input buffer is given in a request, the HAL must return it in one of
+ * the processCaptureResult calls, and the call may be to just return the
+ * input buffer, without metadata and output buffers; the sync fences must be
+ * handled the same way they are done for output buffers.
+ *
+ * Performance considerations:
+ *
+ * Applications receive these partial results immediately, so sending partial
+ * results is a highly recommended performance optimization: it avoids waiting
+ * for the full pipeline latency before reporting results that are already
+ * known very early in the pipeline.
+ *
+ * A typical use case might be calculating the AF state halfway through the
+ * pipeline; by sending the state back to the framework immediately, we cut
+ * the reporting latency for that state roughly in half and improve the
+ * perceived responsiveness of the auto-focus.
+ *
+ */
+struct CaptureResult {
+    /**
+     * The frame number is an incrementing integer set by the framework in the
+     * submitted request to uniquely identify this capture. It is also used to
+     * identify the request in asynchronous notifications sent to
+     * ICameraDeviceCallback::notify().
+     */
+    uint32_t frameNumber;
+
+    /**
+     * The result metadata for this capture. This contains information about the
+     * final capture parameters, the state of the capture and post-processing
+     * hardware, the state of the 3A algorithms, if enabled, and the output of
+     * any enabled statistics units.
+     *
+     * If there was an error producing the result metadata, result must be an
+     * empty metadata buffer, and notify() must be called with
+     * ErrorCode::ERROR_RESULT.
+     *
+     * Multiple calls to processCaptureResult() with a given frameNumber
+     * may include (partial) result metadata.
+     *
+     * Partial metadata submitted must not include any metadata key returned
+     * in a previous partial result for a given frame. Each new partial result
+     * for that frame must also set a distinct partialResult value.
+     *
+     * If notify has been called with ErrorCode::ERROR_RESULT, all further
+     * partial results for that frame are ignored by the framework.
+     */
+    CameraMetadata result;
+
+    /**
+     * The handles for the output stream buffers for this capture. They may not
+     * yet be filled at the time the HAL calls processCaptureResult(); the
+     * framework must wait on the release sync fences provided by the HAL before
+     * reading the buffers.
+     *
+     * The number of output buffers returned must be less than or equal to the
+     * matching capture request's count. If this is less than the buffer count
+     * in the capture request, at least one more call to processCaptureResult
+     * with the same frameNumber must be made, to return the remaining output
+     * buffers to the framework. This may only be zero if the structure includes
+     * valid result metadata or an input buffer is returned in this result.
+     *
+     * The HAL must set the stream buffer's release sync fence to a valid sync
+     * fd, or to -1 if the buffer has already been filled.
+     *
+     * If the HAL encounters an error while processing the buffer, and the
+     * buffer is not filled, the buffer's status field must be set to ERROR. If
+     * the HAL did not wait on the acquire fence before encountering the error,
+     * the acquire fence must be copied into the release fence, to allow the
+     * framework to wait on the fence before reusing the buffer.
+     *
+     * The acquire fence must be set to -1 for all output buffers.
+     *
+     * This vector may be empty; if so, at least one other processCaptureResult
+     * call must be made (or have been made) by the HAL to provide the filled
+     * output buffers.
+     *
+     * When processCaptureResult is called with a new buffer for a frame,
+     * all previous frames' buffers for that corresponding stream must have been
+     * already delivered (the fences need not have yet been signaled).
+     *
+     * Gralloc buffers for a frame may be sent to the framework before the
+     * corresponding SHUTTER notify() call.
+     *
+     * Performance considerations:
+     *
+     * Buffers delivered to the framework are not dispatched to the
+     * application layer until a start of exposure timestamp has been received
+     * via a SHUTTER notify() call. It is highly recommended to
+     * dispatch that call as early as possible.
+     */
+    vec<StreamBuffer> outputBuffers;
+
+    /**
+     * The handle for the input stream buffer for this capture. It may not
+     * yet be consumed at the time the HAL calls processCaptureResult(); the
+     * framework must wait on the release sync fence provided by the HAL before
+     * reusing the buffer.
+     *
+     * The HAL must handle the sync fences the same way as it does for
+     * outputBuffers.
+     *
+     * Only one input buffer is allowed to be sent per request. Similarly to
+     * output buffers, the ordering of returned input buffers must be
+     * maintained by the HAL.
+     *
+     * Performance considerations:
+     *
+     * The input buffer should be returned as early as possible. If the HAL
+     * supports sync fences, it can call processCaptureResult to hand it back
+     * with sync fences being set appropriately. If the sync fences are not
+     * supported, the buffer can only be returned when it is consumed, which
+     * may take a long time; the HAL may choose to copy this input buffer to make
+     * the buffer return sooner.
+     */
+    StreamBuffer inputBuffer;
+
+    /**
+     * In order to take advantage of partial results, the HAL must set the
+     * static metadata android.request.partialResultCount to the number of
+     * partial results it sends for each frame.
+     *
+     * Each new capture result with a partial result must set
+     * this field to a distinct inclusive value between
+     * 1 and android.request.partialResultCount.
+     *
+     * HALs not wishing to take advantage of this feature must not
+     * set android.request.partialResultCount or partialResult to a value
+     * other than 1.
+     *
+     * This value must be set to 0 when a capture result contains buffers only
+     * and no metadata.
+     */
+    uint32_t partialResult;
+
+};
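+
+/*
+ * Example (non-normative): returning an early partial metadata result, a final
+ * metadata partial, and a buffers-only result for one frame, following the
+ * partialResult rules above. A C++ sketch assuming the generated types, an
+ * android.request.partialResultCount of 2, and that the callback's
+ * processCaptureResult() takes a single CaptureResult; see
+ * ICameraDeviceCallback.hal for the actual signature.
+ *
+ *     #include <android/hardware/camera/device/3.2/ICameraDeviceCallback.h>
+ *     using ::android::sp;
+ *     using ::android::hardware::hidl_vec;
+ *
+ *     void sendResults(const sp<ICameraDeviceCallback>& callback,
+ *                      uint32_t frameNumber,
+ *                      CameraMetadata earlyMetadata,  // e.g. 3A state, known early
+ *                      CameraMetadata finalMetadata,  // remaining result keys
+ *                      hidl_vec<StreamBuffer> buffers) {
+ *         CaptureResult early{};
+ *         early.frameNumber = frameNumber;
+ *         early.result = std::move(earlyMetadata);
+ *         early.partialResult = 1;                   // first of 2 metadata partials
+ *         callback->processCaptureResult(early);
+ *
+ *         CaptureResult fin{};
+ *         fin.frameNumber = frameNumber;
+ *         fin.result = std::move(finalMetadata);     // must not repeat earlier keys
+ *         fin.partialResult = 2;                     // last metadata partial
+ *         callback->processCaptureResult(fin);
+ *
+ *         CaptureResult buffersOnly{};
+ *         buffersOnly.frameNumber = frameNumber;
+ *         buffersOnly.outputBuffers = std::move(buffers);
+ *         buffersOnly.partialResult = 0;             // buffers only, no metadata
+ *         callback->processCaptureResult(buffersOnly);
+ *     }
+ */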