Merge "SnapdragonCamera: ZSL and manual 3A can be on at same time" into camera.lnx.1.0-dev.1.0
diff --git a/AndroidManifest.xml b/AndroidManifest.xml
index e44f9cc..1b8a88d 100755
--- a/AndroidManifest.xml
+++ b/AndroidManifest.xml
@@ -67,6 +67,7 @@
<activity
android:name="com.android.camera.PermissionsActivity"
android:label="@string/app_name"
+ android:launchMode="singleTop"
android:configChanges="orientation|screenSize|keyboardHidden"
android:parentActivityName="com.android.camera.CameraActivity" >
<meta-data
@@ -75,13 +76,14 @@
</activity>
<activity
- android:theme="@style/Theme.OneUISettings"
+ android:theme="@style/Theme.Settings"
android:name="com.android.camera.SettingsActivity"
android:clearTaskOnLaunch="true"
android:configChanges="orientation|screenSize|keyboardHidden"
android:icon="@mipmap/ic_launcher_camera"
android:label="@string/snapcam_app_name"
- android:launchMode="singleTop" >
+ android:launchMode="singleTop"
+ android:parentActivityName="com.android.camera.CameraActivity">
</activity>
<activity
diff --git a/res/layout/photo_module.xml b/res/layout/photo_module.xml
index c55433b..d65dbd4 100755
--- a/res/layout/photo_module.xml
+++ b/res/layout/photo_module.xml
@@ -117,11 +117,12 @@
android:id="@+id/blur_degree_bar"
android:orientation="horizontal"
android:layout_gravity="bottom"
- android:paddingBottom="80dp"
+ android:paddingBottom="130dp"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginBottom="30dip"
android:layout_marginLeft="30dip"
- android:layout_marginRight="30dip" />
+ android:layout_marginRight="30dip"
+ android:visibility="gone"/>
</FrameLayout>
</merge>
diff --git a/res/layout/preferences_category.xml b/res/layout/preferences_category.xml
index 3dae03b..e002c4e 100644
--- a/res/layout/preferences_category.xml
+++ b/res/layout/preferences_category.xml
@@ -33,7 +33,7 @@
android:gravity="center_vertical"
android:paddingLeft="15dp"
android:paddingTop="15dp"
- android:textAllCaps="true"
+ android:textAllCaps="false"
android:textColor="#ff999999"
android:textSize="14sp"
android:textStyle="bold" />
diff --git a/res/values/qcomarrays.xml b/res/values/qcomarrays.xml
index 02b4fbe..c11056d 100755
--- a/res/values/qcomarrays.xml
+++ b/res/values/qcomarrays.xml
@@ -910,5 +910,34 @@
<item>90</item>
<item>100</item>
</string-array>
+ <!-- Camera Preferences zoom dialog box entries -->
+ <string-array name="pref_camera_zoom_entries" translatable="false">
+ <item>@string/pref_camera_zoom_entry_off</item>
+ <item>@string/pref_camera_zoom_entry_1x</item>
+ <item>@string/pref_camera_zoom_entry_2x</item>
+ <item>@string/pref_camera_zoom_entry_3x</item>
+ <item>@string/pref_camera_zoom_entry_4x</item>
+ <item>@string/pref_camera_zoom_entry_5x</item>
+ <item>@string/pref_camera_zoom_entry_6x</item>
+ <item>@string/pref_camera_zoom_entry_7x</item>
+ <item>@string/pref_camera_zoom_entry_8x</item>
+ <item>@string/pref_camera_zoom_entry_9x</item>
+ <item>@string/pref_camera_zoom_entry_10x</item>
+ </string-array>
+
+ <string-array name="pref_camera_zoom_entryvalues" translatable="false">
+ <item>@string/pref_camera_zoom_default</item>
+ <item>1</item>
+ <item>2</item>
+ <item>3</item>
+ <item>4</item>
+ <item>5</item>
+ <item>6</item>
+ <item>7</item>
+ <item>8</item>
+ <item>9</item>
+ <item>10</item>
+ </string-array>
+
</resources>
diff --git a/res/values/qcomstrings.xml b/res/values/qcomstrings.xml
index 1693a88..5dcd4b5 100755
--- a/res/values/qcomstrings.xml
+++ b/res/values/qcomstrings.xml
@@ -1147,5 +1147,20 @@
<string name="pref_camera_bokeh_blur_degree_default" translatable="false">50</string>
<string name="pref_camera_bokeh_blur_degree_title" translatable="true">Bokeh Blur Value</string>
+ <!-- Default Zoom setting. -->
+ <string name="pref_camera_zoom_default" translatable="false">0</string>
+ <string name="pref_camera_zoom_title">Zoom</string>
+ <!-- Settings menu, Zoom choices -->
+ <string name="pref_camera_zoom_entry_off">Off</string>
+ <string name="pref_camera_zoom_entry_1x">1x</string>
+ <string name="pref_camera_zoom_entry_2x">2x</string>
+ <string name="pref_camera_zoom_entry_3x">3x</string>
+ <string name="pref_camera_zoom_entry_4x">4x</string>
+ <string name="pref_camera_zoom_entry_5x">5x</string>
+ <string name="pref_camera_zoom_entry_6x">6x</string>
+ <string name="pref_camera_zoom_entry_7x">7x</string>
+ <string name="pref_camera_zoom_entry_8x">8x</string>
+ <string name="pref_camera_zoom_entry_9x">9x</string>
+ <string name="pref_camera_zoom_entry_10x">10x</string>
</resources>
diff --git a/res/values/strings.xml b/res/values/strings.xml
index 94c901f..0b23104 100644
--- a/res/values/strings.xml
+++ b/res/values/strings.xml
@@ -772,4 +772,6 @@
<string name="delete_all_best_dialog_positive_bt">Delete All</string>
<string name="overflow_best_item1">Save All</string>
<string name="overflow_best_item2">Delete All</string>
+
+ <string name="settings_title">Settings</string>
</resources>
diff --git a/res/values/styles.xml b/res/values/styles.xml
index a48d453..a65538c 100644
--- a/res/values/styles.xml
+++ b/res/values/styles.xml
@@ -22,6 +22,9 @@
<style name="Theme.OneUISettings" parent="@android:style/Theme.Material.Light.NoActionBar.Fullscreen">
<item name="android:colorAccent">#5999e1</item>
</style>
+ <style name="Theme.Settings" parent="@android:style/Theme.Material.Light">
+ <item name="android:colorAccent">#5999e1</item>
+ </style>
<style name="Theme.Camera" parent="Theme.CameraBase">
<item name="android:windowFullscreen">true</item>
diff --git a/res/xml/camera_preferences.xml b/res/xml/camera_preferences.xml
index ac723f9..bf9263c 100755
--- a/res/xml/camera_preferences.xml
+++ b/res/xml/camera_preferences.xml
@@ -397,9 +397,7 @@
<ListPreference
camera:key="pref_camera_bokeh_blur_degree_key"
camera:defaultValue="@string/pref_camera_bokeh_blur_degree_default"
- camera:title="@string/pref_camera_bokeh_blur_degree_title"
- camera:entries="@array/pref_camera_bokeh_blur_degree_entries"
- camera:entryValues="@array/pref_camera_bokeh_blur_degree_entry_values" />
+ camera:title="@string/pref_camera_bokeh_blur_degree_title"/>
<ListPreference
@@ -408,4 +406,12 @@
camera:title="@string/pref_camera2_camera2_title"
camera:entries="@array/pref_camera2_camera2_entries"
camera:entryValues="@array/pref_camera2_camera2_entryvalues" />
+
+ <ListPreference
+ camera:key="pref_camera_zoom_key"
+ camera:defaultValue="@string/pref_camera_zoom_default"
+ camera:title="@string/pref_camera_zoom_title"
+ camera:entries="@array/pref_camera_zoom_entries"
+ camera:entryValues="@array/pref_camera_zoom_entryvalues" />
+
</PreferenceGroup>
diff --git a/res/xml/capture_preferences.xml b/res/xml/capture_preferences.xml
old mode 100644
new mode 100755
index 8dd3eb7..a92f849
--- a/res/xml/capture_preferences.xml
+++ b/res/xml/capture_preferences.xml
@@ -330,6 +330,13 @@
camera:title="@string/pref_camera2_histogram_title" />
<ListPreference
+ camera:defaultValue="@string/pref_camera2_auto_hdr_default"
+ camera:entries="@array/pref_camera2_auto_hdr_entries"
+ camera:entryValues="@array/pref_camera2_auto_hdr_entryvalues"
+ camera:key="pref_camera2_auto_hdr_key"
+ camera:title="@string/pref_camera2_auto_hdr_title" />
+
+ <ListPreference
camera:defaultValue="@string/pref_camera2_hdr_default"
camera:entries="@array/pref_camera2_hdr_entries"
camera:entryValues="@array/pref_camera2_hdr_entryvalues"
@@ -342,4 +349,11 @@
camera:title="@string/pref_camera2_saveraw_title"
camera:entries="@array/pref_camera2_saveraw_entries"
camera:entryValues="@array/pref_camera2_saveraw_entryvalues" />
+
+ <ListPreference
+ camera:key="pref_camera2_zoom_key"
+ camera:defaultValue="@string/pref_camera_zoom_default"
+ camera:title="@string/pref_camera_zoom_title"
+ camera:entries="@array/pref_camera_zoom_entries"
+ camera:entryValues="@array/pref_camera_zoom_entryvalues" />
</PreferenceGroup>
diff --git a/res/xml/setting_menu_preferences.xml b/res/xml/setting_menu_preferences.xml
index d781310..3c31124 100644
--- a/res/xml/setting_menu_preferences.xml
+++ b/res/xml/setting_menu_preferences.xml
@@ -338,5 +338,14 @@
android:layout="@layout/preference"
android:summary="%s"
android:title="@string/pref_camera2_saveraw_title" />
+
+ <ListPreference
+ android:key="pref_camera2_zoom_key"
+ android:defaultValue="@string/pref_camera_zoom_default"
+ android:title="@string/pref_camera_zoom_title"
+ android:summary="%s"
+ android:entries="@array/pref_camera_zoom_entries"
+ android:entryValues="@array/pref_camera_zoom_entryvalues" />
+
</PreferenceCategory>
</PreferenceScreen>
diff --git a/res/xml/video_preferences.xml b/res/xml/video_preferences.xml
index 2d9e2fb..3fafddd 100644
--- a/res/xml/video_preferences.xml
+++ b/res/xml/video_preferences.xml
@@ -186,4 +186,10 @@
camera:title="@string/pref_camera_video_rotation_title"
camera:entries="@array/pref_camera_video_rotation_entries"
camera:entryValues="@array/pref_camera_video_rotation_entryvalues" />
+ <ListPreference
+ camera:key="pref_camera_zoom_key"
+ camera:defaultValue="@string/pref_camera_zoom_default"
+ camera:title="@string/pref_camera_zoom_title"
+ camera:entries="@array/pref_camera_zoom_entries"
+ camera:entryValues="@array/pref_camera_zoom_entryvalues" />
</PreferenceGroup>
diff --git a/src/com/android/camera/CameraActivity.java b/src/com/android/camera/CameraActivity.java
index 32d499a..ea4bc03 100755
--- a/src/com/android/camera/CameraActivity.java
+++ b/src/com/android/camera/CameraActivity.java
@@ -765,7 +765,7 @@
if (mThumbnail == null) return;
if (mThumbnailDrawable != null) {
mThumbnail.setImageDrawable(mThumbnailDrawable);
- if (!isSecureCamera()) {
+ if (!isSecureCamera() && !isCaptureIntent()) {
mThumbnail.setVisibility(View.VISIBLE);
} else {
//under SecureCamera and UbiFocus mode, when back from RefocusActivity,if not save
@@ -1432,7 +1432,7 @@
}
}
- private boolean isCaptureIntent() {
+ public boolean isCaptureIntent() {
if (MediaStore.ACTION_VIDEO_CAPTURE.equals(getIntent().getAction())
|| MediaStore.ACTION_IMAGE_CAPTURE.equals(getIntent().getAction())
|| MediaStore.ACTION_IMAGE_CAPTURE_SECURE.equals(getIntent().getAction())) {
@@ -1774,6 +1774,7 @@
if(!mSecureCamera && (!isRequestShown || !hasCriticalPermissions())) {
Log.v(TAG, "Start Request Permission");
Intent intent = new Intent(this, PermissionsActivity.class);
+ intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
startActivity(intent);
SharedPreferences.Editor editor = prefs.edit();
editor.putBoolean(CameraSettings.KEY_REQUEST_PERMISSION, true);
diff --git a/src/com/android/camera/CameraSettings.java b/src/com/android/camera/CameraSettings.java
old mode 100755
new mode 100644
index 4bd88f7..2a81228
--- a/src/com/android/camera/CameraSettings.java
+++ b/src/com/android/camera/CameraSettings.java
@@ -127,6 +127,7 @@
public static final String KEY_LONGSHOT = "pref_camera_longshot_key";
public static final String KEY_INSTANT_CAPTURE = "pref_camera_instant_capture_key";
+ public static final String KEY_ZOOM = "pref_camera_zoom_key";
public static final String KEY_BOKEH_MODE = "pref_camera_bokeh_mode_key";
public static final String KEY_BOKEH_MPO = "pref_camera_bokeh_mpo_key";
@@ -662,6 +663,18 @@
}
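+ // Build the list of whole zoom levels (0 up to the maximum zoom ratio) exposed by the legacy camera parameters.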
+ private static List<String> getSupportedZoomLevel(Parameters params) {
+ ArrayList<String> supported = new ArrayList<String>();
+ int zoomMaxIdx = params.getMaxZoom();
+ List <Integer> zoomRatios = params.getZoomRatios();
+ int zoomMax = zoomRatios.get(zoomMaxIdx)/100;
+
+ for (int zoomLevel = 0; zoomLevel <= zoomMax; zoomLevel++) {
+ supported.add(String.valueOf(zoomLevel));
+ }
+ return supported;
+ }
+
private void qcomInitPreferences(PreferenceGroup group){
//Qcom Preference add here
ListPreference powerMode = group.findPreference(KEY_POWER_MODE);
@@ -702,6 +715,8 @@
ListPreference bokehMode = group.findPreference(KEY_BOKEH_MODE);
ListPreference bokehMpo = group.findPreference(KEY_BOKEH_MPO);
ListPreference bokehBlurDegree = group.findPreference(KEY_BOKEH_BLUR_VALUE);
+ ListPreference zoomLevel = group.findPreference(KEY_ZOOM);
+
if (instantCapture != null) {
if (!isInstantCaptureSupported(mParameters)) {
@@ -855,6 +870,11 @@
filterUnsupportedOptions(group,
manualExposure, getSupportedManualExposureModes(mParameters));
}
+
+ if (zoomLevel != null) {
+ filterUnsupportedOptions(group,
+ zoomLevel, getSupportedZoomLevel(mParameters));
+ }
}
private void initPreference(PreferenceGroup group) {
diff --git a/src/com/android/camera/CaptureModule.java b/src/com/android/camera/CaptureModule.java
index 0a8da71..7c3e9b4 100644
--- a/src/com/android/camera/CaptureModule.java
+++ b/src/com/android/camera/CaptureModule.java
@@ -147,7 +147,7 @@
public static final int INTENT_MODE_CAPTURE_SECURE = 3;
private static final int BACK_MODE = 0;
private static final int FRONT_MODE = 1;
- private static final int CANCEL_TOUCH_FOCUS_DELAY = 3000;
+ private static final int CANCEL_TOUCH_FOCUS_DELAY = 5000;
private static final int OPEN_CAMERA = 0;
private static final int CANCEL_TOUCH_FOCUS = 1;
private static final int MAX_NUM_CAM = 3;
@@ -239,6 +239,8 @@
new CameraCharacteristics.Key<>("org.codeaurora.qcamera3.histogram.max_count", Integer.class);
public static CaptureResult.Key<int[]> histogramStats =
new CaptureResult.Key<>("org.codeaurora.qcamera3.histogram.stats", int[].class);
+ public static CameraCharacteristics.Key<Integer> isHdrScene =
+ new CameraCharacteristics.Key<>("org.codeaurora.qcamera3.stats.is_hdr_scene", Integer.class);
private boolean[] mTakingPicture = new boolean[MAX_NUM_CAM];
private int mControlAFMode = CameraMetadata.CONTROL_AF_MODE_CONTINUOUS_PICTURE;
private int mLastResultAFState = -1;
@@ -376,6 +378,8 @@
private Size mSupportedMaxPictureSize;
private Size mSupportedRawPictureSize;
+ private long mIsoExposureTime;
+ private int mIsoSensitivity;
private class SelfieThread extends Thread {
public void run() {
@@ -1583,6 +1587,7 @@
captureBuilder.set(CaptureRequest.JPEG_THUMBNAIL_SIZE, mVideoSnapshotThumbSize);
captureBuilder.set(CaptureRequest.JPEG_THUMBNAIL_QUALITY, (byte)80);
applyVideoSnapshot(captureBuilder, id);
+ applyZoom(captureBuilder, id);
captureBuilder.addTarget(mVideoSnapshotImageReader.getSurface());
@@ -2121,6 +2126,7 @@
@Override
public void onPauseBeforeSuper() {
+ cancelTouchFocus();
mPaused = true;
mToast = null;
mUI.onPause();
@@ -2167,10 +2173,24 @@
mState[i] = STATE_PREVIEW;
}
mLongshotActive = false;
- mZoomValue = 1.0f;
+ updateZoom();
updatePreviewSurfaceReadyState(false);
}
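+ // Cancel any pending touch-to-focus request so the AF state machine is reset before pausing.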
+ private void cancelTouchFocus() {
+ if (getCameraMode() == DUAL_MODE) {
+ if(mState[BAYER_ID] == STATE_WAITING_TOUCH_FOCUS) {
+ cancelTouchFocus(BAYER_ID);
+ } else if (mState[MONO_ID] == STATE_WAITING_TOUCH_FOCUS) {
+ cancelTouchFocus(MONO_ID);
+ }
+ } else {
+ if (mState[getMainCameraId()] == STATE_WAITING_TOUCH_FOCUS) {
+ cancelTouchFocus(getMainCameraId());
+ }
+ }
+ }
+
private ArrayList<Integer> getFrameProcFilterId() {
ArrayList<Integer> filters = new ArrayList<Integer>();
@@ -2991,6 +3011,16 @@
}
}
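+ // Restore the zoom level from the settings; "0" (Off) maps back to 1.0x, i.e. no digital zoom.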
+ private void updateZoom() {
+ String zoomStr = mSettingsManager.getValue(SettingsManager.KEY_ZOOM);
+ int zoom = Integer.parseInt(zoomStr);
+ if (zoom != 0) {
+ mZoomValue = (float) zoom;
+ } else {
+ mZoomValue = 1.0f;
+ }
+ }
+
private boolean startRecordingVideo(final int cameraId) {
if (null == mCameraDevice[cameraId]) {
return false;
@@ -3125,7 +3155,7 @@
mCurrentSession = cameraCaptureSession;
mCaptureSession[cameraId] = cameraCaptureSession;
try {
- setUpVideoCaptureRequestBuilder(mVideoRequestBuilder);
+ setUpVideoCaptureRequestBuilder(mVideoRequestBuilder, cameraId);
mCurrentSession.setRepeatingRequest(mVideoRequestBuilder.build(),
mCaptureCallback, mCameraHandler);
} catch (CameraAccessException e) {
@@ -3191,7 +3221,7 @@
}
}
- private void setUpVideoCaptureRequestBuilder(CaptureRequest.Builder builder) {
+ private void setUpVideoCaptureRequestBuilder(CaptureRequest.Builder builder,int cameraId) {
builder.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
builder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest
.CONTROL_AF_MODE_CONTINUOUS_VIDEO);
@@ -3201,6 +3231,7 @@
applyColorEffect(builder);
applyVideoFlash(builder);
applyFaceDetection(builder);
+ applyZoom(builder, cameraId);
}
private void updateVideoFlash() {
@@ -3999,8 +4030,16 @@
private void applySceneMode(CaptureRequest.Builder request) {
String value = mSettingsManager.getValue(SettingsManager.KEY_SCENE_MODE);
+ String autoHdr = mSettingsManager.getValue(SettingsManager.KEY_AUTO_HDR);
if (value == null) return;
int mode = Integer.parseInt(value);
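+ // With Auto HDR enabled and scene mode set to Auto, force the HDR scene mode only when the sensor reports an HDR scene.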
+ if (autoHdr != null && "enable".equals(autoHdr) && "0".equals(value)) {
+ if (mSettingsManager.isHdrScene(getMainCameraId())) {
+ request.set(CaptureRequest.CONTROL_SCENE_MODE, CaptureRequest.CONTROL_SCENE_MODE_HDR);
+ request.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_USE_SCENE_MODE);
+ }
+ return;
+ }
if(getPostProcFilterId(mode) != PostProcessor.FILTER_NONE || mCaptureHDRTestEnable) {
request.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
return;
@@ -4026,12 +4065,26 @@
String value = mSettingsManager.getValue(SettingsManager.KEY_ISO);
if (value == null) return;
if (value.equals("auto")) {
- request.set(SELECT_PRIORITY, null);
- request.set(ISO_EXP, null);
+ request.set(SELECT_PRIORITY, 0);
+ request.set(ISO_EXP, 0L);
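+ // Back in auto ISO: restore the previously cached manual exposure time and sensitivity if they were cleared.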
+ if (request.get(CaptureRequest.SENSOR_EXPOSURE_TIME) == null) {
+ request.set(CaptureRequest.SENSOR_EXPOSURE_TIME, mIsoExposureTime);
+ }
+ if (request.get(CaptureRequest.SENSOR_SENSITIVITY) == null) {
+ request.set(CaptureRequest.SENSOR_SENSITIVITY, mIsoSensitivity);
+ }
} else {
- long intValue = Integer.parseInt(value);
+ long intValue = SettingsManager.KEY_ISO_INDEX.get(value);
request.set(SELECT_PRIORITY, 0);
request.set(ISO_EXP, intValue);
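+ // Cache the current manual exposure values, then clear them so the vendor ISO priority takes effect.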
+ if (request.get(CaptureRequest.SENSOR_EXPOSURE_TIME) != null) {
+ mIsoExposureTime = request.get(CaptureRequest.SENSOR_EXPOSURE_TIME);
+ }
+ if (request.get(CaptureRequest.SENSOR_SENSITIVITY) != null) {
+ mIsoSensitivity = request.get(CaptureRequest.SENSOR_SENSITIVITY);
+ }
+ request.set(CaptureRequest.SENSOR_EXPOSURE_TIME, null);
+ request.set(CaptureRequest.SENSOR_SENSITIVITY, null);
}
}
@@ -4274,6 +4327,7 @@
updateVideoFlash();
return;
case SettingsManager.KEY_FLASH_MODE:
+ case SettingsManager.KEY_AUTO_HDR:
case SettingsManager.KEY_SAVERAW:
case SettingsManager.KEY_HDR:
if (count == 0) restartSession(false);
@@ -4431,8 +4485,14 @@
private Size getMaxPictureSizeLessThan4k() {
Size[] sizes = mSettingsManager.getSupportedOutputSize(getMainCameraId(), ImageFormat.JPEG);
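+ // Prefer a sub-4K picture size whose aspect ratio matches the current video size.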
+ float ratio = (float) mVideoSize.getWidth() / mVideoSize.getHeight();
for (Size size : sizes) {
- if (!is4kSize(size)) return size;
+ if (!is4kSize(size)) {
+ float pictureRatio = (float) size.getWidth() / size.getHeight();
+ if (Math.abs(pictureRatio - ratio) < 0.01) {
+ return size;
+ }
+ }
}
return sizes[sizes.length - 1];
}
diff --git a/src/com/android/camera/CaptureUI.java b/src/com/android/camera/CaptureUI.java
old mode 100644
new mode 100755
index 58ecdfb..025a17a
--- a/src/com/android/camera/CaptureUI.java
+++ b/src/com/android/camera/CaptureUI.java
@@ -488,6 +488,9 @@
mDecodeTaskForReview = new CaptureUI.DecodeImageForReview(jpegData, orientation, mirror);
mDecodeTaskForReview.execute();
if (getCurrentIntentMode() != CaptureModule.INTENT_MODE_NORMAL) {
+ if (mFilterMenuStatus == FILTER_MENU_ON) {
+ removeFilterMenu(false);
+ }
mPreviewLayout.setVisibility(View.VISIBLE);
CameraUtil.fadeIn(mReviewDoneButton);
CameraUtil.fadeIn(mReviewRetakeButton);
@@ -496,6 +499,9 @@
protected void showRecordVideoForReview(Bitmap preview) {
if (getCurrentIntentMode() != CaptureModule.INTENT_MODE_NORMAL) {
+ if (mFilterMenuStatus == FILTER_MENU_ON) {
+ removeFilterMenu(false);
+ }
mReviewImage.setImageBitmap(preview);
mPreviewLayout.setVisibility(View.VISIBLE);
mReviewPlayButton.setVisibility(View.VISIBLE);
@@ -890,13 +896,14 @@
public void showUIafterRecording() {
mCameraControls.setVideoMode(false);
- mSceneModeLabelRect.setVisibility(View.VISIBLE);
mFrontBackSwitcher.setVisibility(View.VISIBLE);
mFilterModeSwitcher.setVisibility(View.VISIBLE);
mSceneModeSwitcher.setVisibility(View.VISIBLE);
mMakeupButton.setVisibility(View.VISIBLE);
mIsVideoUI = false;
mPauseButton.setVisibility(View.INVISIBLE);
+ // Exiting recording mode needs to refresh the scene mode label.
+ showSceneModeLabel();
}
public void addFilterMode() {
diff --git a/src/com/android/camera/PermissionsActivity.java b/src/com/android/camera/PermissionsActivity.java
old mode 100644
new mode 100755
index e8df5c5..91699c3
--- a/src/com/android/camera/PermissionsActivity.java
+++ b/src/com/android/camera/PermissionsActivity.java
@@ -44,11 +44,6 @@
super.onCreate(savedInstanceState);
mIntent = getIntent();
mIsReturnResult = false;
- }
-
- @Override
- protected void onResume() {
- super.onResume();
if (!mCriticalPermissionDenied && !mIsReturnResult) {
mNumPermissionsToRequest = 0;
checkPermissions();
@@ -56,7 +51,7 @@
mCriticalPermissionDenied = false;
}
}
-
+
private void checkPermissions() {
if (checkSelfPermission(Manifest.permission.CAMERA)
!= PackageManager.PERMISSION_GRANTED) {
diff --git a/src/com/android/camera/PhotoMenu.java b/src/com/android/camera/PhotoMenu.java
index 6e83f6d..f1ba938 100755
--- a/src/com/android/camera/PhotoMenu.java
+++ b/src/com/android/camera/PhotoMenu.java
@@ -239,7 +239,8 @@
CameraSettings.KEY_MANUAL_WB,
CameraSettings.KEY_MANUAL_FOCUS,
CameraSettings.KEY_SELFIE_MIRROR,
- CameraSettings.KEY_SHUTTER_SOUND
+ CameraSettings.KEY_SHUTTER_SOUND,
+ CameraSettings.KEY_ZOOM
};
initSwitchItem(CameraSettings.KEY_CAMERA_ID, mFrontBackSwitcher);
diff --git a/src/com/android/camera/PhotoModule.java b/src/com/android/camera/PhotoModule.java
old mode 100755
new mode 100644
index f3bec26..49cb099
--- a/src/com/android/camera/PhotoModule.java
+++ b/src/com/android/camera/PhotoModule.java
@@ -22,6 +22,7 @@
import android.content.ContentResolver;
import android.content.Context;
import android.content.Intent;
+import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
@@ -51,6 +52,7 @@
import android.os.Message;
import android.os.MessageQueue;
import android.os.SystemClock;
+import android.preference.PreferenceManager;
import android.provider.MediaStore;
import android.util.DisplayMetrics;
import android.util.Log;
@@ -222,6 +224,7 @@
private static final String PERSIST_CAPTURE_ANIMATION = "persist.camera.capture.animate";
private static final boolean PERSIST_SKIP_MEM_CHECK =
android.os.SystemProperties.getBoolean("persist.camera.perf.skip_memck", false);
+ private static final String PERSIST_ZZHDR_ENABLE = "persist.camera.zzhdr.enable";
private static final int MINIMUM_BRIGHTNESS = 0;
private static final int MAXIMUM_BRIGHTNESS = 6;
@@ -360,6 +363,8 @@
private float[] mR = new float[16];
private int mHeading = -1;
+ private static final int MAX_ZOOM = 10;
+ private int[] mZoomIdxTbl = {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1};
// True if all the parameters needed to start preview is ready.
private boolean mCameraPreviewParamsReady = false;
@@ -1275,6 +1280,7 @@
if ( srcFile.renameTo(dstFile) ) {
Size s = mParameters.getPictureSize();
String pictureFormat = mParameters.get(KEY_PICTURE_FORMAT);
+ Log.d(TAG, "capture:" + title + "." + pictureFormat);
mActivity.getMediaSaveService().addImage(
null, title, date, mLocation, s.width, s.height,
0, null, mOnMediaSavedListener, mContentResolver, pictureFormat);
@@ -1287,9 +1293,7 @@
private byte[] flipJpeg(byte[] jpegData, int orientation, int jpegOrientation) {
Bitmap srcBitmap = BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length);
Matrix m = new Matrix();
- if(orientation == 270) {
- m.preScale(-1, 1);
- } else { //if it's 90
+ if(orientation == 270 || orientation == 90) {
// Judge whether the picture or phone is horizontal screen
if (jpegOrientation == 0 || jpegOrientation == 180) {
m.preScale(-1, 1);
@@ -1486,6 +1490,7 @@
exif.setTag(directionTag);
}
String mPictureFormat = mParameters.get(KEY_PICTURE_FORMAT);
+ Log.d(TAG, "capture:" + title + "." + mPictureFormat);
mActivity.getMediaSaveService().addImage(
jpegData, title, date, mLocation, width, height,
orientation, exif, mOnMediaSavedListener,
@@ -1576,17 +1581,16 @@
public void onStartTrackingTouch(SeekBar bar) {
}
public void onProgressChanged(SeekBar bar, int progress, boolean fromtouch) {
- if (mPreferenceGroup != null) {
- ListPreference blurValue = mPreferenceGroup.findPreference(
- CameraSettings.KEY_BOKEH_BLUR_VALUE);
- if (blurValue != null) {
- blurValue.setValue(""+progress);
- }
+ if (mParameters != null) {
+ mParameters.set(CameraSettings.KEY_QC_BOKEH_BLUR_VALUE, progress);
}
- mParameters.set(CameraSettings.KEY_QC_BOKEH_BLUR_VALUE, progress);
Log.d(TAG,"seekbar bokeh degree = "+ progress);
}
public void onStopTrackingTouch(SeekBar bar) {
+ final SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ SharedPreferences.Editor editor = prefs.edit();
+ editor.putInt(CameraSettings.KEY_BOKEH_BLUR_VALUE, bar.getProgress());
+ editor.apply();
}
};
@@ -1993,7 +1997,9 @@
colorEffect = mParameters.getColorEffect();
String defaultEffect = mActivity.getString(R.string.pref_camera_coloreffect_default);
if (CameraUtil.SCENE_MODE_HDR.equals(mSceneMode)) {
- disableLongShot = true;
+ if(SystemProperties.getInt(PERSIST_ZZHDR_ENABLE, 0) != 1) {
+ disableLongShot = true;
+ }
if (colorEffect != null & !colorEffect.equals(defaultEffect)) {
// Change the colorEffect to default(None effect) when HDR ON.
colorEffect = defaultEffect;
@@ -3725,6 +3731,9 @@
if(mManual3AEnabled != 0) {
mManual3AEnabled = 0;
}
+ final SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(mActivity);
+ final int degree = prefs.getInt(CameraSettings.KEY_BOKEH_BLUR_VALUE,50);
+ bokehBlurDegree = String.valueOf(degree);
mActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
@@ -3736,14 +3745,11 @@
mUI.overrideSettings(CameraSettings.KEY_LONGSHOT,
mActivity.getString(R.string.pref_camera_longshot_default));
mBlurDegreeProgressBar.setVisibility(View.VISIBLE);
- mBlurDegreeProgressBar.setProgress(50);
+ mBlurDegreeProgressBar.setProgress(degree);
}
});
- mParameters.set(CameraSettings.KEY_QC_BOKEH_MODE, bokehMode);
- mParameters.set(CameraSettings.KEY_QC_BOKEH_MPO_MODE, bokehMpo);
- mParameters.set(CameraSettings.KEY_QC_BOKEH_BLUR_VALUE, bokehBlurDegree);
-
} else {
+ bokehBlurDegree = "0";
mActivity.runOnUiThread(new Runnable() {
@Override
public void run() {
@@ -3755,6 +3761,9 @@
}
});
}
+ mParameters.set(CameraSettings.KEY_QC_BOKEH_MODE, bokehMode);
+ mParameters.set(CameraSettings.KEY_QC_BOKEH_MPO_MODE, bokehMpo);
+ mParameters.set(CameraSettings.KEY_QC_BOKEH_BLUR_VALUE, bokehBlurDegree);
Log.v(TAG, "Bokeh Mode = " + bokehMode + " bokehMpo = " + bokehMpo +
" bokehBlurDegree = " + bokehBlurDegree);
}
@@ -3846,6 +3855,61 @@
mParameters.setMeteringAreas(mFocusManager.getMeteringAreas());
}
}
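+ // Apply the zoom level chosen in the settings menu by stepping the zoom index toward the matching zoom ratio.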
+ private void setZoomMenuValue() {
+ String zoomMenuValue = mPreferences.getString(CameraSettings.KEY_ZOOM,
+ mActivity.getString(R.string.pref_camera_zoom_default));
+ if (!zoomMenuValue.equals("0")) {
+ int zoomValue = Integer.parseInt(zoomMenuValue);
+ if (mZoomIdxTbl[0] == -1) {
+ /* update the index table once */
+ Log.d(TAG, "Update the zoom index table.");
+ List<Integer> zoomRatios = mParameters.getZoomRatios();
+ int lastZoomIdx = 0;
+ for (int zoom = 1; zoom <= MAX_ZOOM; zoom++) {
+ int zoomIdx = zoomRatios.indexOf(zoom*100);
+ if (zoomIdx == -1) {
+ Log.d(TAG, "Can't find matching zoom value "+zoom);
+ int nextZoom = 0;
+ while ((++lastZoomIdx < zoomRatios.size()) &&
+ (nextZoom < (zoom*100))){
+ nextZoom = zoomRatios.get(lastZoomIdx);
+ zoomIdx = lastZoomIdx;
+ }
+ if (lastZoomIdx < zoomRatios.size()) {
+ zoomIdx = lastZoomIdx - 1;
+ } else {
+ break;
+ }
+ }
+ mZoomIdxTbl[zoom-1] = zoomIdx;
+ lastZoomIdx = zoomIdx;
+ }
+ }
+
+ if ((zoomValue <= mZoomIdxTbl.length) &&
+ (mZoomIdxTbl[zoomValue-1] != -1)) {
+ int step = 1;
+ int cur_zoom = mParameters.getZoom();
+ Log.d(TAG, "zoom index = "+mZoomIdxTbl[zoomValue-1]+", cur index = "+cur_zoom);
+ if (cur_zoom > mZoomIdxTbl[zoomValue-1]) {
+ step = -1;
+ }
+
+ /* move zoom slowly */
+ while (cur_zoom != mZoomIdxTbl[zoomValue-1]) {
+ cur_zoom += step;
+ mParameters.setZoom(cur_zoom);
+ try {
+ Thread.sleep(25);
+ } catch(InterruptedException e) {
+ }
+ }
+ mParameters.setZoom(mZoomIdxTbl[zoomValue-1]);
+ } else {
+ Log.e(TAG, "Zoom value "+zoomValue+" is not supported!");
+ }
+ }
+ }
/** This can run on a background thread, so don't do UI updates here.*/
private boolean updateCameraParametersPreference() {
@@ -4132,6 +4196,8 @@
mParameters.set(CameraSettings.KEY_TS_MAKEUP_PARAM_CLEAN, makeupCleanValue);
}
+ setZoomMenuValue();
+
//QCom related parameters updated here.
qcomUpdateCameraParametersPreference();
return doGcamModeSwitch;
diff --git a/src/com/android/camera/PhotoUI.java b/src/com/android/camera/PhotoUI.java
old mode 100644
new mode 100755
index 733868a..84a6f90
--- a/src/com/android/camera/PhotoUI.java
+++ b/src/com/android/camera/PhotoUI.java
@@ -559,7 +559,7 @@
}
public void initializeControlByIntent() {
- if (!mActivity.isSecureCamera()) {
+ if (!mActivity.isSecureCamera() && !mActivity.isCaptureIntent()) {
mThumbnail = (ImageView) mRootView.findViewById(R.id.preview_thumb);
mThumbnail.setOnClickListener(new OnClickListener() {
@Override
@@ -1229,6 +1229,7 @@
mLocationDialog.dismiss();
}
mLocationDialog = null;
+ mMenu.animateSlideOutPreviewMenu();
}
public void initDisplayChangeListener() {
diff --git a/src/com/android/camera/SettingsActivity.java b/src/com/android/camera/SettingsActivity.java
index 0444fc8..d3f0b5a 100644
--- a/src/com/android/camera/SettingsActivity.java
+++ b/src/com/android/camera/SettingsActivity.java
@@ -29,6 +29,7 @@
package com.android.camera;
+import android.app.ActionBar;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.SharedPreferences;
@@ -108,6 +109,14 @@
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
+ int flag = WindowManager.LayoutParams.FLAG_FULLSCREEN;
+ Window window = getWindow();
+ window.setFlags(flag, flag);
+ ActionBar actionBar = getActionBar();
+ if (actionBar != null) {
+ actionBar.setDisplayHomeAsUpEnabled(true);
+ actionBar.setTitle(getResources().getString(R.string.settings_title));
+ }
final boolean isSecureCamera = getIntent().getBooleanExtra(
CameraUtil.KEY_IS_SECURE_CAMERA, false);
if (isSecureCamera) {
@@ -171,9 +180,14 @@
set.add(SettingsManager.KEY_MONO_ONLY);
set.add(SettingsManager.KEY_CLEARSIGHT);
- PreferenceScreen parent = getPreferenceScreen();
- PreferenceGroup developer = (PreferenceGroup)findPreference("developer");
- parent.removePreference(developer);
+ PreferenceGroup developer = (PreferenceGroup) findPreference("developer");
+ // Before restoring settings: if the current mode is not developer mode, the developer
+ // preferenceGroup has already been removed on camera launch, so removing it
+ // again would cause a crash.
+ if (developer != null) {
+ PreferenceScreen parent = getPreferenceScreen();
+ parent.removePreference(developer);
+ }
}
CharSequence[] entries = mSettingsManager.getEntries(SettingsManager.KEY_SCENE_MODE);
@@ -203,6 +217,7 @@
updatePreference(SettingsManager.KEY_EXPOSURE);
updatePreference(SettingsManager.KEY_VIDEO_HIGH_FRAME_RATE);
updatePreference(SettingsManager.KEY_VIDEO_ENCODER);
+ updatePreference(SettingsManager.KEY_ZOOM);
Map<String, SettingsManager.Values> map = mSettingsManager.getValuesMap();
Set<Map.Entry<String, SettingsManager.Values>> set = map.entrySet();
diff --git a/src/com/android/camera/SettingsManager.java b/src/com/android/camera/SettingsManager.java
old mode 100644
new mode 100755
index 638b89f..2e207b9
--- a/src/com/android/camera/SettingsManager.java
+++ b/src/com/android/camera/SettingsManager.java
@@ -142,8 +142,12 @@
public static final String KEY_SATURATION_LEVEL = "pref_camera2_saturation_level_key";
public static final String KEY_ANTI_BANDING_LEVEL = "pref_camera2_anti_banding_level_key";
public static final String KEY_HISTOGRAM = "pref_camera2_histogram_key";
+ public static final String KEY_AUTO_HDR = "pref_camera2_auto_hdr_key";
public static final String KEY_HDR = "pref_camera2_hdr_key";
public static final String KEY_SAVERAW = "pref_camera2_saveraw_key";
+ public static final String KEY_ZOOM = "pref_camera2_zoom_key";
+
+ public static final HashMap<String, Integer> KEY_ISO_INDEX = new HashMap<String, Integer>();
private static final String TAG = "SnapCam_SettingsManager";
@@ -169,6 +173,18 @@
return mFilteredKeys;
}
+ static {
+ //ISO values vendor tag
+ KEY_ISO_INDEX.put("auto", 0);
+ KEY_ISO_INDEX.put("deblur", 1);
+ KEY_ISO_INDEX.put("100", 2);
+ KEY_ISO_INDEX.put("100", 2);
+ KEY_ISO_INDEX.put("200", 3);
+ KEY_ISO_INDEX.put("400", 4);
+ KEY_ISO_INDEX.put("800", 5);
+ KEY_ISO_INDEX.put("1600", 6);
+ }
+
private SettingsManager(Context context) {
mListeners = new ArrayList<>();
mCharacteristics = new ArrayList<>();
@@ -272,6 +288,7 @@
filterPreferences(cameraId);
initDependencyTable();
initializeValueMap();
+ filterChromaflashPictureSizeOptions();
}
private Size parseSize(String value) {
@@ -594,6 +611,7 @@
ListPreference antiBandingLevel = mPreferenceGroup.findPreference(KEY_ANTI_BANDING_LEVEL);
ListPreference histogram = mPreferenceGroup.findPreference(KEY_HISTOGRAM);
ListPreference hdr = mPreferenceGroup.findPreference(KEY_HDR);
+ ListPreference zoom = mPreferenceGroup.findPreference(KEY_ZOOM);
if (whiteBalance != null) {
if (filterUnsupportedOptions(whiteBalance, getSupportedWhiteBalanceModes(cameraId))) {
@@ -731,6 +749,13 @@
removePreference(mPreferenceGroup, KEY_SELFIE_FLASH);
removePreference(mPreferenceGroup, KEY_SELFIEMIRROR);
}
+
+ if (zoom != null) {
+ if (filterUnsupportedOptions(zoom,
+ getSupportedZoomLevel(cameraId))) {
+ mFilteredKeys.add(zoom.getKey());
+ }
+ }
}
private void runTimeUpdateDependencyOptions(ListPreference pref) {
@@ -738,6 +763,8 @@
if (pref.getKey().equals(KEY_VIDEO_QUALITY)) {
filterHFROptions();
filterVideoEncoderOptions();
+ } else if (pref.getKey().equals(KEY_SCENE_MODE)) {
+ filterChromaflashPictureSizeOptions();
}
}
@@ -825,6 +852,31 @@
}
}
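+ // While the Chroma Flash scene mode is selected, restrict picture-size choices to resolutions larger than CIF (352x288); otherwise restore the full list.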
+ private void filterChromaflashPictureSizeOptions() {
+ String scene = getValue(SettingsManager.KEY_SCENE_MODE);
+ ListPreference picturePref = mPreferenceGroup.findPreference(KEY_PICTURE_SIZE);
+ if (picturePref == null) return;
+ picturePref.reloadInitialEntriesAndEntryValues();
+ if (Integer.parseInt(scene) == SCENE_MODE_CHROMAFLASH_INT) {
+ if (filterUnsupportedOptions(picturePref, getSupportedChromaFlashPictureSize())) {
+ mFilteredKeys.add(picturePref.getKey());
+ }
+ // If the picture size is currently CIF/QVGA or smaller, switch it to the smallest supported size.
+ Size pictureSize = parseSize(getValue(KEY_PICTURE_SIZE));
+ if (pictureSize.getWidth() <= 352 && pictureSize.getHeight() <= 288) {
+ CharSequence[] entryValues = picturePref.getEntryValues();
+ int size = entryValues.length;
+ CharSequence smallerSize = entryValues[size - 1];
+ setValue(KEY_PICTURE_SIZE, smallerSize.toString());
+ }
+ } else {
+ if (filterUnsupportedOptions(picturePref, getSupportedPictureSize(
+ getCurrentCameraId()))) {
+ mFilteredKeys.add(picturePref.getKey());
+ }
+ }
+ }
+
private void filterHFROptions() {
ListPreference hfrPref = mPreferenceGroup.findPreference(KEY_VIDEO_HIGH_FRAME_RATE);
if (hfrPref != null) {
@@ -836,6 +888,31 @@
}
}
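+ // Collect the JPEG output sizes (including high-resolution sizes) larger than 352x288 for Chroma Flash.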
+ private List<String> getSupportedChromaFlashPictureSize() {
+ StreamConfigurationMap map = mCharacteristics.get(getCurrentCameraId()).get(
+ CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
+ Size[] sizes = map.getOutputSizes(ImageFormat.JPEG);
+ List<String> res = new ArrayList<>();
+ if (sizes != null) {
+ for (int i = 0; i < sizes.length; i++) {
+ if (sizes[i].getWidth() > 352 && sizes[i].getHeight() > 288) {
+ res.add(sizes[i].toString());
+ }
+ }
+ }
+
+ Size[] highResSizes = map.getHighResolutionOutputSizes(ImageFormat.JPEG);
+ if (highResSizes != null) {
+ for (int i = 0; i < highResSizes.length; i++) {
+ if (highResSizes[i].getWidth() > 352 && highResSizes[i].getHeight() > 288) {
+ res.add(highResSizes[i].toString());
+ }
+ }
+ }
+
+ return res;
+ }
+
private List<String> getSupportedHighFrameRate() {
ArrayList<String> supported = new ArrayList<String>();
supported.add("off");
@@ -944,6 +1021,12 @@
return maxAfRegions != null && maxAfRegions > 0;
}
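+ // Query the vendor tag that reports whether the current scene is detected as HDR.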
+ public boolean isHdrScene(int id) {
+ Integer hdrScene = mCharacteristics.get(id).get(
+ CaptureModule.isHdrScene);
+ return hdrScene != null && hdrScene == 1;
+ }
+
public boolean isFixedFocus(int id) {
Float focusDistance = mCharacteristics.get(id).get(CameraCharacteristics
.LENS_INFO_MINIMUM_FOCUS_DISTANCE);
@@ -1239,6 +1322,16 @@
return modes;
}
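+ // Build the list of whole zoom levels up to SCALER_AVAILABLE_MAX_DIGITAL_ZOOM for this camera.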
+ private List<String> getSupportedZoomLevel(int cameraId) {
+ float maxZoom = mCharacteristics.get(cameraId).get(CameraCharacteristics
+ .SCALER_AVAILABLE_MAX_DIGITAL_ZOOM);
+ ArrayList<String> supported = new ArrayList<String>();
+ for (int zoomLevel = 0; zoomLevel <= maxZoom; zoomLevel++) {
+ supported.add(String.valueOf(zoomLevel));
+ }
+ return supported;
+ }
+
private void resetIfInvalid(ListPreference pref) {
// Set the value to the first entry if it is invalid.
String value = pref.getValue();
diff --git a/src/com/android/camera/VideoMenu.java b/src/com/android/camera/VideoMenu.java
old mode 100644
new mode 100755
index ea4415f..472630b
--- a/src/com/android/camera/VideoMenu.java
+++ b/src/com/android/camera/VideoMenu.java
@@ -140,7 +140,8 @@
CameraSettings.KEY_VIDEO_ROTATION,
CameraSettings.KEY_VIDEO_CDS_MODE,
CameraSettings.KEY_VIDEO_TNR_MODE,
- CameraSettings.KEY_VIDEO_SNAPSHOT_SIZE
+ CameraSettings.KEY_VIDEO_SNAPSHOT_SIZE,
+ CameraSettings.KEY_ZOOM
};
initSwitchItem(CameraSettings.KEY_CAMERA_ID, mFrontBackSwitcher);
}
@@ -776,21 +777,25 @@
.getInt("persist.camcorder.eis.maxfps", 30);
ListPreference hfrPref = mPreferenceGroup
.findPreference(CameraSettings.KEY_VIDEO_HIGH_FRAME_RATE);
- if (hfrPref != null) {
- String highFrameRate = hfrPref.getValue();
- boolean isHFR = "hfr".equals(highFrameRate.substring(0,3));
- boolean isHSR = "hsr".equals(highFrameRate.substring(0,3));
- int rate = 0;
- if ( isHFR || isHSR ) {
- String hfrRate = highFrameRate.substring(3);
- rate = Integer.parseInt(hfrRate);
- }
+ String highFrameRate;
+ if (hfrPref == null) {
+ // If hfrPref is null, fall back to a blank placeholder value.
+ highFrameRate = " ";
+ } else {
+ highFrameRate = hfrPref.getValue();
+ }
+ boolean isHFR = "hfr".equals(highFrameRate.substring(0,3));
+ boolean isHSR = "hsr".equals(highFrameRate.substring(0,3));
+ int rate = 0;
+ if ( isHFR || isHSR ) {
+ String hfrRate = highFrameRate.substring(3);
+ rate = Integer.parseInt(hfrRate);
+ }
- if ((disMode.equals("enable") && rate > PERSIST_EIS_MAX_FPS)
- || !videoHDR.equals("off")
- || timeLapseInterval != 0) {
- mListMenu.setPreferenceEnabled(CameraSettings.KEY_VIDEO_HIGH_FRAME_RATE, false);
- }
+ if ((disMode.equals("enable") && rate > PERSIST_EIS_MAX_FPS)
+ || !videoHDR.equals("off")
+ || timeLapseInterval != 0) {
+ mListMenu.setPreferenceEnabled(CameraSettings.KEY_VIDEO_HIGH_FRAME_RATE, false);
}
}
diff --git a/src/com/android/camera/VideoModule.java b/src/com/android/camera/VideoModule.java
index d5351e2..0f5fe32 100644
--- a/src/com/android/camera/VideoModule.java
+++ b/src/com/android/camera/VideoModule.java
@@ -216,6 +216,9 @@
private boolean mFaceDetectionEnabled = false;
private boolean mFaceDetectionStarted = false;
+ private static final int MAX_ZOOM = 10;
+ private int[] mZoomIdxTbl = {-1, -1, -1, -1, -1, -1, -1, -1, -1, -1};
+
private static final boolean PERSIST_4K_NO_LIMIT =
android.os.SystemProperties.getBoolean("persist.camcorder.4k.nolimit", false);
@@ -2269,11 +2272,69 @@
mIsFlipEnabled = false;
}
}
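+ // Apply the zoom level chosen from the settings menu before recording by stepping the zoom index toward the matching zoom ratio.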
+ private void setZoomMenuValue() {
+ String zoomMenuValue = mPreferences.getString(CameraSettings.KEY_ZOOM,
+ mActivity.getString(R.string.pref_camera_zoom_default));
+ if (!zoomMenuValue.equals("0")) {
+ int zoomValue = Integer.parseInt(zoomMenuValue);
+ if (mZoomIdxTbl[0] == -1) {
+ /* update the index table once */
+ Log.d(TAG, "Update the zoom index table.");
+ List<Integer> zoomRatios = mParameters.getZoomRatios();
+ int lastZoomIdx = 0;
+ for (int zoom = 1; zoom <= MAX_ZOOM; zoom++) {
+ int zoomIdx = zoomRatios.indexOf(zoom*100);
+ if (zoomIdx == -1) {
+ Log.d(TAG, "Can't find matching zoom value "+zoom);
+ int nextZoom = 0;
+ while ((++lastZoomIdx < zoomRatios.size()) &&
+ (nextZoom < (zoom*100))){
+ nextZoom = zoomRatios.get(lastZoomIdx);
+ zoomIdx = lastZoomIdx;
+ }
+ if (lastZoomIdx < zoomRatios.size()) {
+ zoomIdx = lastZoomIdx - 1;
+ } else {
+ break;
+ }
+ }
+ mZoomIdxTbl[zoom-1] = zoomIdx;
+ lastZoomIdx = zoomIdx;
+ }
+ }
+
+ if ((zoomValue <= mZoomIdxTbl.length) &&
+ (mZoomIdxTbl[zoomValue-1] != -1)) {
+ int step = 1;
+ int cur_zoom = mParameters.getZoom();
+ Log.d(TAG, "zoom index = "+mZoomIdxTbl[zoomValue-1]+", cur index = "+cur_zoom);
+ if (cur_zoom > mZoomIdxTbl[zoomValue-1]) {
+ step = -1;
+ }
+
+ /* move zoom slowly */
+ while (cur_zoom != mZoomIdxTbl[zoomValue-1]) {
+ cur_zoom += step;
+ mParameters.setZoom(cur_zoom);
+ try {
+ Thread.sleep(25);
+ } catch(InterruptedException e) {
+ }
+ }
+
+ mParameters.setZoom(mZoomIdxTbl[zoomValue-1]);
+ } else {
+ Log.e(TAG, "Zoom value "+zoomValue+" is not supported!");
+ }
+ }
+ }
private void qcomSetCameraParameters(){
// add QCOM Parameters here
// Set color effect parameter.
Log.i(TAG,"NOTE: qcomSetCameraParameters " + videoWidth + " x " + videoHeight);
+
+ setZoomMenuValue();
String colorEffect = mPreferences.getString(
CameraSettings.KEY_COLOR_EFFECT,
mActivity.getString(R.string.pref_camera_coloreffect_default));
diff --git a/src/com/android/camera/imageprocessor/PostProcessor.java b/src/com/android/camera/imageprocessor/PostProcessor.java
old mode 100755
new mode 100644
index d1e57ca..ec38bb7
--- a/src/com/android/camera/imageprocessor/PostProcessor.java
+++ b/src/com/android/camera/imageprocessor/PostProcessor.java
@@ -120,9 +120,12 @@
private int mOrientation = 0;
private ImageWriter mImageWriter;
- private static boolean DEBUG_FILTER = false;
- private static boolean DEBUG_ZSL = false;
- private ImageFilter.ResultImage mDebugResultImage;
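+ // Debug dumping is now controlled at runtime through PersistUtil.getCamera2Debug() instead of compile-time flags.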
+ private static boolean DEBUG_DUMP_FILTER_IMG =
+ (PersistUtil.getCamera2Debug() == PersistUtil.CAMERA2_DEBUG_DUMP_IMAGE) ||
+ (PersistUtil.getCamera2Debug() == PersistUtil.CAMERA2_DEBUG_DUMP_ALL);
+ private static boolean DEBUG_ZSL =
+ (PersistUtil.getCamera2Debug() == PersistUtil.CAMERA2_DEBUG_DUMP_LOG) ||
+ (PersistUtil.getCamera2Debug() == PersistUtil.CAMERA2_DEBUG_DUMP_ALL);
private ZSLQueue mZSLQueue;
private CameraDevice mCameraDevice;
@@ -670,6 +673,8 @@
mSaveRaw = isSaveRaw;
if(setFilter(postFilterId) || isFlashModeOn || isTrackingFocusOn || isMakeupOn || isSelfieMirrorOn
|| PersistUtil.getCameraZSLDisabled()
+ || "enable".equals(
+ SettingsManager.getInstance().getValue(SettingsManager.KEY_AUTO_HDR))
|| SettingsManager.getInstance().isCamera2HDRSupport()
|| "18".equals(SettingsManager.getInstance().getValue(
SettingsManager.KEY_SCENE_MODE))
@@ -945,14 +950,7 @@
}
ByteBuffer yBuf = image.getPlanes()[0].getBuffer();
ByteBuffer vuBuf = image.getPlanes()[2].getBuffer();
- if(mFilter != null && DEBUG_FILTER && numImage == 0) {
- mDebugResultImage = new ImageFilter.ResultImage(ByteBuffer.allocateDirect(mStride * mHeight*3/2),
- new Rect(0, 0, mWidth, mHeight), mWidth, mHeight, mStride);
- yBuf.get(mDebugResultImage.outBuffer.array(), 0, yBuf.remaining());
- vuBuf.get(mDebugResultImage.outBuffer.array(), mStride * mHeight, vuBuf.remaining());
- yBuf.rewind();
- vuBuf.rewind();
- }
+
if(mFilter == null) {
mDefaultResultImage = new ImageFilter.ResultImage(ByteBuffer.allocateDirect(mStride * mHeight*3/2),
new Rect(0, 0, mWidth, mHeight), mWidth, mHeight, mStride);
@@ -960,8 +958,26 @@
vuBuf.get(mDefaultResultImage.outBuffer.array(), mStride*mHeight, vuBuf.remaining());
image.close();
} else {
- mFilter.addImage(image.getPlanes()[0].getBuffer(),
- image.getPlanes()[2].getBuffer(), numImage, null);
+ if (DEBUG_DUMP_FILTER_IMG) {
+ ImageFilter.ResultImage debugResultImage = new
+ ImageFilter.ResultImage(ByteBuffer.allocateDirect(
+ mStride * mHeight * 3 / 2), new Rect(0, 0, mWidth,
+ mHeight), mWidth, mHeight, mStride);
+ yBuf.get(debugResultImage.outBuffer.array(), 0, yBuf.remaining());
+ vuBuf.get(debugResultImage.outBuffer.array(), mStride * mHeight,
+ vuBuf.remaining());
+ yBuf.rewind();
+ vuBuf.rewind();
+
+ byte[] bytes = nv21ToJpeg(debugResultImage, mOrientation, null);
+ mActivity.getMediaSaveService().addImage(
+ bytes, "Debug_beforeApplyingFilter" + numImage, 0L, null,
+ debugResultImage.outRoi.width(),
+ debugResultImage.outRoi.height(),
+ mOrientation, null, mController.getMediaSavedListener(),
+ mActivity.getContentResolver(), "jpeg");
+ }
+ mFilter.addImage(yBuf, vuBuf, numImage, null);
mImages[numImage] = image;
}
}
@@ -1065,12 +1081,6 @@
) {
Log.d(TAG, "Result image is not valid.");
} else {
- if(mFilter != null && DEBUG_FILTER) {
- bytes = nv21ToJpeg(mDebugResultImage, mOrientation, null);
- mActivity.getMediaSaveService().addImage(
- bytes, title + "_beforeApplyingFilter", date, null, mDebugResultImage.outRoi.width(), mDebugResultImage.outRoi.height(),
- mOrientation, null, mediaSavedListener, contentResolver, "jpeg");
- }
bytes = nv21ToJpeg(resultImage, mOrientation, waitForMetaData(0));
if (mController.getCurrentIntentMode() ==
CaptureModule.INTENT_MODE_CAPTURE) {
diff --git a/src/com/android/camera/imageprocessor/ZSLQueue.java b/src/com/android/camera/imageprocessor/ZSLQueue.java
old mode 100755
new mode 100644
index b9dea4e..c9ecb79
--- a/src/com/android/camera/imageprocessor/ZSLQueue.java
+++ b/src/com/android/camera/imageprocessor/ZSLQueue.java
@@ -34,6 +34,7 @@
import android.util.Log;
import com.android.camera.CaptureModule;
+import com.android.camera.util.PersistUtil;
import android.os.SystemProperties;
import java.util.Iterator;
@@ -49,8 +50,9 @@
private int mMetaHead;
private Object mLock = new Object();
private CaptureModule mModule;
- private static final boolean DEBUG = false;
- private static final boolean DEBUG_QUEUE = false;
+ private static final boolean DEBUG_QUEUE =
+ (PersistUtil.getCamera2Debug() == PersistUtil.CAMERA2_DEBUG_DUMP_LOG) ||
+ (PersistUtil.getCamera2Debug() == PersistUtil.CAMERA2_DEBUG_DUMP_ALL);
private static final String TAG = "ZSLQueue";
public ZSLQueue(CaptureModule module) {
diff --git a/src/com/android/camera/imageprocessor/filter/BeautificationFilter.java b/src/com/android/camera/imageprocessor/filter/BeautificationFilter.java
index ec60db7..e6c597b 100644
--- a/src/com/android/camera/imageprocessor/filter/BeautificationFilter.java
+++ b/src/com/android/camera/imageprocessor/filter/BeautificationFilter.java
@@ -53,7 +53,6 @@
int mStrideY;
int mStrideVU;
private CaptureModule mModule;
- private static boolean DEBUG = false;
private static String TAG = "BeautificationFilter";
private static boolean mIsSupported = false;
private static int FACE_TIMEOUT_VALUE = 60; //in frame count
diff --git a/src/com/android/camera/imageprocessor/filter/BestpictureFilter.java b/src/com/android/camera/imageprocessor/filter/BestpictureFilter.java
old mode 100755
new mode 100644
index e2c784f..418bd03
--- a/src/com/android/camera/imageprocessor/filter/BestpictureFilter.java
+++ b/src/com/android/camera/imageprocessor/filter/BestpictureFilter.java
@@ -38,8 +38,8 @@
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
import android.net.Uri;
-import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.util.Log;
@@ -66,8 +66,7 @@
private int mStrideY;
private int mStrideVU;
private static String TAG = "BestpictureFilter";
- private static final boolean DEBUG = false;
- private static boolean mIsSupported = true;
+ private static boolean mIsSupported = false;
private CaptureModule mModule;
private CameraActivity mActivity;
private int mOrientation = 0;
@@ -85,6 +84,7 @@
private boolean mIsOn = false;
private PostProcessor mProcessor;
private ProgressDialog mProgressDialog;
+ private ImageFilter.ResultImage mBestpictureResultImage;
private static void Log(String msg) {
if (DEBUG) {
@@ -148,13 +148,14 @@
mBY = bY;
mBVU = bVU;
+ byte[] bytes = getYUVBytes(bY, bVU, imageNum);
long captureStartTime = System.currentTimeMillis();
mNamedImages.nameNewImage(captureStartTime);
PhotoModule.NamedImages.NamedEntity name = mNamedImages.getNextNameEntity();
String title = (name == null) ? null : name.title;
long date = (name == null) ? -1 : name.date;
mActivity.getMediaSaveService().addImage(
- nv21ToJpeg(mBY, mBVU, new Rect(0, 0, mWidth, mHeight), mOrientation, 0), title, date, null, mWidth, mHeight,
+ bytes, title, date, null, mWidth, mHeight,
mOrientation, null, new MediaSaveService.OnMediaSavedListener() {
@Override
public void onMediaSaved(final Uri uri) {
@@ -182,8 +183,8 @@
}
, mActivity.getContentResolver(), "jpeg");
}
- ImageSaveTask t = new ImageSaveTask(bY, bVU, new Rect(0, 0, mWidth, mHeight), mOrientation, imageNum);
- t.execute();
+ byte[] bytes = getYUVBytes(bY, bVU, imageNum);
+ saveBestPicture(bytes, imageNum);
}
@Override
@@ -200,6 +201,26 @@
});
}
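+ // Copy the Y and VU planes into a contiguous NV21 buffer and encode it to JPEG with the metadata for this image.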
+ private byte[] getYUVBytes(final ByteBuffer yBuf, final ByteBuffer vuBuf,
+ final int imageNum) {
+ synchronized (mClosingLock) {
+ if (!mIsOn) {
+ return null;
+ }
+ mBestpictureResultImage = new ImageFilter.ResultImage(ByteBuffer.allocateDirect(
+ mStrideY * mHeight * 3 / 2),
+ new Rect(0, 0, mWidth, mHeight), mWidth, mHeight, mStrideY);
+ yBuf.get(mBestpictureResultImage.outBuffer.array(), 0, yBuf.remaining());
+ vuBuf.get(mBestpictureResultImage.outBuffer.array(), mStrideY * mHeight,
+ vuBuf.remaining());
+ yBuf.rewind();
+ vuBuf.rewind();
+
+ return nv21ToJpeg(mBestpictureResultImage, mOrientation,
+ mProcessor.waitForMetaData(imageNum));
+ }
+ }
+
private void dismissProgressDialog() {
mActivity.runOnUiThread(new Runnable() {
public void run() {
@@ -256,20 +277,15 @@
return mIsSupported;
}
- private byte[] nv21ToJpeg(ByteBuffer bY, ByteBuffer bVU, Rect roi, int orientation, int imageIndex) {
- ByteBuffer buf = ByteBuffer.allocate(mStrideY*mHeight*3/2);
- buf.put(bY);
- bY.rewind();
- if(bVU != null) {
- buf.put(bVU);
- bVU.rewind();
- }
+ private byte[] nv21ToJpeg(ImageFilter.ResultImage resultImage, int orientation,
+ TotalCaptureResult result) {
BitmapOutputStream bos = new BitmapOutputStream(1024);
- YuvImage im = new YuvImage(buf.array(), ImageFormat.NV21,
- mWidth, mHeight, new int[]{mStrideY, mStrideVU});
- im.compressToJpeg(roi, mProcessor.getJpegQualityValue(), bos);
+ YuvImage im = new YuvImage(resultImage.outBuffer.array(), ImageFormat.NV21,
+ resultImage.width, resultImage.height, new int[]{resultImage.stride,
+ resultImage.stride});
+ im.compressToJpeg(resultImage.outRoi, mProcessor.getJpegQualityValue(), bos);
byte[] bytes = bos.getArray();
- bytes = PostProcessor.addExifTags(bytes, orientation, mProcessor.waitForMetaData(imageIndex));
+ bytes = PostProcessor.addExifTags(bytes, orientation, result);
return bytes;
}
@@ -283,53 +299,22 @@
}
}
- private class ImageSaveTask extends AsyncTask<Void, Void, byte[]> {
- ByteBuffer bY;
- ByteBuffer bVU;
- Rect roi;
- int orientation;
- int imageNum;
-
- public ImageSaveTask(ByteBuffer bY, ByteBuffer bVU, Rect roi, int orientation, int imageNum) {
- this.bY = bY;
- this.bVU = bVU;
- this.roi = roi;
- this.orientation = orientation;
- this.imageNum = imageNum;
+ private void saveBestPicture(byte[] bytes, int imageNum) {
+ if(bytes == null)
+ return;
+ String filesPath = mActivity.getFilesDir()+"/Bestpicture";
+ File file = new File(filesPath);
+ if(!file.exists()) {
+ file.mkdir();
}
-
- @Override
- protected void onPreExecute() {
+ file = new File(filesPath+"/"+NAMES[imageNum]);
+ try {
+ FileOutputStream out = new FileOutputStream(file);
+ out.write(bytes, 0, bytes.length);
+ out.close();
+ } catch (Exception e) {
}
-
- @Override
- protected byte[] doInBackground(Void... v) {
- synchronized (mClosingLock) {
- if (!mIsOn) {
- return null;
- }
- return nv21ToJpeg(bY, bVU, roi, orientation, imageNum);
- }
- }
-
- @Override
- protected void onPostExecute(byte[] bytes) {
- if(bytes == null)
- return;
- String filesPath = mActivity.getFilesDir()+"/Bestpicture";
- File file = new File(filesPath);
- if(!file.exists()) {
- file.mkdir();
- }
- file = new File(filesPath+"/"+NAMES[imageNum]);
- try {
- FileOutputStream out = new FileOutputStream(file);
- out.write(bytes, 0, bytes.length);
- out.close();
- } catch (Exception e) {
- }
- mSavedCount++;
- Log(imageNum+" image is saved");
- }
+ mSavedCount++;
+ Log(imageNum+" image is saved");
}
}
diff --git a/src/com/android/camera/imageprocessor/filter/BlurbusterFilter.java b/src/com/android/camera/imageprocessor/filter/BlurbusterFilter.java
index 2ac8fdc..4ec2fe3 100644
--- a/src/com/android/camera/imageprocessor/filter/BlurbusterFilter.java
+++ b/src/com/android/camera/imageprocessor/filter/BlurbusterFilter.java
@@ -47,7 +47,6 @@
private int mStrideY;
private int mStrideVU;
private static String TAG = "BlurbusterFilter";
- private static final boolean DEBUG = false;
private static boolean mIsSupported = false;
private ByteBuffer mOutBuf;
private CaptureModule mModule;
diff --git a/src/com/android/camera/imageprocessor/filter/ChromaflashFilter.java b/src/com/android/camera/imageprocessor/filter/ChromaflashFilter.java
index e244c10..607908e 100644
--- a/src/com/android/camera/imageprocessor/filter/ChromaflashFilter.java
+++ b/src/com/android/camera/imageprocessor/filter/ChromaflashFilter.java
@@ -50,7 +50,7 @@
private int mStrideY;
private int mStrideVU;
private static String TAG = "ChromaflashFilter";
- private static final boolean DEBUG = false;
+
private static boolean mIsSupported = false;
private ByteBuffer mOutBuf;
private CaptureModule mModule;
diff --git a/src/com/android/camera/imageprocessor/filter/ImageFilter.java b/src/com/android/camera/imageprocessor/filter/ImageFilter.java
index bb581c9..ccdac0e 100644
--- a/src/com/android/camera/imageprocessor/filter/ImageFilter.java
+++ b/src/com/android/camera/imageprocessor/filter/ImageFilter.java
@@ -33,12 +33,17 @@
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CaptureRequest;
import android.os.Handler;
+import com.android.camera.util.PersistUtil;
import java.nio.ByteBuffer;
import java.util.List;
public interface ImageFilter {
+ public static final boolean DEBUG =
+ (PersistUtil.getCamera2Debug() == PersistUtil.CAMERA2_DEBUG_DUMP_LOG) ||
+ (PersistUtil.getCamera2Debug() == PersistUtil.CAMERA2_DEBUG_DUMP_ALL);
+
/* Return the number of required images to process*/
List<CaptureRequest> setRequiredImages(CaptureRequest.Builder builder);
diff --git a/src/com/android/camera/imageprocessor/filter/OptizoomFilter.java b/src/com/android/camera/imageprocessor/filter/OptizoomFilter.java
index 486ea7a..8136ea0 100644
--- a/src/com/android/camera/imageprocessor/filter/OptizoomFilter.java
+++ b/src/com/android/camera/imageprocessor/filter/OptizoomFilter.java
@@ -47,7 +47,6 @@
private int mStrideY;
private int mStrideVU;
private static String TAG = "OptizoomFilter";
- private static final boolean DEBUG = false;
private int temp;
private static boolean mIsSupported = true;
private ByteBuffer mOutBuf;
diff --git a/src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java b/src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java
index c51e13c..611949f 100644
--- a/src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java
+++ b/src/com/android/camera/imageprocessor/filter/SharpshooterFilter.java
@@ -51,7 +51,6 @@
private int mStrideY;
private int mStrideVU;
private static String TAG = "SharpshooterFilter";
- private static final boolean DEBUG = false;
private int temp;
private static boolean mIsSupported = true;
private ByteBuffer mOutBuf;
diff --git a/src/com/android/camera/imageprocessor/filter/StillmoreFilter.java b/src/com/android/camera/imageprocessor/filter/StillmoreFilter.java
index e5c2c03..2f483bf 100644
--- a/src/com/android/camera/imageprocessor/filter/StillmoreFilter.java
+++ b/src/com/android/camera/imageprocessor/filter/StillmoreFilter.java
@@ -50,7 +50,6 @@
private int mStrideY;
private int mStrideVU;
private static String TAG = "StillmoreFilter";
- private static final boolean DEBUG = false;
private static boolean mIsSupported = false;
private ByteBuffer mOutBuf;
private CaptureModule mModule;
diff --git a/src/com/android/camera/imageprocessor/filter/TrackingFocusFrameListener.java b/src/com/android/camera/imageprocessor/filter/TrackingFocusFrameListener.java
index de7fdff..964b221 100644
--- a/src/com/android/camera/imageprocessor/filter/TrackingFocusFrameListener.java
+++ b/src/com/android/camera/imageprocessor/filter/TrackingFocusFrameListener.java
@@ -54,7 +54,6 @@
int mStrideY;
int mStrideVU;
private CaptureModule mModule;
- private static boolean DEBUG = false;
private static String TAG = "TrackingFocusFrameListener";
private static boolean mIsSupported = false;
private Rect imageRect;
diff --git a/src/com/android/camera/imageprocessor/filter/UbifocusFilter.java b/src/com/android/camera/imageprocessor/filter/UbifocusFilter.java
index f32832d..d29f29f 100644
--- a/src/com/android/camera/imageprocessor/filter/UbifocusFilter.java
+++ b/src/com/android/camera/imageprocessor/filter/UbifocusFilter.java
@@ -39,6 +39,7 @@
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
+import android.hardware.camera2.TotalCaptureResult;
import android.os.Handler;
import android.util.Log;
import android.util.Range;
@@ -63,7 +64,6 @@
private int mStrideY;
private int mStrideVU;
private static String TAG = "UbifocusFilter";
- private static final boolean DEBUG = false;
private static final int FOCUS_ADJUST_TIME_OUT = 400;
private static final int META_BYTES_SIZE = 25;
private int temp;
@@ -75,6 +75,7 @@
private float mMinFocusDistance = -1f;
private Object mClosingLock = new Object();
private PostProcessor mPostProcessor;
+ private ImageFilter.ResultImage mUbifocusResultImage;
final String[] NAMES = {"00.jpg", "01.jpg", "02.jpg", "03.jpg",
"04.jpg", "DepthMapImage.y", "AllFocusImage.jpg"};
@@ -147,7 +148,8 @@
if(mOutBuf == null) {
return;
}
- saveToPrivateFile(imageNum, nv21ToJpeg(bY, bVU, new Rect(0, 0, mWidth, mHeight), mOrientation, imageNum));
+ byte[] bytes = getYUVBytes(bY, bVU, imageNum);
+ saveToPrivateFile(imageNum, bytes);
mSavedCount++;
}
}
@@ -274,6 +276,38 @@
}
}
+ private byte[] getYUVBytes(final ByteBuffer yBuf, final ByteBuffer vuBuf,
+ final int imageNum) {
+ synchronized (mClosingLock) {
+ if (mOutBuf == null) {
+ return null;
+ }
+ mUbifocusResultImage = new ImageFilter.ResultImage(ByteBuffer.allocateDirect(
+ mStrideY * mHeight * 3 / 2),
+ new Rect(0, 0, mWidth, mHeight), mWidth, mHeight, mStrideY);
+ yBuf.get(mUbifocusResultImage.outBuffer.array(), 0, yBuf.remaining());
+ vuBuf.get(mUbifocusResultImage.outBuffer.array(), mStrideY * mHeight,
+ vuBuf.remaining());
+ yBuf.rewind();
+ vuBuf.rewind();
+
+ return nv21ToJpeg(mUbifocusResultImage, mOrientation,
+ mPostProcessor.waitForMetaData(imageNum));
+ }
+ }
+
+ private byte[] nv21ToJpeg(ImageFilter.ResultImage resultImage, int orientation,
+ TotalCaptureResult result) {
+ BitmapOutputStream bos = new BitmapOutputStream(1024);
+ YuvImage im = new YuvImage(resultImage.outBuffer.array(), ImageFormat.NV21,
+ resultImage.width, resultImage.height, new int[]{resultImage.stride,
+ resultImage.stride});
+ im.compressToJpeg(resultImage.outRoi, mPostProcessor.getJpegQualityValue(), bos);
+ byte[] bytes = bos.getArray();
+ bytes = PostProcessor.addExifTags(bytes, orientation, result);
+ return bytes;
+ }
+
private native int nativeInit(int width, int height, int yStride, int vuStride, int numImages);
private native int nativeDeinit();
private native int nativeAddImage(ByteBuffer yB, ByteBuffer vuB, int ySize, int vuSize, int imageNum);
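
In the UbifocusFilter hunks above, the per-shot debug files are no longer encoded straight from the raw Y/VU buffers; the planes are first copied into an ImageFilter.ResultImage and then run through nv21ToJpeg together with the per-image capture result, so the saved files keep the usual EXIF tags (orientation in particular). A stripped-down sketch of the NV21-to-JPEG step, assuming a packed NV21 buffer and an illustrative quality value rather than the PostProcessor plumbing used here:

    import android.graphics.ImageFormat;
    import android.graphics.Rect;
    import android.graphics.YuvImage;
    import java.io.ByteArrayOutputStream;

    class Nv21JpegSketch {
        // nv21 holds the full Y plane followed by the interleaved VU plane;
        // both planes are described to YuvImage by their row stride.
        static byte[] toJpeg(byte[] nv21, int width, int height, int stride, int quality) {
            YuvImage image = new YuvImage(nv21, ImageFormat.NV21, width, height,
                    new int[]{stride, stride});
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            image.compressToJpeg(new Rect(0, 0, width, height), quality, out);
            return out.toByteArray();
        }
    }
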
diff --git a/src/com/android/camera/mpo/MpoOutputStream.java b/src/com/android/camera/mpo/MpoOutputStream.java
index 5d01d26..df45c48 100644
--- a/src/com/android/camera/mpo/MpoOutputStream.java
+++ b/src/com/android/camera/mpo/MpoOutputStream.java
@@ -32,10 +32,13 @@
import com.android.camera.exif.JpegHeader;
import com.android.camera.exif.OrderedDataOutputStream;
import com.android.camera.mpo.MpoTag.MpEntry;
+import com.android.camera.util.PersistUtil;
class MpoOutputStream extends FilterOutputStream {
private static final String TAG = "MpoOutputStream";
- private static final boolean DEBUG = true;
+ private static final boolean DEBUG =
+ (PersistUtil.getCamera2Debug() == PersistUtil.CAMERA2_DEBUG_DUMP_LOG) ||
+ (PersistUtil.getCamera2Debug() == PersistUtil.CAMERA2_DEBUG_DUMP_ALL);
private static final int STREAMBUFFER_SIZE = 0x00010000; // 64Kb
private static final int STATE_SOI = 0;
diff --git a/src/com/android/camera/ui/FaceView.java b/src/com/android/camera/ui/FaceView.java
index dfa84ec..9a310af 100644
--- a/src/com/android/camera/ui/FaceView.java
+++ b/src/com/android/camera/ui/FaceView.java
@@ -34,7 +34,7 @@
import com.android.camera.PhotoUI;
import com.android.camera.util.CameraUtil;
import org.codeaurora.snapcam.R;
-import org.codeaurora.camera.ExtendedFace;
+import org.codeaurora.snapcam.wrapper.ExtendedFaceWrapper;
public class FaceView extends View
implements FocusIndicator, Rotatable,
@@ -233,113 +233,113 @@
mRect.offset(dx, dy);
canvas.drawOval(mRect, mPaint);
- if (mFaces[i] instanceof ExtendedFace) {
- ExtendedFace face = (ExtendedFace)mFaces[i];
+ if (ExtendedFaceWrapper.isExtendedFaceInstance(mFaces[i])) {
+ ExtendedFaceWrapper wrapper = new ExtendedFaceWrapper(mFaces[i]);
float[] point = new float[4];
int delta_x = mFaces[i].rect.width() / 12;
int delta_y = mFaces[i].rect.height() / 12;
- Log.e(TAG, "blink: (" + face.getLeftEyeBlinkDegree()+ ", " +
- face.getRightEyeBlinkDegree() + ")");
- if (face.leftEye != null) {
+ Log.e(TAG, "blink: (" + wrapper.getLeftEyeBlinkDegree()+ ", " +
+ wrapper.getRightEyeBlinkDegree() + ")");
+ if (wrapper.mFace.leftEye != null) {
if ((mDisplayRotation == 0) ||
(mDisplayRotation == 180)) {
- point[0] = face.leftEye.x;
- point[1] = face.leftEye.y - delta_y / 2;
- point[2] = face.leftEye.x;
- point[3] = face.leftEye.y + delta_y / 2;
+ point[0] = wrapper.mFace.leftEye.x;
+ point[1] = wrapper.mFace.leftEye.y - delta_y / 2;
+ point[2] = wrapper.mFace.leftEye.x;
+ point[3] = wrapper.mFace.leftEye.y + delta_y / 2;
} else {
- point[0] = face.leftEye.x - delta_x / 2;
- point[1] = face.leftEye.y;
- point[2] = face.leftEye.x + delta_x / 2;
- point[3] = face.leftEye.y;
+ point[0] = wrapper.mFace.leftEye.x - delta_x / 2;
+ point[1] = wrapper.mFace.leftEye.y;
+ point[2] = wrapper.mFace.leftEye.x + delta_x / 2;
+ point[3] = wrapper.mFace.leftEye.y;
}
mMatrix.mapPoints (point);
- if (face.getLeftEyeBlinkDegree() >= blink_threshold) {
+ if (wrapper.getLeftEyeBlinkDegree() >= blink_threshold) {
canvas.drawLine(point[0]+ dx, point[1]+ dy,
point[2]+ dx, point[3]+ dy, mPaint);
}
}
- if (face.rightEye != null) {
+ if (wrapper.mFace.rightEye != null) {
if ((mDisplayRotation == 0) ||
(mDisplayRotation == 180)) {
- point[0] = face.rightEye.x;
- point[1] = face.rightEye.y - delta_y / 2;
- point[2] = face.rightEye.x;
- point[3] = face.rightEye.y + delta_y / 2;
+ point[0] = wrapper.mFace.rightEye.x;
+ point[1] = wrapper.mFace.rightEye.y - delta_y / 2;
+ point[2] = wrapper.mFace.rightEye.x;
+ point[3] = wrapper.mFace.rightEye.y + delta_y / 2;
} else {
- point[0] = face.rightEye.x - delta_x / 2;
- point[1] = face.rightEye.y;
- point[2] = face.rightEye.x + delta_x / 2;
- point[3] = face.rightEye.y;
+ point[0] = wrapper.mFace.rightEye.x - delta_x / 2;
+ point[1] = wrapper.mFace.rightEye.y;
+ point[2] = wrapper.mFace.rightEye.x + delta_x / 2;
+ point[3] = wrapper.mFace.rightEye.y;
}
mMatrix.mapPoints (point);
- if (face.getRightEyeBlinkDegree() >= blink_threshold) {
+ if (wrapper.getRightEyeBlinkDegree() >= blink_threshold) {
//Add offset to the points if the rect has an offset
canvas.drawLine(point[0] + dx, point[1] + dy,
point[2] +dx, point[3] +dy, mPaint);
}
}
- if (face.getLeftRightGazeDegree() != 0
- || face.getTopBottomGazeDegree() != 0 ) {
+ if (wrapper.getLeftRightGazeDegree() != 0
+ || wrapper.getTopBottomGazeDegree() != 0 ) {
double length =
- Math.sqrt((face.leftEye.x - face.rightEye.x) *
- (face.leftEye.x - face.rightEye.x) +
- (face.leftEye.y - face.rightEye.y) *
- (face.leftEye.y - face.rightEye.y)) / 2.0;
- double nGazeYaw = -face.getLeftRightGazeDegree();
- double nGazePitch = -face.getTopBottomGazeDegree();
+ Math.sqrt((wrapper.mFace.leftEye.x - wrapper.mFace.rightEye.x) *
+ (wrapper.mFace.leftEye.x - wrapper.mFace.rightEye.x) +
+ (wrapper.mFace.leftEye.y - wrapper.mFace.rightEye.y) *
+ (wrapper.mFace.leftEye.y - wrapper.mFace.rightEye.y)) / 2.0;
+ double nGazeYaw = -wrapper.getLeftRightGazeDegree();
+ double nGazePitch = -wrapper.getTopBottomGazeDegree();
float gazeRollX =
(float)((-Math.sin(nGazeYaw/180.0*Math.PI) *
- Math.cos(-face.getRollDirection()/
+ Math.cos(-wrapper.getRollDirection()/
180.0*Math.PI) +
Math.sin(nGazePitch/180.0*Math.PI) *
Math.cos(nGazeYaw/180.0*Math.PI) *
- Math.sin(-face.getRollDirection()/
+ Math.sin(-wrapper.getRollDirection()/
180.0*Math.PI)) *
(-length) + 0.5);
float gazeRollY =
(float)((Math.sin(-nGazeYaw/180.0*Math.PI) *
- Math.sin(-face.getRollDirection()/
+ Math.sin(-wrapper.getRollDirection()/
180.0*Math.PI)-
Math.sin(nGazePitch/180.0*Math.PI) *
Math.cos(nGazeYaw/180.0*Math.PI) *
- Math.cos(-face.getRollDirection()/
+ Math.cos(-wrapper.getRollDirection()/
180.0*Math.PI)) *
(-length) + 0.5);
- if (face.getLeftEyeBlinkDegree() < blink_threshold) {
+ if (wrapper.getLeftEyeBlinkDegree() < blink_threshold) {
if ((mDisplayRotation == 90) ||
(mDisplayRotation == 270)) {
- point[0] = face.leftEye.x;
- point[1] = face.leftEye.y;
- point[2] = face.leftEye.x + gazeRollX;
- point[3] = face.leftEye.y + gazeRollY;
+ point[0] = wrapper.mFace.leftEye.x;
+ point[1] = wrapper.mFace.leftEye.y;
+ point[2] = wrapper.mFace.leftEye.x + gazeRollX;
+ point[3] = wrapper.mFace.leftEye.y + gazeRollY;
} else {
- point[0] = face.leftEye.x;
- point[1] = face.leftEye.y;
- point[2] = face.leftEye.x + gazeRollY;
- point[3] = face.leftEye.y + gazeRollX;
+ point[0] = wrapper.mFace.leftEye.x;
+ point[1] = wrapper.mFace.leftEye.y;
+ point[2] = wrapper.mFace.leftEye.x + gazeRollY;
+ point[3] = wrapper.mFace.leftEye.y + gazeRollX;
}
mMatrix.mapPoints (point);
canvas.drawLine(point[0] +dx, point[1] + dy,
point[2] + dx, point[3] +dy, mPaint);
}
- if (face.getRightEyeBlinkDegree() < blink_threshold) {
+ if (wrapper.getRightEyeBlinkDegree() < blink_threshold) {
if ((mDisplayRotation == 90) ||
(mDisplayRotation == 270)) {
- point[0] = face.rightEye.x;
- point[1] = face.rightEye.y;
- point[2] = face.rightEye.x + gazeRollX;
- point[3] = face.rightEye.y + gazeRollY;
+ point[0] = wrapper.mFace.rightEye.x;
+ point[1] = wrapper.mFace.rightEye.y;
+ point[2] = wrapper.mFace.rightEye.x + gazeRollX;
+ point[3] = wrapper.mFace.rightEye.y + gazeRollY;
} else {
- point[0] = face.rightEye.x;
- point[1] = face.rightEye.y;
- point[2] = face.rightEye.x + gazeRollY;
- point[3] = face.rightEye.y + gazeRollX;
+ point[0] = wrapper.mFace.rightEye.x;
+ point[1] = wrapper.mFace.rightEye.y;
+ point[2] = wrapper.mFace.rightEye.x + gazeRollY;
+ point[3] = wrapper.mFace.rightEye.y + gazeRollX;
}
mMatrix.mapPoints (point);
@@ -348,35 +348,35 @@
}
}
- if (face.mouth != null) {
- Log.e(TAG, "smile: " + face.getSmileDegree() + "," +
- face.getSmileScore());
- if (face.getSmileDegree() < smile_threashold_no_smile) {
- point[0] = face.mouth.x + dx - delta_x;
- point[1] = face.mouth.y;
- point[2] = face.mouth.x + dx + delta_x;
- point[3] = face.mouth.y;
+ if (wrapper.mFace.mouth != null) {
+ Log.e(TAG, "smile: " + wrapper.getSmileDegree() + "," +
+ wrapper.getSmileScore());
+ if (wrapper.getSmileDegree() < smile_threashold_no_smile) {
+ point[0] = wrapper.mFace.mouth.x + dx - delta_x;
+ point[1] = wrapper.mFace.mouth.y;
+ point[2] = wrapper.mFace.mouth.x + dx + delta_x;
+ point[3] = wrapper.mFace.mouth.y;
Matrix faceMatrix = new Matrix(mMatrix);
- faceMatrix.preRotate(face.getRollDirection(),
- face.mouth.x, face.mouth.y);
+ faceMatrix.preRotate(wrapper.getRollDirection(),
+ wrapper.mFace.mouth.x, wrapper.mFace.mouth.y);
faceMatrix.mapPoints(point);
canvas.drawLine(point[0] + dx, point[1] + dy,
point[2] + dx, point[3] + dy, mPaint);
- } else if (face.getSmileDegree() <
+ } else if (wrapper.getSmileDegree() <
smile_threashold_small_smile) {
int rotation_mouth = 360 - mDisplayRotation;
- mRect.set(face.mouth.x-delta_x,
- face.mouth.y-delta_y, face.mouth.x+delta_x,
- face.mouth.y+delta_y);
+ mRect.set(wrapper.mFace.mouth.x-delta_x,
+ wrapper.mFace.mouth.y-delta_y, wrapper.mFace.mouth.x+delta_x,
+ wrapper.mFace.mouth.y+delta_y);
mMatrix.mapRect(mRect);
mRect.offset(dx, dy);
canvas.drawArc(mRect, rotation_mouth,
180, true, mPaint);
} else {
- mRect.set(face.mouth.x-delta_x,
- face.mouth.y-delta_y, face.mouth.x+delta_x,
- face.mouth.y+delta_y);
+ mRect.set(wrapper.mFace.mouth.x-delta_x,
+ wrapper.mFace.mouth.y-delta_y, wrapper.mFace.mouth.x+delta_x,
+ wrapper.mFace.mouth.y+delta_y);
mMatrix.mapRect(mRect);
mRect.offset(dx, dy);
canvas.drawOval(mRect, mPaint);
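
FaceView now reads the vendor face attributes only through the reflection-based ExtendedFaceWrapper added at the end of this change, so there is no compile-time dependency on org.codeaurora.camera.ExtendedFace and the view keeps working when that class is absent. A minimal usage sketch, with a hypothetical helper name and a 0 fallback chosen purely for illustration:

    import android.hardware.Camera.Face;
    import org.codeaurora.snapcam.wrapper.ExtendedFaceWrapper;

    class FaceWrapperUsageSketch {
        // Returns the smile degree when the vendor ExtendedFace class is available,
        // otherwise falls back to 0.
        static int smileDegreeOf(Face face) {
            if (ExtendedFaceWrapper.isExtendedFaceInstance(face)) {
                return new ExtendedFaceWrapper(face).getSmileDegree();
            }
            return 0;
        }
    }
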
diff --git a/src/com/android/camera/ui/PanoCaptureProcessView.java b/src/com/android/camera/ui/PanoCaptureProcessView.java
index 95e9a09..b62c588 100644
--- a/src/com/android/camera/ui/PanoCaptureProcessView.java
+++ b/src/com/android/camera/ui/PanoCaptureProcessView.java
@@ -628,9 +628,10 @@
if(mDir == DIRECTION_UPDOWN) {
orient = 0;
}
- final Uri uri = mController.savePanorama(jpegData, mFinalPictureWidth*8, mFinalPictureHeight, orient);
Bitmap bm = BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length);
final Bitmap thumbBitmap = CameraUtil.rotate(bm, orient);
+ final Uri uri = mController.savePanorama(jpegData,
+ thumbBitmap.getWidth(), thumbBitmap.getHeight(), orient);
if(uri != null) {
mActivity.runOnUiThread(new Runnable() {
public void run() {
@@ -930,16 +931,29 @@
Canvas canvas = new Canvas(dstBitmap);
matrix.reset();
int sensorOrientation = mController.getCameraSensorOrientation();
- if(mOrientation == 0 || mOrientation == 270) {
- matrix.postRotate((sensorOrientation + mOrientation + 360) % 360, srcBitmap.getHeight() / 2, srcBitmap.getHeight() / 2);
+ matrix.setScale(ratio, ratio);
+ // See android.hardware.Camera.Parameters.setRotation for documentation.
+ // Refer to the CameraUtil.getJpegRotation() method for the equivalent logic.
+ float rotationAngle = (sensorOrientation + mOrientation) % 360;
+ if (mOrientation == 0) {
+ if (sensorOrientation == 90) {
+ matrix.postRotate(rotationAngle, dstBitmap.getWidth() / 2,
+ dstBitmap.getWidth() / 2);
+ } else if (sensorOrientation == 270) {
+ matrix.postRotate(rotationAngle, dstBitmap.getHeight() / 2,
+ dstBitmap.getHeight() / 2);
+ }
} else if (mOrientation == 180){
- matrix.postRotate((sensorOrientation + mOrientation + 180 + 360) % 360, srcBitmap.getHeight() / 2, srcBitmap.getHeight() / 2);
- matrix.postRotate(180, srcBitmap.getHeight() / 2, srcBitmap.getWidth() / 2);
- } else if(mOrientation == 90) {
- matrix.postRotate((sensorOrientation + mOrientation + 180 + 360) % 360, srcBitmap.getHeight() / 2, srcBitmap.getHeight() / 2);
- matrix.postRotate(180, srcBitmap.getWidth() / 2, srcBitmap.getHeight() / 2);
+ if (sensorOrientation == 90) {
+ matrix.postRotate(rotationAngle, dstBitmap.getHeight() / 2,
+ dstBitmap.getHeight() / 2);
+ } else if (sensorOrientation == 270) {
+ matrix.postRotate(rotationAngle, dstBitmap.getWidth() / 2,
+ dstBitmap.getWidth() / 2);
+ }
+ } else if (mOrientation == 270 || mOrientation == 90) {
+ matrix.postRotate(rotationAngle, dstBitmap.getWidth() / 2, dstBitmap.getHeight() / 2);
}
- matrix.postScale(ratio, ratio);
canvas.drawBitmap(srcBitmap, matrix, null);
}
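
The rewritten block above scales first and then rotates by (sensorOrientation + mOrientation) % 360, choosing the pivot per device/sensor orientation, which mirrors the JPEG rotation rule documented for android.hardware.Camera.Parameters#setRotation. A sketch of that documented rule (the helper name is illustrative; in this codebase CameraUtil.getJpegRotation carries the equivalent logic):

    import android.view.OrientationEventListener;

    class JpegRotationSketch {
        // deviceOrientation: device rotation in degrees as reported by an
        // OrientationEventListener; sensorOrientation: CameraInfo.orientation.
        static int jpegRotation(int sensorOrientation, int deviceOrientation,
                boolean frontFacing) {
            if (deviceOrientation == OrientationEventListener.ORIENTATION_UNKNOWN) {
                return sensorOrientation;
            }
            deviceOrientation = (deviceOrientation + 45) / 90 * 90;  // snap to a multiple of 90
            return frontFacing
                    ? (sensorOrientation - deviceOrientation + 360) % 360
                    : (sensorOrientation + deviceOrientation) % 360;
        }
    }
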
diff --git a/src/com/android/camera/util/PersistUtil.java b/src/com/android/camera/util/PersistUtil.java
index cce0e20..37bf6ce 100644
--- a/src/com/android/camera/util/PersistUtil.java
+++ b/src/com/android/camera/util/PersistUtil.java
@@ -44,6 +44,12 @@
SystemProperties.getBoolean("persist.camera.camera2", false);
private static final boolean PERSIST_CAMERA_ZSL =
SystemProperties.getBoolean("persist.camera.zsl.disabled", false);
+ private static final int PERSIST_CAMERA2_DEBUG =
+ SystemProperties.getInt("persist.camera2.debug", 0);
+
+ public static final int CAMERA2_DEBUG_DUMP_IMAGE = 1;
+ public static final int CAMERA2_DEBUG_DUMP_LOG = 2;
+ public static final int CAMERA2_DEBUG_DUMP_ALL = 100;
public static int getMemoryLimit() {
return PERSIST_MEMORY_LIMIT;
@@ -68,4 +74,8 @@
public static boolean getCameraZSLDisabled() {
return PERSIST_CAMERA_ZSL;
}
+
+ public static int getCamera2Debug() {
+ return PERSIST_CAMERA2_DEBUG;
+ }
}
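
The new persist.camera2.debug property selects the debug behaviour: 1 dumps intermediate images, 2 enables verbose logging, and 100 turns on both. The value is read once via SystemProperties.getInt when PersistUtil is first loaded, so after changing it (for example with adb shell setprop persist.camera2.debug 2) the camera process has to be restarted. A small gating sketch for the image-dump case (class and method names are illustrative, not part of this change):

    import com.android.camera.util.PersistUtil;

    class DumpGateSketch {
        // True when intermediate frames should be written out for inspection.
        static boolean shouldDumpFrames() {
            int mode = PersistUtil.getCamera2Debug();
            return mode == PersistUtil.CAMERA2_DEBUG_DUMP_IMAGE
                    || mode == PersistUtil.CAMERA2_DEBUG_DUMP_ALL;
        }
    }
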
diff --git a/src/org/codeaurora/snapcam/filter/ClearSightImageProcessor.java b/src/org/codeaurora/snapcam/filter/ClearSightImageProcessor.java
index fa9ca62..bfd58db 100755
--- a/src/org/codeaurora/snapcam/filter/ClearSightImageProcessor.java
+++ b/src/org/codeaurora/snapcam/filter/ClearSightImageProcessor.java
@@ -416,7 +416,7 @@
private ImageReader createEncodeImageReader(final int cam, int width, int height) {
ImageReader reader = ImageReader.newInstance(width, height,
- ImageFormat.JPEG, mNumFrameCount);
+ ImageFormat.JPEG, mNumFrameCount + 1);
reader.setOnImageAvailableListener(new OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
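
maxImages bounds how many Images may be acquired from an ImageReader at the same time; allocating mNumFrameCount + 1 leaves one spare slot so the listener can still be holding a frame for encoding when the next JPEG arrives, which otherwise risks an IllegalStateException from acquireNextImage. A sketch of the sizing idea (names and the burst-count parameter are illustrative):

    import android.graphics.ImageFormat;
    import android.media.ImageReader;

    class EncodeReaderSketch {
        // One slot per expected frame in the burst, plus one spare that can stay
        // acquired while it is being encoded and saved.
        static ImageReader create(int width, int height, int framesPerBurst) {
            return ImageReader.newInstance(width, height, ImageFormat.JPEG,
                    framesPerBurst + 1);
        }
    }
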
diff --git a/src/org/codeaurora/snapcam/wrapper/ExtendedFaceWrapper.java b/src/org/codeaurora/snapcam/wrapper/ExtendedFaceWrapper.java
new file mode 100644
index 0000000..4c92e63
--- /dev/null
+++ b/src/org/codeaurora/snapcam/wrapper/ExtendedFaceWrapper.java
@@ -0,0 +1,127 @@
+/*
+ * Copyright (c) 2017, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+package org.codeaurora.snapcam.wrapper;
+
+import java.lang.reflect.Method;
+
+import android.hardware.Camera.Face;
+import android.os.Bundle;
+
+public class ExtendedFaceWrapper {
+ private final static String CLASS_NAME = "org.codeaurora.camera.ExtendedFace";
+ private static Class<?> mExtendFaceClass;
+ public Face mFace;
+ public ExtendedFaceWrapper(Face face) {
+ mFace = face;
+ }
+
+ public static boolean isExtendedFaceInstance(Object object) {
+ if (mExtendFaceClass == null) {
+ try {
+ mExtendFaceClass = Class.forName(CLASS_NAME);
+ } catch (Exception exception) {
+ exception.printStackTrace();
+ return false;
+ }
+ }
+ return mExtendFaceClass.isInstance(object);
+ }
+
+ public int getSmileDegree() {
+ return (int)invokeMethod("getSmileDegree");
+ }
+
+ public int getSmileScore() {
+ return (int)invokeMethod("getSmileScore");
+ }
+
+ public int getBlinkDetected() {
+ return (int)invokeMethod("getBlinkDetected");
+ }
+
+
+ public int getFaceRecognized() {
+ return (int)invokeMethod("getFaceRecognized");
+ }
+
+ public int getGazeAngle() {
+ return (int)invokeMethod("getGazeAngle");
+ }
+
+ public int getUpDownDirection() {
+ return (int)invokeMethod("getUpDownDirection");
+ }
+
+ public int getLeftRightDirection() {
+ return (int)invokeMethod("getLeftRightDirection");
+ }
+
+
+ public int getRollDirection() {
+ return (int)invokeMethod("getRollDirection");
+ }
+
+ public int getLeftEyeBlinkDegree() {
+ return (int)invokeMethod("getLeftEyeBlinkDegree");
+ }
+
+
+ public int getRightEyeBlinkDegree() {
+ return (int)invokeMethod("getRightEyeBlinkDegree");
+ }
+
+
+ public int getLeftRightGazeDegree() {
+ return (int)invokeMethod("getLeftRightGazeDegree");
+ }
+
+
+ public int getTopBottomGazeDegree() {
+ return (int)invokeMethod("getTopBottomGazeDegree");
+ }
+
+ public Bundle getExtendedFaceInfo() {
+ return (Bundle)invokeMethod("getExtendedFaceInfo");
+ }
+
+ private Object invokeMethod(String name) {
+ Object result = null;
+ try {
+ if (mExtendFaceClass == null) {
+ mExtendFaceClass = Class.forName(CLASS_NAME);
+ }
+ Method method = mExtendFaceClass.getDeclaredMethod(name);
+ result = method.invoke(mFace);
+ } catch (Exception exception) {
+ exception.printStackTrace();
+ }
+ return result;
+ }
+}