Merge "dvrapi: Pass layer_count down" into oc-dev
diff --git a/cmds/installd/InstalldNativeService.cpp b/cmds/installd/InstalldNativeService.cpp
index 19dfb87..a0d987d 100644
--- a/cmds/installd/InstalldNativeService.cpp
+++ b/cmds/installd/InstalldNativeService.cpp
@@ -1100,10 +1100,13 @@
 
     ALOGV("unlink %s\n", dex_path);
     if (unlink(dex_path) < 0) {
-        return error(StringPrintf("Failed to unlink %s", dex_path));
-    } else {
-        return ok();
+        // It's ok if we don't have a dalvik cache path. Report an error only when the
+        // path exists but cannot be unlinked.
+        if (errno != ENOENT) {
+            return error(StringPrintf("Failed to unlink %s", dex_path));
+        }
     }
+    return ok();
 }
 
 struct stats {
diff --git a/cmds/installd/tests/installd_service_test.cpp b/cmds/installd/tests/installd_service_test.cpp
index 4a1f333..34818f6 100644
--- a/cmds/installd/tests/installd_service_test.cpp
+++ b/cmds/installd/tests/installd_service_test.cpp
@@ -54,10 +54,12 @@
     return false;
 }
 
-bool create_cache_path(char path[PKG_PATH_MAX] ATTRIBUTE_UNUSED,
-        const char *src ATTRIBUTE_UNUSED,
-        const char *instruction_set ATTRIBUTE_UNUSED) {
-    return false;
+bool create_cache_path(char path[PKG_PATH_MAX],
+        const char *src,
+        const char *instruction_set) {
+    // Not really a valid path but it's good enough for testing.
+    sprintf(path, "/data/dalvik-cache/%s/%s", instruction_set, src);
+    return true;
 }
 
 static void mkdir(const char* path, uid_t owner, gid_t group, mode_t mode) {
@@ -151,5 +153,13 @@
     EXPECT_EQ(10000, stat_gid("com.example/bar/file"));
 }
 
+TEST_F(ServiceTest, RmDexNoDalvikCache) {
+    LOG(INFO) << "RmDexNoDalvikCache";
+
+    // Try to remove a nonexistent dalvik cache dex file. The call should
+    // succeed because there's nothing to remove.
+    EXPECT_TRUE(service->rmdex("com.example", "arm").isOk());
+}
+
 }  // namespace installd
 }  // namespace android
diff --git a/libs/vr/libbufferhubqueue/buffer_hub_queue_producer.cpp b/libs/vr/libbufferhubqueue/buffer_hub_queue_producer.cpp
index 64a7944..5121508 100644
--- a/libs/vr/libbufferhubqueue/buffer_hub_queue_producer.cpp
+++ b/libs/vr/libbufferhubqueue/buffer_hub_queue_producer.cpp
@@ -141,10 +141,10 @@
     LocalHandle fence;
     auto buffer_status =
         core_->producer_->Dequeue(core_->dequeue_timeout_ms_, &slot, &fence);
-    if (!buffer_producer)
-      return NO_MEMORY;
 
     buffer_producer = buffer_status.take();
+    if (!buffer_producer)
+      return NO_MEMORY;
 
     if (width == buffer_producer->width() &&
         height == buffer_producer->height() &&
diff --git a/libs/vr/libeds/Android.bp b/libs/vr/libeds/Android.bp
deleted file mode 100644
index a149853..0000000
--- a/libs/vr/libeds/Android.bp
+++ /dev/null
@@ -1,85 +0,0 @@
-// Copyright (C) 2015 The Android Open Source Project
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-sourceFiles = [
-    "eds.cpp",
-    "eds_mesh.cpp",
-    "composite_hmd.cpp",
-    "display_metrics.cpp",
-    "distortion_renderer.cpp",
-    "device_metrics.cpp",
-    "polynomial_radial_distortion.cpp",
-]
-
-localIncludeFiles = [
-    "include",
-]
-
-sharedLibraries = [
-    "libbase",
-    "libcutils",
-    "liblog",
-    "libEGL",
-    "libGLESv1_CM",
-    "libGLESv2",
-    "libui",
-    "libutils",
-    "libvulkan",
-]
-
-staticLibraries = [
-    "libdisplay",
-    "libdvrcommon",
-    "libdvrgraphics",
-    "libvrsensor",
-    "libpdx_default_transport",
-]
-
-cc_library_static {
-    srcs: sourceFiles,
-    cflags: [
-        "-DGL_GLEXT_PROTOTYPES",
-        "-DEGL_EGLEXT_PROTOTYPES",
-        "-Wno-unused-parameter"],
-    // Enable debug options below to show GL errors and use gdb.
-    // + ["-UNDEBUG", "-DDEBUG", "-O0", "-g", ]
-    export_include_dirs: localIncludeFiles,
-    shared_libs: sharedLibraries,
-    static_libs: staticLibraries,
-    name: "libeds",
-}
-
-testFiles = ["tests/eds_app_tests.cpp"]
-
-cc_test {
-    name: "eds_app_tests",
-    tags: ["optional"],
-
-    srcs: testFiles,
-
-    shared_libs: [
-        "libhardware",
-        "libsync",
-    ] + sharedLibraries,
-
-    static_libs: [
-        "libgmock_main",
-        "libgmock",
-        "libeds",
-    ] + staticLibraries + [
-        "libbufferhub",
-        "libbufferhubqueue",
-    ],
-
-}
diff --git a/libs/vr/libeds/composite_hmd.cpp b/libs/vr/libeds/composite_hmd.cpp
deleted file mode 100644
index d6bf164..0000000
--- a/libs/vr/libeds/composite_hmd.cpp
+++ /dev/null
@@ -1,257 +0,0 @@
-#include "include/private/dvr/composite_hmd.h"
-
-#include <log/log.h>
-
-#include <private/dvr/numeric.h>
-
-namespace android {
-namespace dvr {
-
-CompositeHmd::CompositeHmd(const HeadMountMetrics& head_mount_metrics,
-                           const DisplayMetrics& display_metrics)
-    : head_mount_metrics_(head_mount_metrics),
-      display_metrics_(display_metrics) {
-  MetricsChanged();
-}
-
-float CompositeHmd::GetTargetFrameDuration() const {
-  return display_metrics_.GetFrameDurationSeconds();
-}
-
-vec2 CompositeHmd::ComputeDistortedPoint(EyeType eye, vec2 position,
-                                         RgbColorChannel channel) const {
-  position = TransformPoint(eye_tan_angle_from_norm_screen_matrix_[eye], position);
-  vec2 distorted =
-      head_mount_metrics_.GetColorChannelDistortion(channel).Distort(position);
-  return TransformPoint(eye_norm_texture_from_tan_angle_matrix_[eye], distorted);
-}
-
-vec2 CompositeHmd::ComputeInverseDistortedPoint(EyeType eye, vec2 position,
-                                                RgbColorChannel channel) const {
-  position = TransformPoint(eye_norm_texture_from_tan_angle_inv_matrix_[eye], position);
-  vec2 distorted =
-      head_mount_metrics_.GetColorChannelDistortion(channel).DistortInverse(
-          position);
-  return TransformPoint(eye_tan_angle_from_norm_screen_inv_matrix_[eye], distorted);
-}
-
-void CompositeHmd::ComputeDistortedVertex(EyeType eye, vec2 uv_in,
-                                          vec2* vertex_out,
-                                          vec2* uv_out) const {
-  // The mesh vertices hold the shape of the distortion.
-  vec2 vertex_position = ComputeInverseDistortedPoint(eye, uv_in, kRed);
-  *vertex_out = vec2(vertex_position.x() - 0.5f, vertex_position.y() - 0.5f);
-
-  if (uv_out) {
-    // Compute the texture coordinate for each vertex coordinate.
-    // Red's is the inverse of the inverse, skip the calculation and use uv_in.
-    uv_out[kRed] = uv_in;
-    uv_out[kGreen] = ComputeDistortedPoint(eye, vertex_position, kGreen);
-    uv_out[kBlue] = ComputeDistortedPoint(eye, vertex_position, kBlue);
-  }
-}
-
-vec2i CompositeHmd::GetRecommendedRenderTargetSize() const {
-  return recommended_render_target_size_;
-}
-
-Range2i CompositeHmd::GetDisplayRange() const { return display_range_; }
-
-mat4 CompositeHmd::GetEyeFromHeadMatrix(EyeType eye) const {
-  return eye_from_head_matrix_[eye];
-}
-
-FieldOfView CompositeHmd::GetEyeFov(EyeType eye) const { return eye_fov_[eye]; }
-
-Range2i CompositeHmd::GetEyeViewportBounds(EyeType eye) const {
-  return eye_viewport_range_[eye];
-}
-
-void CompositeHmd::SetHeadMountMetrics(
-    const HeadMountMetrics& head_mount_metrics) {
-  // Use the assignment operator to do memberwise copy.
-  head_mount_metrics_ = head_mount_metrics;
-  MetricsChanged();
-}
-
-const HeadMountMetrics& CompositeHmd::GetHeadMountMetrics() const {
-  return head_mount_metrics_;
-}
-
-void CompositeHmd::SetDisplayMetrics(const DisplayMetrics& display_metrics) {
-  // Use the assignment operator to do memberwise copy.
-  display_metrics_ = display_metrics;
-  MetricsChanged();
-}
-
-const DisplayMetrics& CompositeHmd::GetDisplayMetrics() const {
-  return display_metrics_;
-}
-
-void CompositeHmd::MetricsChanged() {
-  // Abbreviations in variable names:
-  //   "vp": viewport
-  //   "ta": tan-angle
-  const HeadMountMetrics& mount = head_mount_metrics_;
-  DisplayMetrics display = display_metrics_;
-
-  if (display.IsPortrait()) {
-    // If we're in portrait mode, toggle the orientation so that all
-    // calculations are done in landscape mode.
-    display.ToggleOrientation();
-  }
-
-  float display_width_meters = display.GetSizeMeters()[0];
-  float display_height_meters = display.GetSizeMeters()[1];
-
-  vec2 pixels_per_meter = vec2(1.0f / display.GetMetersPerPixel()[0],
-                               1.0f / display.GetMetersPerPixel()[1]);
-
-  // virtual_eye_to_screen_dist is the distance from the screen to the eye
-  // after it has been projected through the lens.  This would normally be
-  // slightly different from the distance to the actual eye.
-  float virtual_eye_to_screen_dist = mount.GetVirtualEyeToScreenDistance();
-  float meters_per_tan_angle = virtual_eye_to_screen_dist;
-  vec2 pixels_per_tan_angle = pixels_per_meter * meters_per_tan_angle;
-
-  LOG_ALWAYS_FATAL_IF(0.0f == display_width_meters);
-  LOG_ALWAYS_FATAL_IF(0.0f == display_height_meters);
-  LOG_ALWAYS_FATAL_IF(0.0f == virtual_eye_to_screen_dist);
-
-  // Height of lenses from the bottom of the screen.
-  float lens_y_center = 0;
-  float bottom_dist = 0;
-  float top_dist = 0;
-
-  // bottom_display_dist and top_display_dist represent the distance from the
-  // lens center to the edge of the display.
-  float bottom_display_dist = 0;
-  float top_display_dist = 0;
-  switch (mount.GetVerticalAlignment()) {
-    case HeadMountMetrics::kBottom:
-      lens_y_center =
-          mount.GetTrayToLensDistance() - display.GetBorderSizeMeters();
-      bottom_dist = lens_y_center;
-      top_dist = lens_y_center;
-      bottom_display_dist = lens_y_center;
-      top_display_dist = display_height_meters - lens_y_center;
-      break;
-    case HeadMountMetrics::kCenter:
-      // TODO(hendrikw): This should respect the border size, but since we
-      //                 currently hard code the border size, it would break
-      //                 the distortion on some devices.  Revisit when border
-      //                 size is fixed.
-      lens_y_center = display_height_meters * 0.5f;
-      bottom_dist = lens_y_center;
-      top_dist = lens_y_center;
-      bottom_display_dist = lens_y_center;
-      top_display_dist = lens_y_center;
-      break;
-    case HeadMountMetrics::kTop:
-      lens_y_center = display_height_meters - (mount.GetTrayToLensDistance() -
-                                               display.GetBorderSizeMeters());
-      bottom_dist =
-          mount.GetTrayToLensDistance() - display.GetBorderSizeMeters();
-      top_dist = bottom_dist;
-      bottom_display_dist = lens_y_center;
-      top_display_dist = display_height_meters - lens_y_center;
-      break;
-  }
-
-  float inner_dist = mount.GetScreenCenterToLensDistance();
-  float outer_dist = display_width_meters * 0.5f - inner_dist;
-
-  // We don't take chromatic aberration into account yet for computing FOV,
-  // viewport, etc, so we only use the green channel for now. Note the actual
-  // Distort function *does* implement chromatic aberration.
-  const ColorChannelDistortion& distortion =
-      mount.GetColorChannelDistortion(kGreen);
-
-  vec2 outer_point(outer_dist / virtual_eye_to_screen_dist, 0.0f);
-  vec2 inner_point(inner_dist / virtual_eye_to_screen_dist, 0.0f);
-  vec2 bottom_point(0.0f, bottom_dist / virtual_eye_to_screen_dist);
-  vec2 top_point(0.0f, top_dist / virtual_eye_to_screen_dist);
-
-  float outer_angle = atanf(distortion.Distort(outer_point)[0]);
-  float inner_angle = atanf(distortion.Distort(inner_point)[0]);
-  float bottom_angle = atanf(distortion.Distort(bottom_point)[1]);
-  float top_angle = atanf(distortion.Distort(top_point)[1]);
-
-  for (EyeType eye : {kLeftEye, kRightEye}) {
-    const FieldOfView max_fov = mount.GetEyeMaxFov(eye);
-    float left_angle = (eye == kLeftEye) ? outer_angle : inner_angle;
-    float right_angle = (eye == kLeftEye) ? inner_angle : outer_angle;
-
-    eye_fov_[eye] = FieldOfView(std::min(left_angle, max_fov.GetLeft()),
-                                std::min(right_angle, max_fov.GetRight()),
-                                std::min(bottom_angle, max_fov.GetBottom()),
-                                std::min(top_angle, max_fov.GetTop()));
-
-    vec2 texture_vp_ta_p1 =
-        vec2(-tanf(eye_fov_[eye].GetLeft()), -tanf(eye_fov_[eye].GetBottom()));
-    vec2 texture_vp_ta_p2 =
-        vec2(tanf(eye_fov_[eye].GetRight()), tanf(eye_fov_[eye].GetTop()));
-    vec2 texture_vp_size_ta = texture_vp_ta_p2 - texture_vp_ta_p1;
-
-    vec2 texture_vp_sizef_pixels =
-        texture_vp_size_ta.array() * pixels_per_tan_angle.array();
-
-    vec2i texture_vp_size_pixels =
-        vec2i(static_cast<int32_t>(roundf(texture_vp_sizef_pixels[0])),
-              static_cast<int32_t>(roundf(texture_vp_sizef_pixels[1])));
-    int vp_start_x =
-        (eye == kLeftEye) ? 0 : eye_viewport_range_[kLeftEye].p2[0];
-
-    eye_viewport_range_[eye] =
-        Range2i::FromSize(vec2i(vp_start_x, 0), texture_vp_size_pixels);
-    float left_dist = (eye == kLeftEye) ? outer_dist : inner_dist;
-    float right_dist = (eye == kLeftEye) ? inner_dist : outer_dist;
-    vec2 screen_ta_p1(-left_dist / virtual_eye_to_screen_dist,
-                      -bottom_display_dist / virtual_eye_to_screen_dist);
-    vec2 screen_ta_p2(right_dist / virtual_eye_to_screen_dist,
-                      top_display_dist / virtual_eye_to_screen_dist);
-    vec2 screen_ta_size = screen_ta_p2 - screen_ta_p1;
-
-    // Align the tan angle coordinates to the nearest pixel.  This will ensure
-    // that the optical center doesn't straddle multiple pixels.
-    // TODO(hendrikw): verify that this works correctly for Daydream View.
-    vec2 tan_angle_per_pixel(screen_ta_size.array() /
-                             texture_vp_size_pixels.cast<float>().array());
-    vec2 pixel_p1(screen_ta_p1.array() / tan_angle_per_pixel.array());
-    vec2 pixel_shift(roundf(pixel_p1.x()) - pixel_p1.x(),
-                     roundf(pixel_p1.y()) - pixel_p1.y());
-    screen_ta_p1 +=
-        (tan_angle_per_pixel.array() * pixel_shift.array()).matrix();
-    screen_ta_p2 +=
-        (tan_angle_per_pixel.array() * pixel_shift.array()).matrix();
-
-    // Calculate the transformations needed for the distortions.
-    eye_tan_angle_from_norm_screen_matrix_[eye] =
-        TranslationMatrix(vec2(screen_ta_p1)) *
-        ScaleMatrix(screen_ta_size);
-    eye_tan_angle_from_norm_screen_inv_matrix_[eye] =
-        eye_tan_angle_from_norm_screen_matrix_[eye].inverse();
-
-    eye_norm_texture_from_tan_angle_inv_matrix_[eye] =
-        TranslationMatrix(texture_vp_ta_p1) *
-        ScaleMatrix(texture_vp_size_ta);
-    eye_norm_texture_from_tan_angle_matrix_[eye] =
-        eye_norm_texture_from_tan_angle_inv_matrix_[eye].inverse();
-  }
-  vec2i left_vp_size = eye_viewport_range_[kLeftEye].GetSize();
-  vec2i right_vp_size = eye_viewport_range_[kRightEye].GetSize();
-
-  recommended_render_target_size_ =
-      vec2i(left_vp_size[0] + right_vp_size[0],
-            std::max(left_vp_size[1], right_vp_size[1]));
-
-  display_range_ = Range2i::FromSize(vec2i(0, 0), display.GetSizePixels());
-
-  eye_from_head_matrix_[kLeftEye] = Eigen::Translation3f(
-      vec3(mount.GetScreenCenterToLensDistance(), 0.0f, 0.0f));
-  eye_from_head_matrix_[kRightEye] = Eigen::Translation3f(
-      vec3(-mount.GetScreenCenterToLensDistance(), 0.0f, 0.0f));
-}
-
-}  // namespace dvr
-}  // namespace android
diff --git a/libs/vr/libeds/device_metrics.cpp b/libs/vr/libeds/device_metrics.cpp
deleted file mode 100644
index 68ee186..0000000
--- a/libs/vr/libeds/device_metrics.cpp
+++ /dev/null
@@ -1,172 +0,0 @@
-#include <private/dvr/device_metrics.h>
-
-#include <cutils/properties.h>
-#include <private/dvr/head_mount_metrics.h>
-#include <private/dvr/identity_distortion.h>
-#include <private/dvr/polynomial_radial_distortion.h>
-#include <private/dvr/types.h>
-#include "include/private/dvr/display_metrics.h"
-
-namespace {
-
-static constexpr char kRPolynomial[] = "persist.dvr.r_poly";
-static constexpr char kGPolynomial[] = "persist.dvr.g_poly";
-static constexpr char kBPolynomial[] = "persist.dvr.b_poly";
-static constexpr char kLensDistance[] = "persist.dvr.lens_distance";
-static constexpr char kDisplayGap[] = "persist.dvr.display_gap";
-static constexpr char kVEyeToDisplay[] = "persist.dvr.v_eye_to_display";
-static constexpr char kFovIOBT[] = "persist.dvr.fov_iobt";
-static constexpr char kScreenSize[] = "persist.dvr.screen_size";
-
-bool StringToFloat(const char* str, float* result) {
-  char* endptr = nullptr;
-  *result = std::strtof(str, &endptr);
-  return !(str == endptr || !endptr);
-}
-
-std::vector<std::string> SplitString(const std::string& string_to_split,
-                                     char deliminator) {
-  std::vector<std::string> result;
-  std::string sub_string;
-  std::stringstream ss(string_to_split);
-  while (std::getline(ss, sub_string, deliminator))
-    result.push_back(sub_string);
-  return result;
-}
-
-std::vector<float> GetProperty(const char* name,
-                               const std::vector<float>& default_values) {
-  char prop[PROPERTY_VALUE_MAX + 1] = {};
-  property_get(name, prop, "");
-  std::vector<std::string> values = SplitString(prop, ',');
-  std::vector<float> results;
-  for (const auto& value : values) {
-    float result = 0.0f;
-    if (StringToFloat(value.c_str(), &result)) {
-      results.push_back(static_cast<float>(result));
-    }
-  }
-  if (results.empty()) {
-    return default_values;
-  }
-  return results;
-}
-
-float GetProperty(const char* name, float default_value) {
-  char prop[PROPERTY_VALUE_MAX + 1] = {};
-  property_get(name, prop, "");
-  float result = 0.0f;
-  if (StringToFloat(prop, &result)) {
-    return static_cast<float>(result);
-  }
-  return default_value;
-}
-
-float GetInterLensDistance() { return GetProperty(kLensDistance, 0.064f); }
-
-float GetDisplayGap() { return GetProperty(kDisplayGap, 0.0f); }
-
-float GetTrayToLensDistance() { return 0.035f; }
-
-float GetVEyeToDisplay() { return GetProperty(kVEyeToDisplay, 0.042f); }
-
-android::dvr::vec2 GetDisplaySize() {
-  static const std::vector<float> default_size = {0.0742177f, 0.131943f};
-  std::vector<float> sizes = GetProperty(kScreenSize, default_size);
-  if (sizes.size() != 0)
-    sizes = default_size;
-  return android::dvr::vec2(sizes[0], sizes[1]);
-}
-
-std::vector<float> GetMaxFOVs() {
-  static const std::vector<float> defaults = {43.7f, 47.8f, 54.2f, 54.2f};
-  std::vector<float> fovs = GetProperty(kFovIOBT, defaults);
-  if (fovs.size() != 4)
-    fovs = defaults;
-  for (auto& value : fovs) {
-    value = value * M_PI / 180.0f;
-  }
-  return fovs;
-}
-
-static const android::dvr::HeadMountMetrics::VerticalAlignment
-    kDefaultVerticalAlignment = android::dvr::HeadMountMetrics::kCenter;
-
-// Default border size in meters.
-static const float kScreenBorderSize = 0.004f;
-
-// Refresh rate.
-static const float kScreenRefreshRate = 60.0f;
-
-// Default display orientation is portrait.
-static const android::dvr::DisplayOrientation kDisplayOrientation =
-    android::dvr::DisplayOrientation::kPortrait;
-
-}  // anonymous namespace
-
-namespace android {
-namespace dvr {
-
-HeadMountMetrics CreateHeadMountMetrics(const FieldOfView& l_fov,
-                                        const FieldOfView& r_fov) {
-  static const std::vector<float> default_r = {
-      0.00103f, 2.63917f, -7.14427f, 8.98036f, -4.10586f, 0.83705f, 0.00130f};
-  static const std::vector<float> default_g = {
-      0.08944f, 2.26005f, -6.30924f, 7.94561f, -3.22788f, 0.45577f, 0.07300f};
-  static const std::vector<float> default_b = {
-      0.16364f, 1.94083f, -5.55033f, 6.89578f, -2.19053f, -0.04050f, 0.17380f};
-  std::vector<float> poly_r = GetProperty(kRPolynomial, default_r);
-  std::vector<float> poly_g = GetProperty(kGPolynomial, default_g);
-  std::vector<float> poly_b = GetProperty(kBPolynomial, default_b);
-
-  std::shared_ptr<ColorChannelDistortion> distortion_r(
-      new PolynomialRadialDistortion(poly_r));
-  std::shared_ptr<ColorChannelDistortion> distortion_g(
-      new PolynomialRadialDistortion(poly_g));
-  std::shared_ptr<ColorChannelDistortion> distortion_b(
-      new PolynomialRadialDistortion(poly_b));
-
-  return HeadMountMetrics(GetInterLensDistance(), GetTrayToLensDistance(),
-                          GetVEyeToDisplay(), kDefaultVerticalAlignment, l_fov,
-                          r_fov, distortion_r, distortion_g, distortion_b,
-                          HeadMountMetrics::EyeOrientation::kCCW0Degrees,
-                          HeadMountMetrics::EyeOrientation::kCCW0Degrees,
-                          (GetInterLensDistance() - GetDisplayGap()) / 2.0f);
-}
-
-HeadMountMetrics CreateHeadMountMetrics() {
-  std::vector<float> fovs = GetMaxFOVs();
-  FieldOfView l_fov(fovs[1], fovs[0], fovs[2], fovs[3]);
-  FieldOfView r_fov(fovs[0], fovs[1], fovs[2], fovs[3]);
-  return CreateHeadMountMetrics(l_fov, r_fov);
-}
-
-DisplayMetrics CreateDisplayMetrics(vec2i screen_size) {
-  android::dvr::vec2 size_in_meters = GetDisplaySize();
-  vec2 meters_per_pixel(size_in_meters[0] / static_cast<float>(screen_size[0]),
-                        size_in_meters[1] / static_cast<float>(screen_size[1]));
-  return DisplayMetrics(screen_size, meters_per_pixel, kScreenBorderSize,
-                        1000.0f / kScreenRefreshRate, kDisplayOrientation);
-}
-
-HeadMountMetrics CreateUndistortedHeadMountMetrics() {
-  std::vector<float> fovs = GetMaxFOVs();
-  FieldOfView l_fov(fovs[1], fovs[0], fovs[2], fovs[3]);
-  FieldOfView r_fov(fovs[0], fovs[1], fovs[2], fovs[3]);
-  return CreateUndistortedHeadMountMetrics(l_fov, r_fov);
-}
-
-HeadMountMetrics CreateUndistortedHeadMountMetrics(const FieldOfView& l_fov,
-                                                   const FieldOfView& r_fov) {
-  auto distortion_all = std::make_shared<IdentityDistortion>();
-
-  return HeadMountMetrics(GetInterLensDistance(), GetVEyeToDisplay(),
-                          GetVEyeToDisplay(), kDefaultVerticalAlignment, l_fov,
-                          r_fov, distortion_all, distortion_all, distortion_all,
-                          HeadMountMetrics::EyeOrientation::kCCW0Degrees,
-                          HeadMountMetrics::EyeOrientation::kCCW0Degrees,
-                          (GetInterLensDistance() - GetDisplayGap()) / 2.0f);
-}
-
-}  // namespace dvr
-}  // namespace android
diff --git a/libs/vr/libeds/display_metrics.cpp b/libs/vr/libeds/display_metrics.cpp
deleted file mode 100644
index e129395..0000000
--- a/libs/vr/libeds/display_metrics.cpp
+++ /dev/null
@@ -1,30 +0,0 @@
-#include "include/private/dvr/display_metrics.h"
-
-namespace android {
-namespace dvr {
-
-DisplayMetrics::DisplayMetrics(vec2i size_pixels, vec2 meters_per_pixel,
-                               float border_size_meters,
-                               float frame_duration_seconds,
-                               DisplayOrientation orientation)
-    : size_pixels_(size_pixels),
-      meters_per_pixel_(meters_per_pixel),
-      border_size_meters_(border_size_meters),
-      frame_duration_seconds_(frame_duration_seconds),
-      orientation_(orientation) {}
-
-void DisplayMetrics::ToggleOrientation() {
-  std::swap(size_pixels_[0], size_pixels_[1]);
-  std::swap(meters_per_pixel_[0], meters_per_pixel_[1]);
-  if (orientation_ == DisplayOrientation::kPortrait)
-    orientation_ = DisplayOrientation::kLandscape;
-  else
-    orientation_ = DisplayOrientation::kPortrait;
-}
-
-DisplayMetrics::DisplayMetrics()
-    : DisplayMetrics(vec2i(0, 0), vec2(0.0f, 0.0f), 0.0f, 0.0f,
-                     DisplayOrientation::kLandscape) {}
-
-}  // namespace dvr
-}  // namespace android
diff --git a/libs/vr/libeds/distortion_renderer.cpp b/libs/vr/libeds/distortion_renderer.cpp
deleted file mode 100644
index 13090ca..0000000
--- a/libs/vr/libeds/distortion_renderer.cpp
+++ /dev/null
@@ -1,792 +0,0 @@
-#include "include/private/dvr/distortion_renderer.h"
-
-#include <float.h>
-
-#include <string>
-
-#include <utils/Log.h>
-#define ATRACE_TAG ATRACE_TAG_GRAPHICS
-#include <utils/Trace.h>
-
-#include <log/log.h>
-#include <private/dvr/clock_ns.h>
-#include <private/dvr/composite_hmd.h>
-#include <private/dvr/debug.h>
-#include <private/dvr/graphics/gpu_profiler.h>
-#include <private/dvr/ortho.h>
-#include <private/dvr/sensor_constants.h>
-
-#define STRINGIFY2(s) #s
-#define STRINGIFY(s) STRINGIFY2(s)
-
-#define POSITION_ATTR 0
-#define VIEWPORT_COORD_R_ATTR 1
-#define VIEWPORT_COORD_G_ATTR 2
-#define VIEWPORT_COORD_B_ATTR 3
-
-// Pose data uniform buffer bindings. Must be sequential.
-#define POSE_BINDING 0
-#define POSE_BINDING2 1
-
-// Texture unit bindings. Must be sequential.
-// Things break if we start at binding 0 (samples come back black).
-#define SAMPLER_BINDING 1
-#define SAMPLER_BINDING2 2
-
-#define GLSL_VIGNETTE_FUNC                                       \
-  "float vignette(vec2 texCoords) {\n"                           \
-  "  const float fadeDist = 0.01;\n"                             \
-  "  const float fadeDistInv = 1.0 / fadeDist;\n"                \
-  "  const float inset = 0.02;\n"                                \
-  "  vec2 lowEdge = vec2(inset - fadeDist);\n"                   \
-  "  vec2 highEdge = vec2(1.0 - inset + fadeDist);\n"            \
-  "  vec2 vignetteMin = "                                        \
-  "    clamp(-fadeDistInv * (lowEdge - texCoords), 0.0, 1.0);\n" \
-  "  vec2 vignetteMax = "                                        \
-  "    clamp(fadeDistInv * (highEdge - texCoords), 0.0, 1.0);\n" \
-  "  vec2 vignette = vignetteMin * vignetteMax;\n"               \
-  "  return vignette.x * vignette.y;\n"                          \
-  "}\n"
-
-namespace {
-
-// If enabled, the pixel shader will blend by reading back the current pixel
-// from the framebuffer.
-// TODO(jbates) With framebuffer read coherency disabled, this seems to perform
-//   well enough. That requires a GL extension, so for now we disable this path.
-constexpr bool kUseFramebufferReadback = false;
-
-static const char* kVertexShaderChromaticAberrationString =
-    "uniform mat4 uProjectionMatrix;\n"
-    "layout(binding = " STRINGIFY(POSE_BINDING) ", std140)\n"
-    "uniform LateLatchData {\n"
-    "  mat4 uTexFromRecommendedViewportMatrix;\n"
-    "};\n"
-    "#ifdef COMPOSITE_LAYER_2\n"
-    "layout(binding = " STRINGIFY(POSE_BINDING2) ", std140)\n"
-    "uniform LateLatchData2 {\n"
-    "  mat4 uTexFromRecommendedViewportMatrix2;\n"
-    "};\n"
-    "#endif\n"
-    "uniform vec4 uTexXMinMax;\n"
-    "layout(location = " STRINGIFY(POSITION_ATTR) ") in vec2 aPosition;\n"
-    "layout(location = " STRINGIFY(VIEWPORT_COORD_R_ATTR)
-           ") in vec2 aViewportCoordsR;\n"
-    "layout(location = " STRINGIFY(VIEWPORT_COORD_G_ATTR)
-           ") in vec2 aViewportCoordsG;\n"
-    "layout(location = " STRINGIFY(VIEWPORT_COORD_B_ATTR)
-           ") in vec2 aViewportCoordsB;\n"
-    "mediump out vec4 vTexCoordsRG;\n"
-    "mediump out vec2 vTexCoordsB;\n"
-    "#ifdef COMPOSITE_LAYER_2\n"
-    "mediump out vec4 vTexCoordsRG2;\n"
-    "mediump out vec2 vTexCoordsB2;\n"
-    "#endif\n"
-    "mediump out vec3 vVignette;\n"
-    "\n" GLSL_VIGNETTE_FUNC
-    "void main(void) {\n"
-    "  vVignette.r = vignette(aViewportCoordsR);\n"
-    "  vVignette.g = vignette(aViewportCoordsG);\n"
-    "  vVignette.b = vignette(aViewportCoordsB);\n"
-    "  vec4 redTexCoords = (uTexFromRecommendedViewportMatrix * \n"
-    "                       vec4(aViewportCoordsR, 0., 1.));\n"
-    "  vec4 greenTexCoords = (uTexFromRecommendedViewportMatrix * \n"
-    "                         vec4(aViewportCoordsG, 0., 1.));\n"
-    "  vec4 blueTexCoords = (uTexFromRecommendedViewportMatrix * \n"
-    "                        vec4(aViewportCoordsB, 0., 1.));\n"
-    "  vTexCoordsRG.xy = redTexCoords.xy / redTexCoords.w;\n"
-    "  vTexCoordsRG.zw = greenTexCoords.xy / greenTexCoords.w;\n"
-    "  vTexCoordsB = blueTexCoords.xy / blueTexCoords.w;\n"
-    "  vTexCoordsRG.x = clamp(vTexCoordsRG.x, uTexXMinMax.x, uTexXMinMax.y);\n"
-    "  vTexCoordsRG.z = clamp(vTexCoordsRG.z, uTexXMinMax.x, uTexXMinMax.y);\n"
-    "  vTexCoordsB.x = clamp(vTexCoordsB.x, uTexXMinMax.x, uTexXMinMax.y);\n"
-    "#ifdef COMPOSITE_LAYER_2\n"
-    "  redTexCoords = (uTexFromRecommendedViewportMatrix2 * \n"
-    "                  vec4(aViewportCoordsR, 0., 1.));\n"
-    "  greenTexCoords = (uTexFromRecommendedViewportMatrix2 * \n"
-    "                    vec4(aViewportCoordsG, 0., 1.));\n"
-    "  blueTexCoords = (uTexFromRecommendedViewportMatrix2 * \n"
-    "                   vec4(aViewportCoordsB, 0., 1.));\n"
-    "  vTexCoordsRG2.xy = redTexCoords.xy / redTexCoords.w;\n"
-    "  vTexCoordsRG2.zw = greenTexCoords.xy / greenTexCoords.w;\n"
-    "  vTexCoordsB2 = blueTexCoords.xy / blueTexCoords.w;\n"
-    "  vTexCoordsRG2.x = clamp(vTexCoordsRG2.x,\n"
-    "                          uTexXMinMax.z, uTexXMinMax.w);\n"
-    "  vTexCoordsRG2.z = clamp(vTexCoordsRG2.z, uTexXMinMax.z,\n"
-    "                          uTexXMinMax.w);\n"
-    "  vTexCoordsB2.x = clamp(vTexCoordsB2.x, uTexXMinMax.z, uTexXMinMax.w);\n"
-    "#endif\n"
-    "  gl_Position = uProjectionMatrix * vec4(aPosition, 0., 1.);\n"
-    "}\n";
-
-static const char* kFragmentShaderChromaticAberrationString =
-    "#ifdef GL_ES\n"
-    "precision mediump float;\n"
-    "#endif\n"
-    " \n"
-    "layout(binding = " STRINGIFY(SAMPLER_BINDING) ")\n"
-    "uniform sampler2D uDistortionTexture; \n"
-    "mediump in vec4 vTexCoordsRG;\n"
-    "mediump in vec2 vTexCoordsB;\n"
-    "#ifdef COMPOSITE_LAYER_2\n"
-    "layout(binding = " STRINGIFY(SAMPLER_BINDING2) ")\n"
-    "uniform sampler2D uDistortionTexture2; \n"
-    "mediump in vec4 vTexCoordsRG2;\n"
-    "mediump in vec2 vTexCoordsB2;\n"
-    "#endif\n"
-    "mediump in vec3 vVignette;\n"
-    "#ifdef BLEND_WITH_PREVIOUS_LAYER \n"
-    "inout vec4 fragColor; \n"
-    "#else \n"
-    "out vec4 fragColor; \n"
-    "#endif \n"
-    " \n"
-    "void main(void) { \n"
-    "  vec4 ra = texture(uDistortionTexture, vTexCoordsRG.xy); \n"
-    "  vec4 ga = texture(uDistortionTexture, vTexCoordsRG.zw); \n"
-    "  vec4 ba = texture(uDistortionTexture, vTexCoordsB); \n"
-    "#ifdef BLEND_WITH_PREVIOUS_LAYER \n"
-    "  vec3 alpha1 = vec3(ra.a, ga.a, ba.a); \n"
-    "  vec3 color = (vec3(1.0) - alpha1) * fragColor.rgb + \n"
-    "               alpha1 * vec3(ra.r, ga.g, ba.b); \n"
-    "#else // BLEND_WITH_PREVIOUS_LAYER \n"
-    "  vec3 color = vec3(ra.r, ga.g, ba.b); \n"
-    "#endif // BLEND_WITH_PREVIOUS_LAYER \n"
-    "#ifdef COMPOSITE_LAYER_2 \n"
-    "  // Alpha blend layer 2 onto layer 1. \n"
-    "  vec4 ra2 = texture(uDistortionTexture2, vTexCoordsRG2.xy); \n"
-    "  vec4 ga2 = texture(uDistortionTexture2, vTexCoordsRG2.zw); \n"
-    "  vec4 ba2 = texture(uDistortionTexture2, vTexCoordsB2); \n"
-    "  vec3 color2 = vec3(ra2.r, ga2.g, ba2.b); \n"
-    "  vec3 alpha2 = vec3(ra2.a, ga2.a, ba2.a); \n"
-    "  color = (vec3(1.0) - alpha2) * color + alpha2 * color2; \n"
-    "#endif \n"
-    "#ifdef ALPHA_VIGNETTE\n"
-    "  fragColor = vec4(color, vVignette.b * ga.a); \n"
-    "#else // ALPHA_VIGNETTE\n"
-    "  fragColor = vec4(vVignette.rgb * color, ga.a); \n"
-    "#endif // ALPHA_VIGNETTE\n"
-    "} \n";
-
-static const char* kVertexShaderNoChromaticAberrationString =
-    "uniform mat4 uProjectionMatrix;\n"
-    "layout(binding = " STRINGIFY(POSE_BINDING) ", std140)\n"
-    "uniform LateLatchData {\n"
-    "  mat4 uTexFromRecommendedViewportMatrix;\n"
-    "};\n"
-    "#ifdef COMPOSITE_LAYER_2\n"
-    "layout(binding = " STRINGIFY(POSE_BINDING2) ", std140)\n"
-    "uniform LateLatchData2 {\n"
-    "  mat4 uTexFromRecommendedViewportMatrix2;\n"
-    "};\n"
-    "#endif\n"
-    "uniform vec4 uTexXMinMax;\n"
-    "layout(location = " STRINGIFY(POSITION_ATTR) ") in vec2 aPosition;\n"
-    "layout(location = " STRINGIFY(VIEWPORT_COORD_G_ATTR)
-           ") in vec2 aViewportCoords;\n"
-    "mediump out vec2 vTexCoords;\n"
-    "#ifdef COMPOSITE_LAYER_2\n"
-    "mediump out vec2 vTexCoords2;\n"
-    "#endif\n"
-    "mediump out vec3 vVignette;\n"
-    "\n" GLSL_VIGNETTE_FUNC
-    "void main(void) {\n"
-    "  float fVignette = vignette(aViewportCoords);\n"
-    "  vVignette = vec3(fVignette, fVignette, fVignette);\n"
-    "  vec4 texCoords = (uTexFromRecommendedViewportMatrix * \n"
-    "                    vec4(aViewportCoords, 0., 1.));\n"
-    "  vTexCoords = texCoords.xy / texCoords.w;\n"
-    "  vTexCoords.x = clamp(vTexCoords.x, uTexXMinMax.x, uTexXMinMax.y);\n"
-    "#ifdef COMPOSITE_LAYER_2\n"
-    "  texCoords = (uTexFromRecommendedViewportMatrix2 * \n"
-    "               vec4(aViewportCoords, 0., 1.));\n"
-    "  vTexCoords2 = texCoords.xy / texCoords.w;\n"
-    "  vTexCoords2.x = clamp(vTexCoords2.x, uTexXMinMax.z, uTexXMinMax.w);\n"
-    "#endif\n"
-    "  gl_Position = uProjectionMatrix * vec4(aPosition, 0., 1.);\n"
-    "}\n";
-
-static const char* kFragmentShaderNoChromaticAberrationString =
-    "#ifdef GL_ES\n"
-    "precision mediump float;\n"
-    "#endif\n"
-    " \n"
-    "layout(binding = " STRINGIFY(SAMPLER_BINDING) ")\n"
-    "uniform sampler2D uDistortionTexture; \n"
-    "mediump in vec2 vTexCoords;\n"
-    "#ifdef COMPOSITE_LAYER_2\n"
-    "layout(binding = " STRINGIFY(SAMPLER_BINDING2) ")\n"
-    "uniform sampler2D uDistortionTexture2; \n"
-    "mediump in vec2 vTexCoords2;\n"
-    "#endif\n"
-    "mediump in vec3 vVignette;\n"
-    "out vec4 fragColor;\n"
-    " \n"
-    "void main(void) { \n"
-    "  vec4 color = texture(uDistortionTexture, vTexCoords); \n"
-    "#ifdef COMPOSITE_LAYER_2 \n"
-    "  // Alpha blend layer 2 onto layer 1. \n"
-    "  vec4 color2 = texture(uDistortionTexture2, vTexCoords2); \n"
-    "  float alpha2 = color2.a; \n"
-    "  color.rgb = (1.0 - alpha2) * color.rgb + alpha2 * color2.rgb; \n"
-    "#endif \n"
-    "  fragColor = vec4(vVignette * color.rgb, color.a); \n"
-    "} \n";
-
-static const char* kVertexShaderSimpleVideoQuadString =
-    "uniform mat4 uProjectionMatrix;\n"
-    "layout(binding = " STRINGIFY(POSE_BINDING) ", std140)\n"
-    "uniform LateLatchData {\n"
-    "  mat4 uEdsCorrection;\n"
-    "};\n"
-    "uniform mat4 uTexFromEyeMatrix;\n"
-    "uniform mat4 uEyeFromViewportMatrix;\n"
-    "layout(location = " STRINGIFY(POSITION_ATTR) ") in vec2 aPosition;\n"
-    "layout(location = " STRINGIFY(VIEWPORT_COORD_G_ATTR)
-           ") in vec2 aViewportCoords;\n"
-    "mediump out vec2 vTexCoords;\n"
-    "void main(void) {\n"
-    "  mat4 m = uTexFromEyeMatrix * inverse(uEdsCorrection) * uEyeFromViewportMatrix;\n"
-    "  mat3 uTexFromViewportMatrix = inverse(mat3(m[0].xyw, m[1].xyw, m[3].xyw)); \n"
-    "  vec3 texCoords = uTexFromViewportMatrix * vec3(aViewportCoords, 1.0);\n"
-    "  vTexCoords = texCoords.xy / texCoords.z;\n"
-    "  gl_Position = uProjectionMatrix * vec4(aPosition, 0.0, 1.0);\n"
-    "}\n";
-
-static const char* kFragmentShaderSimpleVideoQuadString =
-    "#extension GL_OES_EGL_image_external_essl3 : enable\n"
-    " \n"
-    "#ifdef GL_ES\n"
-    "precision mediump float;\n"
-    "#endif\n"
-    " \n"
-    "layout(binding = " STRINGIFY(SAMPLER_BINDING) ")\n"
-    "uniform samplerExternalOES uDistortionTexture; \n"
-    "mediump in vec2 vTexCoords;\n"
-    "out vec4 fragColor;\n"
-    " \n"
-    "void main(void) { \n"
-    "  if (clamp(vTexCoords, 0.0, 1.0) != vTexCoords) { \n"
-    "    fragColor = vec4(0.0, 0.0, 0.0, 0.0); \n"
-    "  } else { \n"
-    "    fragColor = texture(uDistortionTexture, vTexCoords); \n"
-    "  } \n"
-    "} \n";
-
-}  // anonymous namespace
-
-namespace android {
-namespace dvr {
-
-// Note that converting from Clip Space ([-1,1]^3) to Viewport Space
-// for one eye ([0,1]x[0,1]) requires dividing by 2 in x and y.
-const mat4 DistortionRenderer::kViewportFromClipMatrix =
-    Eigen::Translation3f(vec3(0.5f, 0.5f, 0)) *
-    Eigen::DiagonalMatrix<float, 3>(vec3(0.5f, 0.5f, 1.0f));
-
-const mat4 DistortionRenderer::kClipFromViewportMatrix =
-    Eigen::DiagonalMatrix<float, 3>(vec3(2.0f, 2.0f, 1.0f)) *
-    Eigen::Translation3f(vec3(-0.5f, -0.5f, 0));
-
-void DistortionRenderer::EdsShader::load(const char* vertex,
-                                         const char* fragment, int num_layers,
-                                         bool use_alpha_vignette,
-                                         float rotation, bool flip_vertical,
-                                         bool blend_with_previous_layer) {
-  std::string vert_builder = "#version 310 es\n";
-  std::string frag_builder = "#version 310 es\n";
-  if (blend_with_previous_layer && kUseFramebufferReadback) {
-    frag_builder += "#extension GL_EXT_shader_framebuffer_fetch : require\n";
-  }
-
-  if (num_layers == 2) {
-    vert_builder += "#define COMPOSITE_LAYER_2\n";
-    frag_builder += "#define COMPOSITE_LAYER_2\n";
-  } else {
-    LOG_ALWAYS_FATAL_IF(num_layers != 1);
-  }
-  if (blend_with_previous_layer) {
-    // Check for unsupported shader combinations:
-    LOG_ALWAYS_FATAL_IF(num_layers != 1);
-    LOG_ALWAYS_FATAL_IF(use_alpha_vignette);
-    if (kUseFramebufferReadback)
-      frag_builder += "#define BLEND_WITH_PREVIOUS_LAYER\n";
-  }
-  if (use_alpha_vignette) {
-    vert_builder += "#define ALPHA_VIGNETTE\n";
-    frag_builder += "#define ALPHA_VIGNETTE\n";
-  }
-
-  vert_builder += vertex;
-  frag_builder += fragment;
-  pgm.Link(vert_builder, frag_builder);
-  LOG_ALWAYS_FATAL_IF(!pgm.IsUsable());
-
-  pgm.Use();
-
-  uProjectionMatrix =
-      glGetUniformLocation(pgm.GetProgram(), "uProjectionMatrix");
-  uTexFromEyeMatrix =
-      glGetUniformLocation(pgm.GetProgram(), "uTexFromEyeMatrix");
-  uEyeFromViewportMatrix =
-      glGetUniformLocation(pgm.GetProgram(), "uEyeFromViewportMatrix");
-  uTexXMinMax = glGetUniformLocation(pgm.GetProgram(), "uTexXMinMax");
-  CHECK_GL();
-
-  float vertical_multiply = flip_vertical ? -1.0 : 1.0;
-  mat4 projectionMatrix = OrthoMatrix(-0.5f, 0.5f, vertical_multiply * -0.5f,
-                                      vertical_multiply * 0.5f, -1.0f, 1.0f);
-
-  // Rotate the mesh into the screen's orientation.
-  // TODO(hendrikw): Once the display is finalized, and perhaps not portrait,
-  //                 look into removing this matrix altogether.
-  projectionMatrix =
-      projectionMatrix * Eigen::AngleAxisf(rotation, vec3::UnitZ());
-
-  LOG_ALWAYS_FATAL_IF(sizeof(mat4) != 4 * 4 * 4);
-  glUniformMatrix4fv(uProjectionMatrix, 1, false, projectionMatrix.data());
-}
-
-DistortionRenderer::DistortionRenderer(
-    const CompositeHmd& hmd, vec2i display_size, int distortion_mesh_resolution,
-    bool flip_texture_horizontally, bool flip_texture_vertically,
-    bool separated_eye_buffers, bool eds_enabled, bool late_latch_enabled)
-    : shader_type_(kChromaticAberrationCorrection),
-      eds_enabled_(eds_enabled),
-      chromatic_aberration_correction_enabled_(true),
-      use_alpha_vignette_(false),
-      distortion_mesh_resolution_(distortion_mesh_resolution),
-      last_distortion_texture_id_(0),
-      app_texture_target_(GL_TEXTURE_2D),
-      display_size_(display_size),
-      separated_eye_buffers_(separated_eye_buffers) {
-  ATRACE_NAME("DistortionRenderer::DistortionRenderer");
-
-  float device_rotation = 0.0;
-
-  if (eds_enabled_) {
-    // Late latch must be on if eds_enabled_ is true.
-    if (!late_latch_enabled) {
-      ALOGE("Cannot enable EDS without late latch. Force enabling late latch.");
-      late_latch_enabled = true;
-    }
-  }
-
-  // TODO(hendrikw): Look into moving this logic into DisplayMetrics.
-  if (hmd.GetDisplayMetrics().IsPortrait()) {
-    device_rotation = -M_PI / 2.0f;
-  }
-
-  // Create shader programs.
-  shaders_[kNoChromaticAberrationCorrection].load(
-      kVertexShaderNoChromaticAberrationString,
-      kFragmentShaderNoChromaticAberrationString, 1, false, device_rotation,
-      flip_texture_horizontally, false);
-  shaders_[kNoChromaticAberrationCorrectionTwoLayers].load(
-      kVertexShaderNoChromaticAberrationString,
-      kFragmentShaderNoChromaticAberrationString, 2, false, device_rotation,
-      flip_texture_horizontally, false);
-  shaders_[kChromaticAberrationCorrection].load(
-      kVertexShaderChromaticAberrationString,
-      kFragmentShaderChromaticAberrationString, 1, false, device_rotation,
-      flip_texture_horizontally, false);
-  shaders_[kChromaticAberrationCorrectionTwoLayers].load(
-      kVertexShaderChromaticAberrationString,
-      kFragmentShaderChromaticAberrationString, 2, false, device_rotation,
-      flip_texture_horizontally, false);
-  shaders_[kChromaticAberrationCorrectionAlphaVignette].load(
-      kVertexShaderChromaticAberrationString,
-      kFragmentShaderChromaticAberrationString, 1, true, device_rotation,
-      flip_texture_horizontally, false);
-  shaders_[kChromaticAberrationCorrectionAlphaVignetteTwoLayers].load(
-      kVertexShaderChromaticAberrationString,
-      kFragmentShaderChromaticAberrationString, 2, true, device_rotation,
-      flip_texture_horizontally, false);
-  shaders_[kChromaticAberrationCorrectionWithBlend].load(
-      kVertexShaderChromaticAberrationString,
-      kFragmentShaderChromaticAberrationString, 1, false, device_rotation,
-      flip_texture_horizontally, true);
-  shaders_[kSimpleVideoQuad].load(
-      kVertexShaderSimpleVideoQuadString,
-      kFragmentShaderSimpleVideoQuadString, 1, false, device_rotation,
-      flip_texture_horizontally, true);
-  CHECK_GL();
-
-  mat4 tex_from_recommended_viewport_matrix[2][2][2];
-  for (int eye = 0; eye < 2; ++eye) {
-    // Near and far planes don't actually matter for the clip_from_eye_matrix
-    // below since it is only used (for EDS) to transform coordinates for
-    // which the Z has been dropped.
-    static const float kNear = 0.1f, kFar = 100.0f;
-    const FieldOfView& fov =
-        (eye == kLeftEye ? hmd.GetEyeFov(kLeftEye) : hmd.GetEyeFov(kRightEye));
-    mat4 c_clip_from_eye_matrix = fov.GetProjectionMatrix(kNear, kFar);
-    mat4 c_eye_from_clip_matrix = c_clip_from_eye_matrix.inverse();
-
-    // Compute tex_from_recommended_viewport_matrix.
-
-    // flip_texture_vertically defines the default flip behavior.
-    // do_flip[0] should be the default, while do_flip[1] should be the
-    // inverse of the default.
-    int do_flip[2] = {flip_texture_vertically ? 1 : 0,
-                      flip_texture_vertically ? 0 : 1};
-    for (int flip = 0; flip < 2; ++flip) {
-      vec2 flip_scale(1.0f, do_flip[flip] ? -1.0f : 1.0f);
-      vec2 flip_offset(0.0f, do_flip[flip] ? 1.0f : 0.0f);
-
-      for (int separate_eye = 0; separate_eye < 2; ++separate_eye) {
-        vec2 viewport_corner_offset = (eye == kLeftEye || separate_eye)
-                                          ? vec2(0.0f, 0.0f)
-                                          : vec2(0.5f, 0.0f);
-        const vec2 txy = viewport_corner_offset + flip_offset;
-        const vec2 scalexy = vec2(separate_eye ? 1.0f : 0.5f, 1.0f);
-        tex_from_recommended_viewport_matrix[eye][flip][separate_eye] =
-            Eigen::Translation3f(vec3(txy.x(), txy.y(), 0.0f)) *
-            Eigen::DiagonalMatrix<float, 3>(vec3(flip_scale.x() * scalexy.x(),
-                                                 flip_scale.y(), scalexy.y()));
-
-        tex_from_eye_matrix_[eye][flip][separate_eye] =
-            tex_from_recommended_viewport_matrix[eye][flip][separate_eye] *
-            kViewportFromClipMatrix * c_clip_from_eye_matrix;
-      }
-    }
-
-    eye_from_viewport_matrix_[eye] =
-        c_eye_from_clip_matrix * kClipFromViewportMatrix;
-  }
-
-  // Create UBO for setting the EDS matrix to identity when EDS is disabled.
-  glGenBuffers(2 * 2 * 2, &uTexFromRecommendedViewportMatrix[0][0][0]);
-  for (int eye = 0; eye < 2; ++eye) {
-    for (int flip = 0; flip < 2; ++flip) {
-      for (int separate_eye = 0; separate_eye < 2; ++separate_eye) {
-        glBindBuffer(
-            GL_UNIFORM_BUFFER,
-            uTexFromRecommendedViewportMatrix[eye][flip][separate_eye]);
-        glBufferData(GL_UNIFORM_BUFFER, sizeof(mat4), 0, GL_STATIC_DRAW);
-        CHECK_GL();
-        mat4* mat = static_cast<mat4*>(glMapBufferRange(
-            GL_UNIFORM_BUFFER, 0, sizeof(mat4), GL_MAP_WRITE_BIT));
-        CHECK_GL();
-        *mat = tex_from_recommended_viewport_matrix[eye][flip][separate_eye];
-        glUnmapBuffer(GL_UNIFORM_BUFFER);
-      }
-    }
-  }
-  glBindBuffer(GL_UNIFORM_BUFFER, 0);
-
-  // Create distortion meshes and associated GL resources.
-  glGenBuffers(2, mesh_vbo_);
-  glGenVertexArrays(2, mesh_vao_);
-  glGenBuffers(2, mesh_ibo_);
-  RecomputeDistortion(hmd);
-
-  SetDisplaySize(display_size);
-
-  if (hmd.GetDisplayMetrics().IsPortrait()) {
-    eye_viewport_origin_[0] =
-        vec2i(0, flip_texture_horizontally ? 0 : display_size_[1] / 2);
-    eye_viewport_origin_[1] =
-        vec2i(0, flip_texture_horizontally ? display_size_[1] / 2 : 0);
-    eye_viewport_size_ = vec2i(display_size_[0], display_size_[1] / 2);
-  } else {
-    eye_viewport_origin_[0] = vec2i(0, 0);
-    eye_viewport_origin_[1] = vec2i(display_size_[0] / 2, 0);
-    eye_viewport_size_ = vec2i(display_size_[0] / 2, display_size_[1]);
-  }
-
-  CHECK_GL();
-}
-
-DistortionRenderer::~DistortionRenderer() {
-  glDeleteBuffers(2 * 2 * 2, &uTexFromRecommendedViewportMatrix[0][0][0]);
-  glDeleteBuffers(2, mesh_vbo_);
-  glDeleteVertexArrays(2, mesh_vao_);
-  glDeleteBuffers(2, mesh_ibo_);
-}
-
-void DistortionRenderer::ApplyDistortionCorrectionToTexture(
-    EyeType eye, const GLuint* texture_ids, const bool* vertical_flip,
-    const bool* separate_eye, const int* late_latch_layer, int num_textures,
-    bool blend_with_previous_layer, bool do_gl_state_prep) {
-  ATRACE_NAME(__PRETTY_FUNCTION__);
-
-  bool use_gl_blend = use_alpha_vignette_ ||
-                      (blend_with_previous_layer && !kUseFramebufferReadback);
-  if (use_gl_blend) {
-    glEnable(GL_BLEND);
-    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
-  }
-  DrawEye(eye, texture_ids, vertical_flip, separate_eye, late_latch_layer,
-          num_textures, blend_with_previous_layer, do_gl_state_prep);
-  if (use_gl_blend) {
-    glDisable(GL_BLEND);
-  }
-  CHECK_GL();
-}
-
-void DistortionRenderer::DrawVideoQuad(EyeType eye, int layer_i,
-                                       GLuint texture_id,
-                                       const mat4& transform) {
-  shaders_[kSimpleVideoQuad].use();
-
-  shaders_[kSimpleVideoQuad].SetTexFromEyeTransform(
-      tex_from_eye_matrix_[eye][0][1]);
-  shaders_[kSimpleVideoQuad].SetEyeFromViewportTransform(
-      transform * kClipFromViewportMatrix);
-
-  if (eds_enabled_) {
-    // Bind late latch view-projection UBO that is produced by AddEdsLateLatch.
-    late_latch_[layer_i]->BindUniformBuffer(
-        POSE_BINDING, LateLatch::kViewMatrix, eye);
-    CHECK_GL();
-  } else {
-    // When EDS is disabled we just set the matrix here with no pose offset.
-    glBindBufferBase(GL_UNIFORM_BUFFER, POSE_BINDING + layer_i,
-                     uTexFromRecommendedViewportMatrix[eye][0][1]);
-    CHECK_GL();
-  }
-
-  glActiveTexture(GL_TEXTURE0 + SAMPLER_BINDING);
-  glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture_id);
-  CHECK_GL();
-
-  glDrawElements(GL_TRIANGLE_STRIP, mesh_node_[eye].indices.size(),
-                 GL_UNSIGNED_SHORT, nullptr);
-
-  CHECK_GL();
-}
-
-void DistortionRenderer::DoLateLatch(uint32_t target_vsync_count,
-                                     const uint32_t* render_buffer_index,
-                                     const GLuint* render_pose_buffer_objects,
-                                     const bool* vertical_flip,
-                                     const bool* separate_eye,
-                                     int num_textures) {
-  if (eds_enabled_) {
-    LateLatchInput data;
-    memset(&data, 0, sizeof(data));
-    for (int ti = 0; ti < num_textures; ++ti) {
-      if (late_latch_[ti] == nullptr)
-        late_latch_[ti].reset(new LateLatch(false));
-
-      int flip_index = vertical_flip[ti] ? 1 : 0;
-      int separate_eye_i = separate_eye[ti] ? 1 : 0;
-      // Copy data into late latch input struct.
-      for (int eye = 0; eye < 2; ++eye) {
-        data.eds_mat1[eye] =
-            tex_from_eye_matrix_[eye][flip_index][separate_eye_i];
-        data.eds_mat2[eye] = eye_from_viewport_matrix_[eye];
-      }
-      data.pose_index = target_vsync_count & kPoseAsyncBufferIndexMask;
-      data.render_pose_index = render_buffer_index[ti];
-
-      late_latch_[ti]->AddEdsLateLatch(data, render_pose_buffer_objects[ti]);
-    }
-  }
-}
-
-void DistortionRenderer::PrepGlState(EyeType eye) {
-  glViewport(eye_viewport_origin_[eye].x(), eye_viewport_origin_[eye].y(),
-             eye_viewport_size_.x(), eye_viewport_size_.y());
-
-  glBindVertexArray(mesh_vao_[eye]);
-  CHECK_GL();
-
-  glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mesh_ibo_[eye]);
-  CHECK_GL();
-
-  if (!eds_enabled_) {
-    glMemoryBarrier(GL_UNIFORM_BARRIER_BIT);
-  }
-}
-
-void DistortionRenderer::ResetGlState(int num_textures) {
-  glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
-  glBindBuffer(GL_ARRAY_BUFFER, 0);
-  glBindVertexArray(0);
-  if (eds_enabled_) {
-    for (int ti = 0; ti < num_textures; ++ti)
-      glBindBufferBase(GL_UNIFORM_BUFFER, POSE_BINDING + ti, 0);
-  } else {
-    glBindBuffer(GL_UNIFORM_BUFFER, 0);
-  }
-
-  CHECK_GL();
-
-  // Unbind all texture inputs.
-  for (int ti = 0; ti < num_textures; ++ti) {
-    glActiveTexture(GL_TEXTURE0 + SAMPLER_BINDING + ti);
-    glBindTexture(app_texture_target_, 0);
-  }
-  glActiveTexture(GL_TEXTURE0);
-}
-
-void DistortionRenderer::DrawEye(EyeType eye, const GLuint* texture_ids,
-                                 const bool* vertical_flip,
-                                 const bool* separate_eye,
-                                 const int* late_latch_layer, int num_textures,
-                                 bool blend_with_previous_layer,
-                                 bool do_gl_state_prep) {
-  if (do_gl_state_prep)
-    PrepGlState(eye);
-
-  if (num_textures > kMaxLayers) {
-    ALOGE("Too many textures for DistortionRenderer");
-    num_textures = kMaxLayers;
-  }
-
-  LOG_ALWAYS_FATAL_IF(num_textures != 1 && num_textures != 2);
-
-  if (num_textures == 2) {
-    if (chromatic_aberration_correction_enabled_) {
-      if (use_alpha_vignette_) {
-        shader_type_ = kChromaticAberrationCorrectionAlphaVignetteTwoLayers;
-      } else {
-        shader_type_ = kChromaticAberrationCorrectionTwoLayers;
-      }
-    } else {
-      shader_type_ = kNoChromaticAberrationCorrectionTwoLayers;
-    }
-  } else {
-    if (chromatic_aberration_correction_enabled_) {
-      if (blend_with_previous_layer) {
-        shader_type_ = kChromaticAberrationCorrectionWithBlend;
-      } else if (use_alpha_vignette_) {
-        shader_type_ = kChromaticAberrationCorrectionAlphaVignette;
-      } else {
-        shader_type_ = kChromaticAberrationCorrection;
-      }
-    } else {
-      shader_type_ = kNoChromaticAberrationCorrection;
-    }
-  }
-  shaders_[shader_type_].use();
-
-  for (int ti = 0; ti < num_textures; ++ti) {
-    int flip_index = vertical_flip[ti] ? 1 : 0;
-    if (eds_enabled_) {
-      // Bind late latch view-projection UBO that is produced by
-      // AddEdsLateLatch.
-      late_latch_[late_latch_layer[ti]]->BindUniformBuffer(
-          POSE_BINDING + ti, LateLatch::kViewProjMatrix, eye);
-      CHECK_GL();
-    } else {
-      // When EDS is disabled we just set the matrix here with no pose offset.
-      // With app late-latching, we can't know the pose that the app used
-      // because it's in the app's framebuffer.
-      int separate_eye_i = separate_eye[ti] ? 1 : 0;
-      glBindBufferBase(
-          GL_UNIFORM_BUFFER, POSE_BINDING + ti,
-          uTexFromRecommendedViewportMatrix[eye][flip_index][separate_eye_i]);
-      CHECK_GL();
-    }
-
-    glActiveTexture(GL_TEXTURE0 + SAMPLER_BINDING + ti);
-    glBindTexture(app_texture_target_, texture_ids[ti]);
-    CHECK_GL();
-  }
-
-  // Prevents left eye data from bleeding into right eye and vice-versa.
-  vec2 layer_min_max[kMaxLayers];
-  for (int i = 0; i < kMaxLayers; ++i)
-    layer_min_max[i] = vec2(0.0f, 0.0f);
-  for (int ti = 0; ti < num_textures; ++ti) {
-    if (separate_eye[ti]) {
-      layer_min_max[ti] = vec2(0.0f, 1.0f);  // Use the whole texture.
-    } else if (eye == kLeftEye) {
-      layer_min_max[ti] = vec2(0.0f, 0.499f);
-    } else {
-      layer_min_max[ti] = vec2(0.501f, 1.0f);
-    }
-  }
-  // The second layer stores its x min and max in the z,w slots of the vec4.
-  vec4 xTexMinMax(layer_min_max[0].x(), layer_min_max[0].y(),
-                  layer_min_max[1].x(), layer_min_max[1].y());
-
-  glUniform4fv(shaders_[shader_type_].uTexXMinMax, 1, &xTexMinMax[0]);
-  CHECK_GL();
-
-  glDrawElements(GL_TRIANGLE_STRIP, mesh_node_[eye].indices.size(),
-                 GL_UNSIGNED_SHORT, nullptr);
-  CHECK_GL();
-  if (do_gl_state_prep)
-    ResetGlState(num_textures);
-}
-
-void DistortionRenderer::SetDisplaySize(vec2i display_size) {
-  display_size_ = display_size;
-}
-
-void DistortionRenderer::SetEdsEnabled(bool enabled) { eds_enabled_ = enabled; }
-
-void DistortionRenderer::RecomputeDistortion(const CompositeHmd& hmd) {
-  using std::placeholders::_1;
-  using std::placeholders::_2;
-  using std::placeholders::_3;
-  using std::placeholders::_4;
-  DistortionFunction distortion_function =
-      std::bind(&CompositeHmd::ComputeDistortedVertex, &hmd, _1, _2, _3, _4);
-
-  for (int i = 0; i < 2; ++i) {
-    mesh_node_[i] =
-        BuildDistortionMesh(static_cast<EyeType>(i),
-                            distortion_mesh_resolution_, distortion_function);
-
-    glBindVertexArray(mesh_vao_[i]);
-
-    glBindBuffer(GL_ARRAY_BUFFER, mesh_vbo_[i]);
-    glBufferData(GL_ARRAY_BUFFER,
-                 sizeof(EdsVertex) * mesh_node_[i].vertices.size(),
-                 &mesh_node_[i].vertices.front(), GL_STATIC_DRAW);
-
-    glEnableVertexAttribArray(POSITION_ATTR);
-    glEnableVertexAttribArray(VIEWPORT_COORD_R_ATTR);
-    glEnableVertexAttribArray(VIEWPORT_COORD_G_ATTR);
-    glEnableVertexAttribArray(VIEWPORT_COORD_B_ATTR);
-
-    glVertexAttribPointer(
-        POSITION_ATTR, 2, GL_FLOAT, GL_FALSE, sizeof(EdsVertex),
-        reinterpret_cast<void*>(offsetof(EdsVertex, position)));
-
-    glVertexAttribPointer(
-        VIEWPORT_COORD_R_ATTR, 2, GL_FLOAT, GL_FALSE, sizeof(EdsVertex),
-        reinterpret_cast<void*>(offsetof(EdsVertex, red_viewport_coords)));
-
-    glVertexAttribPointer(
-        VIEWPORT_COORD_G_ATTR, 2, GL_FLOAT, GL_FALSE, sizeof(EdsVertex),
-        reinterpret_cast<void*>(offsetof(EdsVertex, green_viewport_coords)));
-
-    glVertexAttribPointer(
-        VIEWPORT_COORD_B_ATTR, 2, GL_FLOAT, GL_FALSE, sizeof(EdsVertex),
-        reinterpret_cast<void*>(offsetof(EdsVertex, blue_viewport_coords)));
-
-    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mesh_ibo_[i]);
-    glBufferData(GL_ELEMENT_ARRAY_BUFFER,
-                 sizeof(uint16_t) * mesh_node_[i].indices.size(),
-                 &mesh_node_[i].indices.front(), GL_STATIC_DRAW);
-    CHECK_GL();
-  }
-  glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
-  glBindBuffer(GL_ARRAY_BUFFER, 0);
-
-  glBindVertexArray(0);
-}
-
-bool DistortionRenderer::GetLastEdsPose(LateLatchOutput* out_data, int layer_id) const {
-  if (layer_id >= kMaxLayers) {
-    ALOGE("Accessing invalid layer %d", layer_id);
-    return false;
-  }
-
-  if (late_latch_[layer_id] != nullptr) {
-    late_latch_[layer_id]->CaptureOutputData(out_data);
-    return true;
-  } else {
-    ALOGE("Late latch shader not enabled.");
-    return false;
-  }
-}
-
-}  // namespace dvr
-}  // namespace android
diff --git a/libs/vr/libeds/eds.cpp b/libs/vr/libeds/eds.cpp
deleted file mode 100644
index 8af5b27..0000000
--- a/libs/vr/libeds/eds.cpp
+++ /dev/null
@@ -1,35 +0,0 @@
-#include <dvr/eds.h>
-
-#include <private/dvr/graphics/vr_gl_extensions.h>
-#include <private/dvr/late_latch.h>
-#include <private/dvr/types.h>
-
-// TODO(jbates) delete this file and eds.h
-
-extern "C" int dvrEdsInit(bool with_late_latch) { return 0; }
-
-extern "C" void dvrEdsDeinit() {}
-
-extern "C" int dvrEdsCapturePoseAsync(int eye, uint32_t target_vsync_count,
-                                      const float* projection_matrix,
-                                      const float* eye_from_head_matrix,
-                                      const float* pose_offset_matrix) {
-  return 0;
-}
-
-extern "C" int dvrEdsBindPose(int eye, uint32_t ubo_binding, intptr_t offset,
-                              ssize_t size) {
-  return 0;
-}
-
-extern "C" int dvrEdsBlitPose(int eye, int viewport_width,
-                              int viewport_height) {
-  return 0;
-}
-
-extern "C" int dvrEdsBlitPoseFromCpu(int eye, int viewport_width,
-                                     int viewport_height,
-                                     const float* pose_quaternion,
-                                     const float* pose_position) {
-  return 0;
-}
diff --git a/libs/vr/libeds/eds_mesh.cpp b/libs/vr/libeds/eds_mesh.cpp
deleted file mode 100644
index 01a90cf..0000000
--- a/libs/vr/libeds/eds_mesh.cpp
+++ /dev/null
@@ -1,136 +0,0 @@
-#include "include/private/dvr/eds_mesh.h"
-
-#include <log/log.h>
-#include <math.h>
-
-#include <private/dvr/types.h>
-
-namespace {
-
-using android::dvr::EdsVertex;
-using android::dvr::EyeType;
-using android::dvr::DistortionFunction;
-using android::dvr::vec2;
-
-// Computes the vertices for a distortion mesh with resolution |resolution| and
-// distortion provided by |distortion_function| and stores them in |vertices|.
-static void ComputeDistortionMeshVertices(
-    EdsVertex* vertices, int resolution,
-    const DistortionFunction& distortion_function, EyeType eye) {
-  for (int row = 0; row < resolution; row++) {
-    for (int col = 0; col < resolution; col++) {
-      const float x_norm =
-          static_cast<float>(col) / (static_cast<float>(resolution - 1U));
-      const float y_norm =
-          static_cast<float>(row) / (static_cast<float>(resolution - 1U));
-
-      const vec2 xy_norm(x_norm, y_norm);
-      const size_t index = col * resolution + row;
-
-      // Evaluate distortion function to get the new coordinates for each color
-      // channel. The distortion function returns the new coordinates relative
-      // to a full viewport with 0 <= x <= 1 for each eye.
-      vec2 coords[3];
-      distortion_function(eye, xy_norm, &vertices[index].position, coords);
-
-      // Store distortion mapping in texture coordinates.
-      vertices[index].red_viewport_coords = coords[0];
-      vertices[index].green_viewport_coords = coords[1];
-      vertices[index].blue_viewport_coords = coords[2];
-    }
-  }
-}
-
-// Computes the triangle strip indices for a distortion mesh with resolution
-// |resolution| and stores them in |indices|.
-static void ComputeDistortionMeshIndices(uint16_t* indices, int resolution) {
-  // The following strip method has been used in the Cardboard SDK
-  // (java/com/google/vrtoolkit/cardboard/DistortionRenderer.java) and has
-  // originally been described at:
-  //
-  // http://dan.lecocq.us/wordpress/2009/12/25/triangle-strip-for-grids-a-construction/
-  //
-  // For a grid with 4 rows and 4 columns of vertices, the strip would
-  // look like:
-  //                             ↻
-  //         0    -    4    -    8    -   12
-  //         ↓    ↗    ↓    ↗    ↓    ↗    ↓
-  //         1    -    5    -    9    -   13
-  //         ↓    ↖    ↓    ↖    ↓    ↖    ↓
-  //         2    -    6    -   10    -   14
-  //         ↓    ↗    ↓    ↗    ↓    ↗    ↓
-  //         3    -    7    -   11    -   15
-  //                   ↺
-  //
-  // Note the little circular arrows next to 7 and 8 that indicate
-  // repeating that vertex once so as to produce degenerate triangles.
-  //
-  // To facilitate scanline racing, the vertex order is left to right.
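-  //
-  // For example, with resolution = 3 (and vertex index = col * resolution +
-  // row, matching ComputeDistortionMeshVertices), the loop below produces the
-  // 13 indices
-  //   0 3 1 4 2 5  5  5 8 4 7 3 6
-  // where index 5 is repeated to form the degenerate triangles that join the
-  // two rows of quads.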
-
-  int16_t index_offset = 0;
-  int16_t vertex_offset = 0;
-  for (int row = 0; row < resolution - 1; ++row) {
-    if (row > 0) {
-      indices[index_offset] = indices[index_offset - 1];
-      ++index_offset;
-    }
-    for (int col = 0; col < resolution; ++col) {
-      if (col > 0) {
-        if (row % 2 == 0) {
-          // Move right on even rows.
-          ++vertex_offset;
-        } else {
-          --vertex_offset;
-        }
-      }
-      // A cast to uint16_t is safe here because |vertex_offset| never drops
-      // below zero in this loop: it starts at zero, increments happen on even
-      // rows before any decrements on odd rows, and the per-row addition of
-      // |resolution| keeps it non-negative throughout.
-      indices[index_offset++] = static_cast<uint16_t>(vertex_offset);
-      indices[index_offset++] = static_cast<uint16_t>(
-          vertex_offset + static_cast<int16_t>(resolution));
-    }
-    vertex_offset =
-        static_cast<int16_t>(static_cast<int>(resolution) + vertex_offset);
-  }
-}
-
-}  // anonymous namespace
-
-namespace android {
-namespace dvr {
-
-// Builds a distortion mesh of resolution |resolution| using the distortion
-// provided by |distortion_function| for |eye|.
-EdsMesh BuildDistortionMesh(EyeType eye, int resolution,
-                            const DistortionFunction& distortion_function) {
-  LOG_ALWAYS_FATAL_IF(resolution <= 2);
-
-  // Number of indices produced by the strip method
-  // (see comment in ComputeDistortionMeshIndices):
-  //
-  //     1 vertex per triangle
-  //     2 triangles per quad, (rows - 1) * (cols - 1) quads
-  //     2 vertices at the start of each row for the first triangle
-  //     1 extra vertex per row (except first and last) for a
-  //       degenerate triangle
-  //
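-  // For example, resolution = 4 gives 4 * (2 * 4 - 1) - 2 = 26 indices:
-  // 2 * 4 indices for each of the 3 rows of quads plus 2 extra indices for
-  // the degenerate triangles between rows.
-  //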
-  const uint16_t index_count =
-      static_cast<uint16_t>(resolution * (2 * resolution - 1U) - 2U);
-  const uint16_t vertex_count = static_cast<uint16_t>(resolution * resolution);
-
-  EdsMesh mesh;
-  mesh.vertices.resize(vertex_count);
-  mesh.indices.resize(index_count);
-
-  // Populate vertex and index buffer.
-  ComputeDistortionMeshVertices(&mesh.vertices[0], resolution,
-                                distortion_function, eye);
-  ComputeDistortionMeshIndices(&mesh.indices[0], resolution);
-
-  return mesh;
-}
-
-}  // namespace dvr
-}  // namespace android
diff --git a/libs/vr/libeds/include/CPPLINT.cfg b/libs/vr/libeds/include/CPPLINT.cfg
deleted file mode 100644
index 2f8a3c0..0000000
--- a/libs/vr/libeds/include/CPPLINT.cfg
+++ /dev/null
@@ -1 +0,0 @@
-filter=-build/header_guard
diff --git a/libs/vr/libeds/include/dvr/eds.h b/libs/vr/libeds/include/dvr/eds.h
deleted file mode 100644
index 37b1297..0000000
--- a/libs/vr/libeds/include/dvr/eds.h
+++ /dev/null
@@ -1,150 +0,0 @@
-#ifndef ANDROID_DVR_EDS_H_
-#define ANDROID_DVR_EDS_H_
-
-#include <stdbool.h>
-#include <stdint.h>
-#include <sys/cdefs.h>
-#include <sys/types.h>
-
-__BEGIN_DECLS
-
-// This struct aligns with GLSL uniform blocks with std140 layout.
-// std140 allows padding between certain types, so padding must be explicitly
-// added as struct members.
-struct __attribute__((__packed__)) DvrLateLatchData {
-  // Column-major order.
-  float view_proj_matrix[16];
-  // Column-major order.
-  float view_matrix[16];
-  float pose_quaternion[4];
-  float pose_position[4];
-};
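-
-// For illustration only (a sketch, not part of this API): a GLSL uniform
-// block with a matching std140 layout could be declared as
-//
-//   layout(binding = 0, std140) uniform LateLatchData {
-//     mat4 uViewProjection;   // view_proj_matrix
-//     mat4 uViewMatrix;       // view_matrix
-//     vec4 uPoseQuaternion;   // pose_quaternion
-//     vec4 uPosePosition;     // pose_position
-//   };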
-
-//
-// These APIs are not thread safe and must be called on a single thread with an
-// actively bound GL context corresponding to a display surface.
-//
-
-// Prepares EDS and Late Latching system. Idempotent if called more than once.
-// The target GL context must be created and bound.
-//
-// If |with_late_latch| is true, a thread will be created that asynchronously
-// updates the pose in memory.
-//
-// The following GL states are modified as follows:
-// glBindBuffer(GL_ARRAY_BUFFER, 0);
-// glBindBuffer(GL_UNIFORM_BUFFER, 0);
-//
-// Returns 0 on success, negative error code on failure.
-// Check GL errors with glGetError for other error conditions.
-int dvrEdsInit(bool with_late_latch);
-
-// Stops and destroys the EDS Late Latching system.
-void dvrEdsDeinit();
-
-// Submits a GL draw command that will capture the latest head pose into a
-// uniform
-// buffer object. This should be called twice per frame, before the app begins
-// drawing for each eye.
-// For each eye, a later call to dvrEdsBlitPose will write this pose into
-// the application framebuffer corner so that the EDS service knows what pose
-// the frame was rendered with.
-//
-// |eye| is 0 for left eye and 1 for right eye.
-//
-// The following GL states are modified as follows:
-// glUseProgram(0);
-// glBindBuffer(GL_UNIFORM_BUFFER, 0);
-// glBindBufferBase(GL_TRANSFORM_FEEDBACK_BUFFER, 0, id);
-// glDisable(GL_RASTERIZER_DISCARD);
-//
-// Returns 0 on success, negative error code on failure:
-//   EPERM - dvrEdsInit(true) was not called.
-// Check GL errors with glGetError for other error conditions.
-int dvrEdsCapturePoseAsync(int eye, uint32_t target_vsync_count,
-                           const float* projection_matrix,
-                           const float* eye_from_head_matrix,
-                           const float* pose_offset_matrix);
-
-// Binds the late-latch output data as a GL_UNIFORM_BUFFER so that your vertex
-// shaders can use the latest head pose. For example, to bind just the
-// view_matrix from the output:
-//
-// dvrEdsBindPose(eye, BINDING,
-//                       offsetof(DvrLateLatchData, view_matrix),
-//                       sizeof(DvrLateLatchData::view_matrix));
-//
-// Or more commonly, bind the view projection matrix:
-//
-// dvrEdsBindPose(eye, BINDING,
-//                       offsetof(DvrLateLatchData, view_proj_matrix),
-//                       sizeof(DvrLateLatchData::view_proj_matrix));
-//
-// BINDING in the above examples is the binding location of the uniform
-// interface block in the GLSL shader.
-//
-// Shader example (3 would be the |ubo_binding| passed to this function):
-//  layout(binding = 3, std140) uniform LateLatchData {
-//    mat4 uViewProjection;
-//  };
-//
-// |eye| is 0 for left eye and 1 for right eye.
-//
-// The following GL states are modified as follows:
-// glBindBuffer(GL_UNIFORM_BUFFER, ...);
-// glBindBufferRange(GL_UNIFORM_BUFFER, ...);
-//
-// To clear the binding, call glBindBuffer(GL_UNIFORM_BUFFER, 0);
-//
-// Returns 0 on success, negative error code on failure:
-//   EPERM - dvrEdsInit(true) was not called.
-// Check GL errors with glGetError for other error conditions.
-int dvrEdsBindPose(int eye, uint32_t ubo_binding, intptr_t offset,
-                   ssize_t size);
-
-// DEPRECATED
-//
-// Blits the pose captured previously into the currently bound framebuffer.
-// The current framebuffer is assumed to be the default framebuffer 0, the
-// surface that will be sent to the display and have EDS and lens warp applied
-// to it.
-//
-// |eye| is 0 for left eye and 1 for right eye.
-// |viewport_width| is the width of the viewport for this eye, which is
-//                  usually half the width of the framebuffer.
-// |viewport_height| is the height of the viewport for this eye, which is
-//                   usually the height of the framebuffer.
-//
-// The following GL states are modified as follows:
-// glUseProgram(0);
-// glBindBuffer(GL_UNIFORM_BUFFER, 0);
-// glBindBufferRange(GL_UNIFORM_BUFFER, 23, ...);
-//
-// Returns 0 on success, negative error code on failure:
-//   EPERM - dvrEdsInit was not called.
-// Check GL errors with glGetError for other error conditions.
-int dvrEdsBlitPose(int eye, int viewport_width, int viewport_height);
-
-// DEPRECATED
-//
-// Same as dvrEdsBlitPose except that the pose is provided as a
-// parameter instead of getting it from dvrEdsBindPose. This is for
-// applications that want EDS but do not want late-latching.
-//
-// |pose_quaternion| should point to 4 floats that represent a quaternion.
-// |pose_position| should point to 3 floats that represent x,y,z position.
-//
-// GL states are modified as follows:
-// glUseProgram(0);
-// glBindBuffer(GL_UNIFORM_BUFFER, 0);
-// glBindBufferBase(GL_UNIFORM_BUFFER, 23, ...);
-//
-// Returns 0 on success, negative error code on failure:
-//   EPERM - dvrEdsInit was not called.
-// Check GL errors with glGetError for other error conditions.
-int dvrEdsBlitPoseFromCpu(int eye, int viewport_width, int viewport_height,
-                          const float* pose_quaternion,
-                          const float* pose_position);
-
-__END_DECLS
-
-#endif  // ANDROID_DVR_EDS_H_
diff --git a/libs/vr/libeds/include/private/dvr/color_channel_distortion.h b/libs/vr/libeds/include/private/dvr/color_channel_distortion.h
deleted file mode 100644
index 4e612cd..0000000
--- a/libs/vr/libeds/include/private/dvr/color_channel_distortion.h
+++ /dev/null
@@ -1,30 +0,0 @@
-#ifndef ANDROID_DVR_COLOR_CHANNEL_DISTORTION_H_
-#define ANDROID_DVR_COLOR_CHANNEL_DISTORTION_H_
-
-#include <private/dvr/types.h>
-
-namespace android {
-namespace dvr {
-
-// ColorChannelDistortion encapsulates the way one color channel (wavelength)
-// is distorted optically when an image is viewed through a lens.
-class ColorChannelDistortion {
- public:
-  virtual ~ColorChannelDistortion() {}
-
-  // Given a 2d point p, returns the corresponding distorted point.
-  // The units of both the input and output points are tan-angle units,
-  // which can be computed as the distance on the screen divided by
-  // distance from the virtual eye to the screen.  For both the input
-  // and output points, the intersection of the optical axis of the lens
-  // with the screen defines the origin, the x axis points right, and
-  // the y axis points up.
-  virtual vec2 Distort(vec2 p) const = 0;
-
-  virtual vec2 DistortInverse(vec2 p) const = 0;
-};
-
-}  // namespace dvr
-}  // namespace android
-
-#endif  // ANDROID_DVR_COLOR_CHANNEL_DISTORTION_H_
diff --git a/libs/vr/libeds/include/private/dvr/composite_hmd.h b/libs/vr/libeds/include/private/dvr/composite_hmd.h
deleted file mode 100644
index 70727e0..0000000
--- a/libs/vr/libeds/include/private/dvr/composite_hmd.h
+++ /dev/null
@@ -1,89 +0,0 @@
-#ifndef ANDROID_DVR_COMPOSITE_HMD_H_
-#define ANDROID_DVR_COMPOSITE_HMD_H_
-
-#include <private/dvr/display_metrics.h>
-#include <private/dvr/head_mount_metrics.h>
-#include <private/dvr/types.h>
-
-namespace android {
-namespace dvr {
-
-// An intermediate structure composed of a head mount (described by
-// HeadMountMetrics) and a display (described by DisplayMetrics).
-class CompositeHmd {
- public:
-  // Constructs a new CompositeHmd given a HeadMountMetrics and a
-  // DisplayMetrics.
-  CompositeHmd(const HeadMountMetrics& head_mount_metrics,
-               const DisplayMetrics& display_metrics);
-
-  CompositeHmd(CompositeHmd&& composite_hmd) = delete;
-  CompositeHmd(const CompositeHmd& composite_hmd) = delete;
-  CompositeHmd& operator=(CompositeHmd&& composite_hmd) = delete;
-  CompositeHmd& operator=(const CompositeHmd& composite_hmd) = delete;
-
-  // Headset metadata.
-  float GetTargetFrameDuration() const;
-  void ComputeDistortedVertex(EyeType eye, vec2 uv_in, vec2* vertex_out,
-                              vec2* uv_out) const;
-
-  // Eye-unspecific view accessors.
-  vec2i GetRecommendedRenderTargetSize() const;
-  Range2i GetDisplayRange() const;
-
-  // Eye-specific view accessors.
-  mat4 GetEyeFromHeadMatrix(EyeType eye) const;
-  FieldOfView GetEyeFov(EyeType eye) const;
-  Range2i GetEyeViewportBounds(EyeType eye) const;
-
-  // Set HeadMountMetrics and recompute everything that depends on
-  // HeadMountMetrics.
-  void SetHeadMountMetrics(const HeadMountMetrics& head_mount_metrics);
-
-  // Returns a reference to the |head_mount_metrics_| member.
-  const HeadMountMetrics& GetHeadMountMetrics() const;
-
-  // Set DisplayMetrics and recompute everything that depends on DisplayMetrics.
-  void SetDisplayMetrics(const DisplayMetrics& display_metrics);
-
-  // Returns a reference to the current display metrics.
-  const DisplayMetrics& GetDisplayMetrics() const;
-
-  // Compute the distorted point for a single channel.
-  vec2 ComputeDistortedPoint(EyeType eye, vec2 position,
-                             RgbColorChannel channel) const;
-
-  // Compute the inverse distorted point for a single channel.
-  vec2 ComputeInverseDistortedPoint(EyeType eye, vec2 position,
-                                    RgbColorChannel channel) const;
-
- private:
-  FieldOfView eye_fov_[2];
-  Range2i eye_viewport_range_[2];
-  mat4 eye_from_head_matrix_[2];
-  Range2i display_range_;
-  vec2i recommended_render_target_size_;
-
-  // Per-eye scale and translation to convert from normalized Screen Space
-  // ([0:1]x[0:1]) to tan-angle space.
-  mat3 eye_tan_angle_from_norm_screen_matrix_[2];
-  mat3 eye_tan_angle_from_norm_screen_inv_matrix_[2];
-
-  // Per-eye scale and translation to convert from tan-angle space to normalized
-  // Texture Space ([0:1]x[0:1]).
-  mat3 eye_norm_texture_from_tan_angle_matrix_[2];
-  mat3 eye_norm_texture_from_tan_angle_inv_matrix_[2];
-
-  HeadMountMetrics head_mount_metrics_;
-  DisplayMetrics display_metrics_;
-
-  // Called by SetHeadMountMetrics/SetDisplayMetrics after the metrics change.
-  // Recomputes the state derived from head_mount_metrics_ and
-  // display_metrics_ (the per-eye FOVs, viewports and matrices above).
-  void MetricsChanged();
-};
-
-}  // namespace dvr
-}  // namespace android
-
-#endif  // ANDROID_DVR_COMPOSITE_HMD_H_
diff --git a/libs/vr/libeds/include/private/dvr/device_metrics.h b/libs/vr/libeds/include/private/dvr/device_metrics.h
deleted file mode 100644
index 7985f28..0000000
--- a/libs/vr/libeds/include/private/dvr/device_metrics.h
+++ /dev/null
@@ -1,22 +0,0 @@
-#ifndef ANDROID_DVR_DEVICE_METRICS_H_
-#define ANDROID_DVR_DEVICE_METRICS_H_
-
-#include <private/dvr/display_metrics.h>
-#include <private/dvr/head_mount_metrics.h>
-#include <private/dvr/types.h>
-
-namespace android {
-namespace dvr {
-
-HeadMountMetrics CreateHeadMountMetrics();
-HeadMountMetrics CreateHeadMountMetrics(const FieldOfView& l_fov,
-                                        const FieldOfView& r_fov);
-HeadMountMetrics CreateUndistortedHeadMountMetrics();
-HeadMountMetrics CreateUndistortedHeadMountMetrics(const FieldOfView& l_fov,
-                                                   const FieldOfView& r_fov);
-DisplayMetrics CreateDisplayMetrics(vec2i screen_size);
-
-}  // namespace dvr
-}  // namespace android
-
-#endif  // ANDROID_DVR_DEVICE_METRICS_H_
diff --git a/libs/vr/libeds/include/private/dvr/display_metrics.h b/libs/vr/libeds/include/private/dvr/display_metrics.h
deleted file mode 100644
index 87d9d04..0000000
--- a/libs/vr/libeds/include/private/dvr/display_metrics.h
+++ /dev/null
@@ -1,79 +0,0 @@
-#ifndef ANDROID_DVR_DISPLAY_METRICS_H_
-#define ANDROID_DVR_DISPLAY_METRICS_H_
-
-#include <private/dvr/types.h>
-
-namespace android {
-namespace dvr {
-
-enum class DisplayOrientation { kPortrait, kLandscape };
-
-// DisplayMetrics encapsulates metrics describing a display to be used
-// with a head mount to create a head mounted display.
-class DisplayMetrics {
- public:
-  DisplayMetrics();
-  // Constructs a DisplayMetrics given a display size in pixels,
-  // meters per pixel, border size in meters, and frame duration in
-  // seconds.
-  //
-  // size_pixels The size of the display in pixels.
-  // meters_per_pixel The meters per pixel in each dimension.
-  // border_size_meters The size of the border around the display
-  //     in meters.  When the device sits on a surface in the proper
-  //     orientation this is the distance from the surface to the edge
-  //     of the display.
-  // frame_duration_seconds The duration in seconds of each frame
-  //     (i.e., 1 / framerate).
-  DisplayMetrics(vec2i size_pixels, vec2 meters_per_pixel,
-                 float border_size_meters, float frame_duration_seconds,
-                 DisplayOrientation orientation);
-
-  // Gets the size of the display in physical pixels (not logical pixels).
-  vec2i GetSizePixels() const { return size_pixels_; }
-
-  DisplayOrientation GetOrientation() const { return orientation_; }
-  bool IsPortrait() const {
-    return orientation_ == DisplayOrientation::kPortrait;
-  }
-
-  // Gets the size of the display in meters.
-  vec2 GetSizeMeters() const {
-    return vec2(static_cast<float>(size_pixels_[0]),
-                static_cast<float>(size_pixels_[1]))
-               .array() *
-           meters_per_pixel_.array();
-  }
-
-  // Gets the meters per pixel.
-  vec2 GetMetersPerPixel() const { return meters_per_pixel_; }
-
-  // Gets the size of the border around the display.
-  // For a phone in landscape position this would be the distance from
-  // the bottom edge of the phone to the bottom of the screen.
-  float GetBorderSizeMeters() const { return border_size_meters_; }
-
-  // Gets the frame duration in seconds for the display.
-  float GetFrameDurationSeconds() const { return frame_duration_seconds_; }
-
-  // Toggles the orientation and swaps all of the settings such that the
-  // display is being held in the other orientation.
-  void ToggleOrientation();
-
-  // Override the meters per pixel.
-  void SetMetersPerPixel(const vec2& meters_per_pixel) {
-    meters_per_pixel_ = meters_per_pixel;
-  }
-
- private:
-  vec2i size_pixels_;
-  vec2 meters_per_pixel_;
-  float border_size_meters_;
-  float frame_duration_seconds_;
-  DisplayOrientation orientation_;
-};
-
-}  // namespace dvr
-}  // namespace android
-
-#endif  // ANDROID_DVR_DISPLAY_METRICS_H_
diff --git a/libs/vr/libeds/include/private/dvr/distortion_renderer.h b/libs/vr/libeds/include/private/dvr/distortion_renderer.h
deleted file mode 100644
index 28fd48a..0000000
--- a/libs/vr/libeds/include/private/dvr/distortion_renderer.h
+++ /dev/null
@@ -1,232 +0,0 @@
-#ifndef ANDROID_DVR_DISTORTION_RENDERER_H_
-#define ANDROID_DVR_DISTORTION_RENDERER_H_
-
-#include <EGL/egl.h>
-#include <GLES2/gl2.h>
-#include <array>
-#include <functional>
-
-#include <private/dvr/eds_mesh.h>
-#include <private/dvr/graphics/shader_program.h>
-#include <private/dvr/late_latch.h>
-#include <private/dvr/render_texture_params.h>
-#include <private/dvr/types.h>
-
-namespace android {
-namespace dvr {
-
-class CompositeHmd;
-
-// Encapsulates the rendering operations to correct for the HMD's lens
-// distortion.
-class DistortionRenderer {
- public:
-  static constexpr int kMaxLayers = 2;
-  static constexpr int kMaxLatchedLayers = 4;
-
-  static const mat4 kViewportFromClipMatrix;
-  static const mat4 kClipFromViewportMatrix;
-
-  // Creates a distortion renderer for the given HMD.
-  //
-  // hmd provides the head mount and display metrics, including the
-  //     distortion function to apply.
-  // display_size the resolution of the output of the distortion renderer.
-  // distortion_mesh_resolution the amount of subdivision in the
-  //     distortion mesh.
-  DistortionRenderer(const CompositeHmd& hmd, vec2i display_size,
-                     int distortion_mesh_resolution,
-                     bool flip_texture_horizontally,
-                     bool flip_texture_vertically, bool separated_eye_buffers,
-                     bool eds_enabled, bool late_latch_enabled);
-  ~DistortionRenderer();
-
-  // Returns the distortion factor array for the distortion function that was
-  // passed in at creation time. The distortion factor array contains the
-  // magnification factor induced by the distortion mesh at every vertex. There
-  // is one entry per vertex, and entries are ordered in row-major order. The
-  // array contains the magnification for both eyes averaged.
-  const std::vector<float>& GetDistortionFactorArray();
-
-  // |render_pose_buffer_object| is the per-texture pose array buffer object.
-  // |render_buffer_index| is the per-texture index into the pose array buffer
-  //                       object. This selects which pose was rendered into the
-  //                       corresponding texture.
-  void DoLateLatch(uint32_t target_vsync_count,
-                   const uint32_t* render_buffer_index,
-                   const GLuint* render_pose_buffer_objects,
-                   const bool* vertical_flip, const bool* separate_eye,
-                   int num_textures);
-
-  // Convenience method that does no flipping.
-  void DoLateLatch(uint32_t target_vsync_count,
-                   const uint32_t* render_buffer_index,
-                   const GLuint* render_pose_buffer_objects, int num_textures) {
-    bool flip[kMaxLayers] = {false};
-    bool separate[kMaxLayers] = {separated_eye_buffers_};
-    DoLateLatch(target_vsync_count, render_buffer_index,
-                render_pose_buffer_objects, flip, separate, num_textures);
-  }
-
-  void PrepGlState(EyeType eye);
-  void ResetGlState(int num_textures);
-
-  // Applies distortion correction to the given textures by rendering into the
-  // current output target.
-  //
-  // eye Which eye is being corrected.
-  // texture_ids The OpenGL texture IDs of the texture layers.
-  // texture_sizes Dimensions of the corresponding textures.
-  // vertical_flip Whether to flip each input texture vertically.
-  // separate_eye Whether the corresponding texture is a separate texture for
-  //              left and right eyes. If false, it is a shared texture with
-  //              the left view on the left half and right on the right half.
-  // late_latch_layer Which late latch layer index to use for each texture.
-  //     Typically this is just {0, 1} unless blend_with_previous_layer is used.
-  // num_textures Number of textures in texture_ids and texture_sizes.
-  // blend_with_previous_layer If enabled, blend this single layer with the
-  //     existing framebuffer contents.
-  void ApplyDistortionCorrectionToTexture(
-      EyeType eye, const GLuint* texture_ids, const bool* vertical_flip,
-      const bool* separate_eye, const int* late_latch_layer, int num_textures,
-      bool blend_with_previous_layer, bool do_gl_state_prep);
-
-  // Convenience method that does no flipping.
-  void ApplyDistortionCorrectionToTexture(EyeType eye,
-                                          const GLuint* texture_ids,
-                                          int num_textures) {
-    bool flip[kMaxLayers] = {false};
-    bool separate[kMaxLayers] = {separated_eye_buffers_,
-                                 separated_eye_buffers_};
-    int latch_layer[kMaxLayers] = {0, 1};
-    ApplyDistortionCorrectionToTexture(eye, texture_ids, flip, separate,
-                                       latch_layer, num_textures, false, true);
-  }
-
-  // Draw a video quad based on the given video texture by rendering into the
-  // current output target.
-  //
-  // eye Which eye is being corrected.
-  // layer_id Which compositor layer the video mesh should be drawn into.
-  // texture_id The OpenGL texture ID of the video texture.
-  // transform The transformation matrix that transforms the video mesh to its
-  //           desired eye space position for the target eye.
-  void DrawVideoQuad(EyeType eye, int layer_id, GLuint texture_id,
-                     const mat4& transform);
-
-  // Modifies the size of the output display. This is the number of physical
-  // pixels per dimension covered by the display on the output device. Calling
-  // this method is cheap; it only updates the state table of the two
-  // eye-specific mesh nodes.
-  void SetDisplaySize(vec2i size);
-
-  void SetEdsEnabled(bool enabled);
-  void SetChromaticAberrationCorrectionEnabled(bool enabled) {
-    chromatic_aberration_correction_enabled_ = enabled;
-  }
-  void SetUseAlphaVignette(bool enabled) { use_alpha_vignette_ = enabled; }
-
-  bool GetLastEdsPose(LateLatchOutput* out_data, int layer_id = 0) const;
-
- private:
-  enum ShaderProgramType {
-    kNoChromaticAberrationCorrection,
-    kNoChromaticAberrationCorrectionTwoLayers,
-    kChromaticAberrationCorrection,
-    kChromaticAberrationCorrectionTwoLayers,
-    kChromaticAberrationCorrectionAlphaVignette,
-    kChromaticAberrationCorrectionAlphaVignetteTwoLayers,
-    kChromaticAberrationCorrectionWithBlend,
-    kSimpleVideoQuad,
-    kNumShaderPrograms,
-  };
-
-  struct EdsShader {
-    EdsShader() {}
-    ~EdsShader() {
-    }
-
-    void load(const char* vertex, const char* fragment, int num_layers,
-              bool use_alpha_vignette, float rotation, bool flip_vertical,
-              bool blend_with_previous_layer);
-    void use() { pgm.Use(); }
-
-    // Setters used by the distortion renderer to update uTexFromEyeMatrix and
-    // uEyeFromViewportMatrix with the given transforms.
-    void SetTexFromEyeTransform(const mat4& transform) {
-      glUniformMatrix4fv(uTexFromEyeMatrix, 1, false, transform.data());
-    }
-
-    void SetEyeFromViewportTransform(const mat4& transform) {
-      glUniformMatrix4fv(uEyeFromViewportMatrix, 1, false, transform.data());
-    }
-
-    ShaderProgram pgm;
-
-    // Texture variables, named to match shader strings for convenience.
-    GLint uProjectionMatrix;
-    GLint uTexFromEyeMatrix;
-    GLint uEyeFromViewportMatrix;
-    GLint uTexXMinMax;
-  };
-
-  void DrawEye(EyeType eye, const GLuint* texture_ids,
-               const bool* vertical_flip, const bool* separate_eye,
-               const int* late_latch_layer, int num_textures,
-               bool blend_with_previous_layer, bool do_gl_state_prep);
-
-  // Called when the Hmd is updated; recomputes the distortion mesh vertices
-  // and the distortion factor array.
-  void RecomputeDistortion(const CompositeHmd& hmd);
-
-  // Per-eye, per flip, per separate eye mode buffers for setting EDS matrix
-  // when EDS is disabled.
-  GLuint uTexFromRecommendedViewportMatrix[2][2][2];
-
-  // Distortion mesh for each eye.
-  EdsMesh mesh_node_[2];
-  // VBO (vertex buffer object) for distortion mesh vertices.
-  GLuint mesh_vbo_[2];
-  // VAO (vertex array object) for distortion mesh vertex array data.
-  GLuint mesh_vao_[2];
-  // IBO (index buffer object) for distortion mesh indices.
-  GLuint mesh_ibo_[2];
-
-  EdsShader shaders_[kNumShaderPrograms];
-
-  // Enum to indicate which shader program is being used.
-  ShaderProgramType shader_type_;
-
-  bool eds_enabled_;
-  bool chromatic_aberration_correction_enabled_;
-  bool use_alpha_vignette_;
-
-  // Tracks the distortion mesh resolution currently in use. When the Hmd is
-  // updated, the distortion mesh vertices and factor array are recomputed
-  // using the resolution stored here.
-  int distortion_mesh_resolution_;
-
-  // The OpenGL ID of the last texture passed to
-  // ApplyDistortionCorrectionToTexture().
-  GLuint last_distortion_texture_id_;
-
-  // GL texture 2D target for application texture.
-  GLint app_texture_target_;
-
-  // Precomputed matrices for EDS and viewport transforms.
-  mat4 tex_from_eye_matrix_[2][2][2];
-  mat4 eye_from_viewport_matrix_[2];
-
-  // Eye viewport locations.
-  vec2i eye_viewport_origin_[2];
-  vec2i eye_viewport_size_;
-
-  vec2i display_size_;
-
-  std::unique_ptr<LateLatch> late_latch_[kMaxLatchedLayers];
-  bool separated_eye_buffers_;
-};
-
-}  // namespace dvr
-}  // namespace android
-
-#endif  // ANDROID_DVR_DISTORTION_RENDERER_H_
diff --git a/libs/vr/libeds/include/private/dvr/eds_mesh.h b/libs/vr/libeds/include/private/dvr/eds_mesh.h
deleted file mode 100644
index d2c901e..0000000
--- a/libs/vr/libeds/include/private/dvr/eds_mesh.h
+++ /dev/null
@@ -1,38 +0,0 @@
-#ifndef ANDROID_DVR_EDS_MESH_H_
-#define ANDROID_DVR_EDS_MESH_H_
-
-#include <stdint.h>
-#include <functional>
-#include <vector>
-
-#include <private/dvr/types.h>
-
-namespace android {
-namespace dvr {
-
-struct EdsVertex {
-  vec2 position;
-  vec2 red_viewport_coords;
-  vec2 green_viewport_coords;
-  vec2 blue_viewport_coords;
-};
-
-struct EdsMesh {
-  std::vector<EdsVertex> vertices;
-  std::vector<uint16_t> indices;
-};
-
-// Distortion function takes in a point in the range [0..1, 0..1] and returns
-// the vertex position and the three distorted points for separate R, G and B
-// channels.
-typedef std::function<void(EyeType, vec2, vec2*, vec2*)> DistortionFunction;
-
-// Builds a distortion mesh of resolution |resolution| using
-// the distortion provided by |distortion_function| for |eye|.
-EdsMesh BuildDistortionMesh(EyeType eye, int resolution,
-                            const DistortionFunction& distortion_function);
-
-}  // namespace dvr
-}  // namespace android
-
-#endif  // ANDROID_DVR_EDS_MESH_H_
diff --git a/libs/vr/libeds/include/private/dvr/head_mount_metrics.h b/libs/vr/libeds/include/private/dvr/head_mount_metrics.h
deleted file mode 100644
index f3e63a6..0000000
--- a/libs/vr/libeds/include/private/dvr/head_mount_metrics.h
+++ /dev/null
@@ -1,134 +0,0 @@
-#ifndef ANDROID_DVR_HEAD_MOUNT_METRICS_H_
-#define ANDROID_DVR_HEAD_MOUNT_METRICS_H_
-
-#include <array>
-
-#include <private/dvr/color_channel_distortion.h>
-#include <private/dvr/types.h>
-
-namespace android {
-namespace dvr {
-
-// HeadMountMetrics encapsulates metrics describing a head mount to be used
-// with a display to create a head mounted display.
-class HeadMountMetrics {
- public:
-  // The vertical point of the HMD where the lens distance is measured from.
-  enum VerticalAlignment { kBottom = 0, kCenter = 1, kTop = 2 };
-
-  enum EyeOrientation {
-    kCCW0Degrees = 0,
-    kCCW90Degrees = 1,
-    kCCW180Degrees = 2,
-    kCCW270Degrees = 3,
-    kCCW0DegreesMirrored = 4,
-    kCCW90DegreesMirrored = 5,
-    kCCW180DegreesMirrored = 6,
-    kCCW270DegreesMirrored = 7,
-
-    // Rotations that consist of an odd number of 90 degree rotations will swap
-    // the height and width of any bounding boxes/viewports. This bit informs
-    // any viewport manipulating code to perform the appropriate transformation.
-    kRightAngleBit = 0x01,
-    // Viewports are represented as four floating point values (four half
-    // angles). Rotating this structure can be done through a shift operation.
-    // This mask extracts the rotation portion of the orientation.
-    kRotationMask = 0x03,
-    // This mask specifies whether the output is mirrored.
-    kMirroredBit = 0x04
-  };
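-
-  // For example (illustration only): kCCW90DegreesMirrored has value 5, so
-  // (5 & kRotationMask) == kCCW90Degrees, (5 & kMirroredBit) != 0, and
-  // (5 & kRightAngleBit) != 0, meaning viewport width and height swap.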
-
-  HeadMountMetrics(
-      float inter_lens_distance, float tray_to_lens_distance,
-      float virtual_eye_to_screen_distance,
-      VerticalAlignment vertical_alignment, const FieldOfView& left_eye_max_fov,
-      const FieldOfView& right_eye_max_fov,
-      const std::shared_ptr<ColorChannelDistortion>& red_distortion,
-      const std::shared_ptr<ColorChannelDistortion>& green_distortion,
-      const std::shared_ptr<ColorChannelDistortion>& blue_distortion,
-      EyeOrientation left_eye_orientation, EyeOrientation right_eye_orientation,
-      float screen_center_to_lens_distance)
-      : inter_lens_distance_(inter_lens_distance),
-        tray_to_lens_distance_(tray_to_lens_distance),
-        virtual_eye_to_screen_distance_(virtual_eye_to_screen_distance),
-        screen_center_to_lens_distance_(screen_center_to_lens_distance),
-        vertical_alignment_(vertical_alignment),
-        eye_max_fov_({{left_eye_max_fov, right_eye_max_fov}}),
-        color_channel_distortion_(
-            {{red_distortion, green_distortion, blue_distortion}}),
-        supports_chromatic_aberration_correction_(true),
-        eye_orientation_({{left_eye_orientation, right_eye_orientation}}) {
-    // If we're missing the green or blue distortions, assume that we don't
-    // correct for chromatic aberration.
-    if (!green_distortion || !blue_distortion) {
-      color_channel_distortion_[1] = red_distortion;
-      color_channel_distortion_[2] = red_distortion;
-      supports_chromatic_aberration_correction_ = false;
-    }
-  }
-
-  // Returns the distance in meters between the optical centers of the two
-  // lenses.
-  float GetInterLensDistance() const { return inter_lens_distance_; }
-
-  // Returns the distance in meters from the "tray" upon which the display
-  // rests to the optical center of a lens.
-  float GetTrayToLensDistance() const { return tray_to_lens_distance_; }
-
-  // Returns the distance in meters from the virtual eye to the screen.
-  // See http://go/vr-distortion-correction for an explanation of what
-  // this distance is.
-  float GetVirtualEyeToScreenDistance() const {
-    return virtual_eye_to_screen_distance_;
-  }
-
-  // Returns the horizontal distance from the center of the screen to the center
-  // of the lens, in meters.
-  float GetScreenCenterToLensDistance() const {
-    return screen_center_to_lens_distance_;
-  }
-
-  // Returns the vertical alignment of the HMD.  The tray-to-lens distance
-  // is relative to this position.  Exception: if the alignment is kCenter,
-  // then the offset has no meaning.
-  VerticalAlignment GetVerticalAlignment() const { return vertical_alignment_; }
-
-  // Returns the given eye's maximum field of view visible through the lens.
-  // The actual rendered field of view will be limited by this and also by
-  // the size of the screen.
-  const FieldOfView& GetEyeMaxFov(EyeType eye) const {
-    return eye_max_fov_[eye];
-  }
-
-  // Returns the ColorChannelDistortion object representing the distortion
-  // caused by the lenses for the given color channel.
-  const ColorChannelDistortion& GetColorChannelDistortion(
-      RgbColorChannel channel) const {
-    return *color_channel_distortion_[channel];
-  }
-
-  bool supports_chromatic_aberration_correction() const {
-    return supports_chromatic_aberration_correction_;
-  }
-
-  EyeOrientation GetEyeOrientation(EyeType eye) const {
-    return eye_orientation_[eye];
-  }
-
- private:
-  float inter_lens_distance_;
-  float tray_to_lens_distance_;
-  float virtual_eye_to_screen_distance_;
-  float screen_center_to_lens_distance_;
-  VerticalAlignment vertical_alignment_;
-  std::array<FieldOfView, 2> eye_max_fov_;
-  std::array<std::shared_ptr<ColorChannelDistortion>, 3>
-      color_channel_distortion_;
-  bool supports_chromatic_aberration_correction_;
-  std::array<EyeOrientation, 2> eye_orientation_;
-};
-
-}  // namespace dvr
-}  // namespace android
-
-#endif  // ANDROID_DVR_HEAD_MOUNT_METRICS_H_
diff --git a/libs/vr/libeds/include/private/dvr/identity_distortion.h b/libs/vr/libeds/include/private/dvr/identity_distortion.h
deleted file mode 100644
index b9c5cf6..0000000
--- a/libs/vr/libeds/include/private/dvr/identity_distortion.h
+++ /dev/null
@@ -1,23 +0,0 @@
-#ifndef ANDROID_DVR_IDENTITY_DISTORTION_H_
-#define ANDROID_DVR_IDENTITY_DISTORTION_H_
-
-#include <private/dvr/color_channel_distortion.h>
-
-namespace android {
-namespace dvr {
-
-// Provides an identity distortion operation if running the device without any
-// lenses.
-class IdentityDistortion : public ColorChannelDistortion {
- public:
-  IdentityDistortion() {}
-
-  vec2 Distort(vec2 p) const override { return p; }
-
-  vec2 DistortInverse(vec2 p) const override { return p; }
-};
-
-}  // namespace dvr
-}  // namespace android
-
-#endif  // ANDROID_DVR_IDENTITY_DISTORTION_H_
diff --git a/libs/vr/libeds/include/private/dvr/polynomial_radial_distortion.h b/libs/vr/libeds/include/private/dvr/polynomial_radial_distortion.h
deleted file mode 100644
index 8f080aa..0000000
--- a/libs/vr/libeds/include/private/dvr/polynomial_radial_distortion.h
+++ /dev/null
@@ -1,60 +0,0 @@
-#ifndef ANDROID_DVR_POLYNOMIAL_RADIAL_DISTORTION_H_
-#define ANDROID_DVR_POLYNOMIAL_RADIAL_DISTORTION_H_
-
-#include <vector>
-
-#include <private/dvr/color_channel_distortion.h>
-
-namespace android {
-namespace dvr {
-
-// PolynomialRadialDistortion implements a radial distortion using
-// a set of coefficients describing a polynomial function.
-// See http://en.wikipedia.org/wiki/Distortion_(optics).
-//
-// Unless otherwise stated, the units used in this class are tan-angle units
-// which can be computed as distance on the screen divided by distance from the
-// virtual eye to the screen.
-class PolynomialRadialDistortion : public ColorChannelDistortion {
- public:
-  // Construct a PolynomialRadialDistortion with coefficients for
-  // the radial distortion equation:
-  //
-  //   p' = p (1 + K1 r^2 + K2 r^4 + ... + Kn r^(2n))
-  //
-  // where r is the distance in tan-angle units from the optical center,
-  // p the input point and p' the output point.
-  // The provided vector contains the coefficients for the even monomials
-  // in the distortion equation: coefficients[0] is K1, coefficients[1] is K2,
-  // etc.  Thus the polynomial used for distortion has degree
-  // (2 * coefficients.size()).
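-  //
-  // As an illustrative worked example (hypothetical values, not from a real
-  // lens profile): with coefficients {0.2f, 0.1f} and r = 0.5, r^2 = 0.25 and
-  // the distortion factor is 1 + 0.2 * 0.25 + 0.1 * 0.25^2 = 1.05625, so
-  // DistortRadius(0.5) returns 0.5 * 1.05625 = 0.528125.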
-  explicit PolynomialRadialDistortion(const std::vector<float>& coefficients);
-
-  // Given a radius (measuring distance from the optical axis of the lens),
-  // returns the distortion factor for that radius.
-  float DistortionFactor(float r_squared) const;
-
-  // Given a radius (measuring distance from the optical axis of the lens),
-  // returns the corresponding distorted radius.
-  float DistortRadius(float r) const;
-
-  // Given a 2d point p, returns the corresponding distorted point.
-  // The units of both the input and output points are tan-angle units,
-  // which can be computed as the distance on the screen divided by the
-  // distance from the virtual eye to the screen.  The optical axis
-  // of the lens defines the origin for both input and output points.
-  vec2 Distort(vec2 p) const override;
-
-  // Given a 2d point p, returns the point that would need to be passed to
-  // Distort to get point p (approximately).
-  vec2 DistortInverse(vec2 p) const override;
-
-  // Returns the distortion coefficients.
-  const std::vector<float>& GetCoefficients() const;
-
- private:
-  std::vector<float> coefficients_;
-};
-
-}  // namespace dvr
-}  // namespace android
-
-#endif  // ANDROID_DVR_POLYNOMIAL_RADIAL_DISTORTION_H_
diff --git a/libs/vr/libeds/include/private/dvr/raw_pose.h b/libs/vr/libeds/include/private/dvr/raw_pose.h
deleted file mode 100644
index 7058f1a..0000000
--- a/libs/vr/libeds/include/private/dvr/raw_pose.h
+++ /dev/null
@@ -1,54 +0,0 @@
-#ifndef ANDROID_DVR_RAW_POSE_H_
-#define ANDROID_DVR_RAW_POSE_H_
-
-#include <atomic>
-
-namespace android {
-namespace dvr {
-
-// POD raw data of a head pose with a count field for read consistency checking.
-// Warning: The layouts of this struct and RawPosePair must match the
-// corresponding buffer type in the shader in late_latch.cpp.
-struct RawPose {
-  void Reset(uint32_t new_count) volatile {
-    qx = qy = qz = 0.0f;
-    qw = 1.0f;
-    px = py = pz = 0.0f;
-    count = new_count;
-  }
-
-  float qx, qy, qz, qw;
-  float px, py, pz;
-  std::atomic<uint32_t> count;
-};
-
-// RawPosePair is used for lock-free writing at about 1 kHz by the CPU/DSP
-// and reading by the GPU. At creation time, pose1 is given count = 1 and
-// pose2 is given count = 2.
-//
-// The lock-free write pattern is:
-// - write to pose with least count.
-// - memory write barrier.
-// - write count = count + 2.
-//
-// For reads, there is an important assumption about the GPU: it generally
-// processes things contiguously, without arbitrary preemptions that save and
-// restore full cache states. In other words, if the GPU is preempted and then
-// later resumed, any data that was read from memory before the preemption will
-// be re-read from memory after resume. This allows the following read trick to
-// work:
-// - read the full RawPosePair into a shader.
-// - select the pose with the newest count.
-//
-// The older pose may be partially written by the async stores from CPU/DSP, but
-// because of the memory barrier and GPU characteristics, the highest count pose
-// should always be a fully consistent RawPose.
-struct RawPosePair {
-  RawPose pose1;
-  RawPose pose2;
-};
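-
-// A minimal sketch of the write pattern described above (WriteRawPose is a
-// hypothetical helper, not declared anywhere in this library): pick the slot
-// with the older count, fill in the pose fields, then publish with a release
-// store so a reader never observes a torn pose carrying the newest count.
-//
-//   inline void WriteRawPose(volatile RawPosePair* pair, const float q[4],
-//                            const float p[3]) {
-//     // Write into the stale slot, i.e. the one with the smaller count.
-//     volatile RawPose& target =
-//         pair->pose1.count.load(std::memory_order_relaxed) <
-//                 pair->pose2.count.load(std::memory_order_relaxed)
-//             ? pair->pose1
-//             : pair->pose2;
-//     const uint32_t next = target.count.load(std::memory_order_relaxed) + 2;
-//     target.qx = q[0]; target.qy = q[1]; target.qz = q[2]; target.qw = q[3];
-//     target.px = p[0]; target.py = p[1]; target.pz = p[2];
-//     // Release store: the pose fields become visible before the new count.
-//     target.count.store(next, std::memory_order_release);
-//   }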
-
-}  // namespace dvr
-}  // namespace android
-
-#endif  // ANDROID_DVR_RAW_POSE_H_
diff --git a/libs/vr/libeds/include/private/dvr/render_texture_params.h b/libs/vr/libeds/include/private/dvr/render_texture_params.h
deleted file mode 100644
index 71aebef..0000000
--- a/libs/vr/libeds/include/private/dvr/render_texture_params.h
+++ /dev/null
@@ -1,55 +0,0 @@
-#ifndef ANDROID_DVR_RENDER_TEXTURE_PARAMS_H_
-#define ANDROID_DVR_RENDER_TEXTURE_PARAMS_H_
-
-#include <private/dvr/types.h>
-
-namespace android {
-namespace dvr {
-
-// Encapsulates information about the render texture, including the size
-// of the render texture and the left/right viewports that define the
-// portion each eye renders onto. This struct is passed to PresentFrame
-// every frame before the client actually draws the scene.
-struct RenderTextureParams {
-  RenderTextureParams() {}
-
-  RenderTextureParams(vec2i target_texture_size,
-                      const Range2i& eye_viewport_bounds_left,
-                      const Range2i& eye_viewport_bounds_right,
-                      const FieldOfView& eye_fov_left,
-                      const FieldOfView& eye_fov_right)
-      : texture_size(target_texture_size) {
-    eye_viewport_bounds[kLeftEye] = eye_viewport_bounds_left;
-    eye_viewport_bounds[kRightEye] = eye_viewport_bounds_right;
-    eye_fov[kLeftEye] = eye_fov_left;
-    eye_fov[kRightEye] = eye_fov_right;
-  }
-
-  explicit RenderTextureParams(vec2i target_texture_size,
-                               const FieldOfView& eye_fov_left,
-                               const FieldOfView& eye_fov_right) {
-    texture_size = target_texture_size;
-    eye_viewport_bounds[0] = Range2i::FromSize(
-        vec2i(0, 0), vec2i(texture_size[0] / 2, texture_size[1]));
-    eye_viewport_bounds[1] =
-        Range2i::FromSize(vec2i(texture_size[0] / 2, 0),
-                          vec2i(texture_size[0] / 2, texture_size[1]));
-
-    eye_fov[kLeftEye] = eye_fov_left;
-    eye_fov[kRightEye] = eye_fov_right;
-  }
-
-  // The render texture size.
-  vec2i texture_size;
-
-  // The viewport bounds on the render texture for each eye.
-  Range2i eye_viewport_bounds[2];
-
-  // The field of view for each eye in degrees.
-  FieldOfView eye_fov[2];
-};
-
-}  // namespace dvr
-}  // namespace android
-
-#endif  // ANDROID_DVR_RENDER_TEXTURE_PARAMS_H_
diff --git a/libs/vr/libeds/polynomial_radial_distortion.cpp b/libs/vr/libeds/polynomial_radial_distortion.cpp
deleted file mode 100644
index fa01bb4..0000000
--- a/libs/vr/libeds/polynomial_radial_distortion.cpp
+++ /dev/null
@@ -1,53 +0,0 @@
-#include "include/private/dvr/polynomial_radial_distortion.h"
-
-namespace android {
-namespace dvr {
-
-PolynomialRadialDistortion::PolynomialRadialDistortion(
-    const std::vector<float>& coefficients)
-    : coefficients_(coefficients) {}
-
-float PolynomialRadialDistortion::DistortionFactor(float r_squared) const {
-  float r_factor = 1.0f;
-  float distortion_factor = 1.0f;
-
-  for (float ki : coefficients_) {
-    r_factor *= r_squared;
-    distortion_factor += ki * r_factor;
-  }
-
-  return distortion_factor;
-}
-
-float PolynomialRadialDistortion::DistortRadius(float r) const {
-  return r * DistortionFactor(r * r);
-}
-
-vec2 PolynomialRadialDistortion::Distort(vec2 p) const {
-  return p * DistortionFactor(p.squaredNorm());
-}
-
-vec2 PolynomialRadialDistortion::DistortInverse(vec2 p) const {
-  // Secant method.
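-  // Iterates r_{n+1} = r_n - f(r_n) * (r_n - r_{n-1}) / (f(r_n) - f(r_{n-1}))
-  // with f(r) = radius - DistortRadius(r), starting from two initial guesses
-  // around |radius|, until successive estimates agree to within 0.0001
-  // tan-angle units.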
-  const float radius = p.norm();
-  float r0 = radius / 0.9f;
-  float r1 = radius * 0.9f;
-  float r2;
-  float dr0 = radius - DistortRadius(r0);
-  float dr1;
-  while (fabsf(r1 - r0) > 0.0001f /** 0.1mm */) {
-    dr1 = radius - DistortRadius(r1);
-    r2 = r1 - dr1 * ((r1 - r0) / (dr1 - dr0));
-    r0 = r1;
-    r1 = r2;
-    dr0 = dr1;
-  }
-  return (r1 / radius) * p;
-}
-
-const std::vector<float>& PolynomialRadialDistortion::GetCoefficients() const {
-  return coefficients_;
-}
-
-}  // namespace dvr
-}  // namespace android
diff --git a/libs/vr/libeds/tests/eds_app_tests.cpp b/libs/vr/libeds/tests/eds_app_tests.cpp
deleted file mode 100644
index 549d864..0000000
--- a/libs/vr/libeds/tests/eds_app_tests.cpp
+++ /dev/null
@@ -1,140 +0,0 @@
-#include <EGL/egl.h>
-#include <GLES2/gl2.h>
-
-#include <dvr/graphics.h>
-#include <dvr/pose_client.h>
-#include <gtest/gtest.h>
-#include <private/dvr/graphics/shader_program.h>
-#include <private/dvr/types.h>
-
-namespace {
-
-#define POSE_BINDING 0
-
-#ifndef STRINGIFY
-#define STRINGIFY2(s) #s
-#define STRINGIFY(s) STRINGIFY2(s)
-#endif
-
-static const char g_vert_shader[] =
-    "layout(binding = " STRINGIFY(POSE_BINDING) ", std140)\n"
-    "uniform LateLatchData {\n"
-    "  mat4 uViewProjection;\n"
-    "};\n"
-    "void main() {\n"
-    "  vec2 verts[4];\n"
-    "  verts[0] = vec2(-1, -1);\n"
-    "  verts[1] = vec2(-1, 1);\n"
-    "  verts[2] = vec2(1, -1);\n"
-    "  verts[3] = vec2(1, 1);\n"
-    "  gl_Position = uViewProjection * vec4(verts[gl_VertexID], 0.0, 1.0);\n"
-    "}\n";
-
-static const char g_frag_shader[] =
-    "precision mediump float;\n"
-    "out vec4 outColor;\n"
-    "void main() {\n"
-    "  outColor = vec4(1.0);\n"
-    "}\n";
-
-DvrGraphicsContext* CreateContext(int* surface_width, int* surface_height) {
-  DvrGraphicsContext* context = nullptr;
-  int display_width = 0, display_height = 0;
-  float inter_lens_meters = 0.0f;
-  float left_fov[4] = {0.0f};
-  float right_fov[4] = {0.0f};
-  int disable_warp = 0;
-  int enable_late_latch = 1;
-  DvrSurfaceParameter surface_params[] = {
-      DVR_SURFACE_PARAMETER_IN(DISABLE_DISTORTION, disable_warp),
-      DVR_SURFACE_PARAMETER_IN(ENABLE_LATE_LATCH, enable_late_latch),
-      DVR_SURFACE_PARAMETER_OUT(DISPLAY_WIDTH, &display_width),
-      DVR_SURFACE_PARAMETER_OUT(DISPLAY_HEIGHT, &display_height),
-      DVR_SURFACE_PARAMETER_OUT(SURFACE_WIDTH, surface_width),
-      DVR_SURFACE_PARAMETER_OUT(SURFACE_HEIGHT, surface_height),
-      DVR_SURFACE_PARAMETER_OUT(INTER_LENS_METERS, &inter_lens_meters),
-      DVR_SURFACE_PARAMETER_OUT(LEFT_FOV_LRBT, left_fov),
-      DVR_SURFACE_PARAMETER_OUT(RIGHT_FOV_LRBT, right_fov),
-      DVR_SURFACE_PARAMETER_LIST_END,
-  };
-  dvrGraphicsContextCreate(surface_params, &context);
-  return context;
-}
-
-}  // namespace
-
-TEST(SensorAppTests, EdsWithLateLatch) {
-  int surface_width = 0, surface_height = 0;
-  DvrGraphicsContext* context = CreateContext(&surface_width, &surface_height);
-  ASSERT_NE(nullptr, context);
-
-  android::dvr::ShaderProgram shader(g_vert_shader, g_frag_shader);
-
-  for (int i = 0; i < 5; ++i) {
-    DvrFrameSchedule schedule;
-    dvrGraphicsWaitNextFrame(context, 0, &schedule);
-
-    const auto ident_mat = android::dvr::mat4::Identity();
-    const float* ident_mats[] = { ident_mat.data(), ident_mat.data() };
-    GLuint late_latch_buffer_id = 0;
-    int ret = dvrBeginRenderFrameLateLatch(context, 0, schedule.vsync_count, 2,
-                                           ident_mats, ident_mats, ident_mats,
-                                           &late_latch_buffer_id);
-    EXPECT_EQ(0, ret);
-    for (int eye = 0; eye < 2; ++eye) {
-      if (eye == 0)
-        glViewport(0, 0, surface_width / 2, surface_height);
-      else
-        glViewport(surface_width / 2, 0, surface_width / 2, surface_height);
-
-      glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
-      shader.Use();
-
-      // Bind late latch pose matrix buffer.
-      glBindBufferRange(
-          GL_UNIFORM_BUFFER, POSE_BINDING, late_latch_buffer_id,
-          offsetof(DvrGraphicsLateLatchData, view_proj_matrix[eye]),
-          16 * sizeof(float));
-
-      // TODO(jbates): use transform feedback here to grab the vertex output
-      // and verify that it received late-latch pose data. Combine this with
-      // mocked pose data to verify that late-latching is working.
-      glDrawArrays(GL_POINTS, 0, 4);
-    }
-    dvrPresent(context);
-  }
-
-  glFinish();
-  dvrGraphicsContextDestroy(context);
-}
-
-TEST(SensorAppTests, EdsWithoutLateLatch) {
-  int surface_width = 0, surface_height = 0;
-  DvrGraphicsContext* context = CreateContext(&surface_width, &surface_height);
-  ASSERT_NE(nullptr, context);
-  DvrPose* client = dvrPoseCreate();
-  ASSERT_NE(nullptr, client);
-
-  for (int i = 0; i < 5; ++i) {
-    DvrFrameSchedule schedule;
-    dvrGraphicsWaitNextFrame(context, 0, &schedule);
-    DvrPoseAsync pose;
-    int ret = dvrPoseGet(client, schedule.vsync_count, &pose);
-    ASSERT_EQ(0, ret);
-
-    dvrBeginRenderFrameEds(context, pose.orientation, pose.translation);
-    for (int eye = 0; eye < 2; ++eye) {
-      if (eye == 0)
-        glViewport(0, 0, surface_width / 2, surface_height);
-      else
-        glViewport(surface_width / 2, 0, surface_width / 2, surface_height);
-
-      glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
-      EXPECT_EQ(0, ret);
-    }
-    dvrPresent(context);
-  }
-
-  dvrPoseDestroy(client);
-  dvrGraphicsContextDestroy(context);
-}
diff --git a/libs/vr/libvrflinger/Android.bp b/libs/vr/libvrflinger/Android.bp
index dc81c60..6218e8e 100644
--- a/libs/vr/libvrflinger/Android.bp
+++ b/libs/vr/libvrflinger/Android.bp
@@ -30,7 +30,6 @@
     "libhwcomposer-command-buffer",
     "libbufferhub",
     "libbufferhubqueue",
-    "libeds",
     "libdisplay",
     "libdvrcommon",
     "libdvrgraphics",
diff --git a/libs/vr/libvrflinger/hardware_composer.cpp b/libs/vr/libvrflinger/hardware_composer.cpp
index 079d6fd..4c08284 100644
--- a/libs/vr/libvrflinger/hardware_composer.cpp
+++ b/libs/vr/libvrflinger/hardware_composer.cpp
@@ -49,10 +49,6 @@
 // Offset before vsync to submit frames to hardware composer.
 constexpr int64_t kFramePostOffsetNs = 4000000;  // 4ms
 
-constexpr size_t kDefaultDisplayConfigCount = 32;
-
-constexpr float kMetersPerInch = 0.0254f;
-
 const char kBacklightBrightnessSysFile[] =
     "/sys/class/leds/lcd-backlight/brightness";
 
@@ -65,23 +61,6 @@
 
 const char kRightEyeOffsetProperty[] = "dvr.right_eye_offset_ns";
 
-// Returns our best guess for the time the compositor will spend rendering the
-// next frame.
-int64_t GuessFrameTime(int compositor_visible_layer_count) {
-  // The cost of asynchronous EDS and lens warp is currently measured at 2.5ms
-  // for one layer and 7ms for two layers, but guess a higher frame time to
-  // account for CPU overhead. This guess is only used before we've measured the
-  // actual time to render a frame for the current compositor configuration.
-  switch (compositor_visible_layer_count) {
-    case 0:
-      return 500000;  // .5ms
-    case 1:
-      return 5000000;  // 5ms
-    default:
-      return 10500000;  // 10.5ms
-  }
-}
-
 // Get time offset from a vsync to when the pose for that vsync should be
 // predicted out to. For example, if scanout gets halfway through the frame
 // at the halfway point between vsyncs, then this could be half the period.
@@ -241,14 +220,6 @@
 }
 
 void HardwareComposer::OnPostThreadResumed() {
-  constexpr int format = HAL_PIXEL_FORMAT_RGBA_8888;
-  constexpr int usage =
-      GRALLOC_USAGE_HW_FB | GRALLOC_USAGE_HW_COMPOSER | GRALLOC_USAGE_HW_RENDER;
-
-  framebuffer_target_ = std::make_shared<IonBuffer>(
-      native_display_metrics_.width, native_display_metrics_.height, format,
-      usage);
-
   hwc2_hidl_->resetCommands();
 
   // Connect to pose service.
@@ -275,7 +246,6 @@
 }
 
 void HardwareComposer::OnPostThreadPaused() {
-  framebuffer_target_.reset();
   retire_fence_fds_.clear();
   display_surfaces_.clear();
 
diff --git a/libs/vr/libvrflinger/hardware_composer.h b/libs/vr/libvrflinger/hardware_composer.h
index 5a75f42..20327a3 100644
--- a/libs/vr/libvrflinger/hardware_composer.h
+++ b/libs/vr/libvrflinger/hardware_composer.h
@@ -383,9 +383,6 @@
   // Transform required to get from native to logical display orientation.
   HWC::Transform display_transform_ = HWC::Transform::None;
 
-  // Buffer for the background layer required by hardware composer.
-  std::shared_ptr<IonBuffer> framebuffer_target_;
-
   // Pending surface list. Set by the display service when DirectSurfaces are
   // added, removed, or change visibility. Written by the message dispatch
   // thread and read by the post thread.
diff --git a/libs/vr/libvrsensor/Android.bp b/libs/vr/libvrsensor/Android.bp
index d59182e..3588b5e 100644
--- a/libs/vr/libvrsensor/Android.bp
+++ b/libs/vr/libvrsensor/Android.bp
@@ -66,7 +66,6 @@
         "libgmock_main",
         "libgmock",
         "libdisplay",
-        "libeds",
         "libvrsensor",
         "libdvrgraphics",
     ] + staticLibraries,
diff --git a/services/sensorservice/SensorService.cpp b/services/sensorservice/SensorService.cpp
index c5bbeee..d60768c 100644
--- a/services/sensorservice/SensorService.cpp
+++ b/services/sensorservice/SensorService.cpp
@@ -1250,7 +1250,7 @@
     }
 
     // Check maximum delay for the sensor.
-    nsecs_t maxDelayNs = sensor->getSensor().getMaxDelay() * 1000;
+    nsecs_t maxDelayNs = sensor->getSensor().getMaxDelay() * 1000LL;
     if (maxDelayNs > 0 && (samplingPeriodNs > maxDelayNs)) {
         samplingPeriodNs = maxDelayNs;
     }
@@ -1511,4 +1511,3 @@
 }
 
 }; // namespace android
-
diff --git a/services/surfaceflinger/SurfaceFlinger.cpp b/services/surfaceflinger/SurfaceFlinger.cpp
index 45cac6f..fec1f1e 100644
--- a/services/surfaceflinger/SurfaceFlinger.cpp
+++ b/services/surfaceflinger/SurfaceFlinger.cpp
@@ -1262,7 +1262,6 @@
         }
     }
     mDisplays.clear();
-    initializeDisplays();
 }
 
 void SurfaceFlinger::updateVrFlinger() {
@@ -1309,6 +1308,12 @@
     // parts of this class rely on the primary display always being available.
     createDefaultDisplayDevice();
 
+    // Reset the timing values to account for the period of the swapped in HWC
+    const auto& activeConfig = mHwc->getActiveConfig(HWC_DISPLAY_PRIMARY);
+    const nsecs_t period = activeConfig->getVsyncPeriod();
+    mAnimFrameTracker.setDisplayRefreshPeriod(period);
+    setCompositorTimingSnapped(0, period, 0);
+
     android_atomic_or(1, &mRepaintEverything);
     setTransactionFlags(eDisplayTransactionNeeded);
 }
diff --git a/services/vr/vr_window_manager/Android.bp b/services/vr/vr_window_manager/Android.bp
index d7ddba1..bba3ac9 100644
--- a/services/vr/vr_window_manager/Android.bp
+++ b/services/vr/vr_window_manager/Android.bp
@@ -31,7 +31,6 @@
     "libdisplay",
     "libbufferhub",
     "libbufferhubqueue",
-    "libeds",
     "libdvrgraphics",
     "libdvrcommon",
     "libhwcomposer-client",
diff --git a/vulkan/libvulkan/driver.cpp b/vulkan/libvulkan/driver.cpp
index f2cd8e6..0005a90 100644
--- a/vulkan/libvulkan/driver.cpp
+++ b/vulkan/libvulkan/driver.cpp
@@ -887,6 +887,19 @@
     const VkAllocationCallbacks& data_allocator =
         (pAllocator) ? *pAllocator : GetDefaultAllocator();
 
+    if (pCreateInfo->pApplicationInfo &&
+        pCreateInfo->pApplicationInfo->apiVersion >= VK_MAKE_VERSION(1, 1, 0)) {
+#pragma clang diagnostic push
+#pragma clang diagnostic ignored "-Wold-style-cast"
+        ALOGI(
+            "Requested Vulkan instance version %d.%d is greater than max "
+            "supported version (1.0)",
+            VK_VERSION_MAJOR(pCreateInfo->pApplicationInfo->apiVersion),
+            VK_VERSION_MINOR(pCreateInfo->pApplicationInfo->apiVersion));
+#pragma clang diagnostic pop
+        return VK_ERROR_INCOMPATIBLE_DRIVER;
+    }
+
     CreateInfoWrapper wrapper(*pCreateInfo, data_allocator);
     VkResult result = wrapper.Validate();
     if (result != VK_SUCCESS)