Add DaydreamVR native libraries and services

Upstreaming the main VR system components from master-dreamos-dev
into goog/master.

Bug: None
Test: `m -j32` succeeds. Sailfish boots and basic_vr sample app works.
Change-Id: I853015872afc443aecee10411ef2d6b79184d051
diff --git a/libs/vr/libeds/Android.mk b/libs/vr/libeds/Android.mk
new file mode 100644
index 0000000..0345f6d
--- /dev/null
+++ b/libs/vr/libeds/Android.mk
@@ -0,0 +1,89 @@
+# Copyright (C) 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(call my-dir)
+
+sourceFiles := \
+	eds.cpp \
+	eds_mesh.cpp \
+	composite_hmd.cpp \
+	cpu_thread_pose_updater.cpp \
+	display_metrics.cpp \
+	distortion_renderer.cpp \
+	lucid_metrics.cpp \
+	lucid_pose_tracker.cpp \
+	lookup_radial_distortion.cpp \
+	polynomial_radial_distortion.cpp
+
+includeFiles := \
+	$(LOCAL_PATH)/include
+
+sharedLibraries := \
+	libbase \
+	libcutils \
+	liblog \
+	libEGL \
+	libGLESv1_CM \
+	libGLESv2 \
+	libvulkan
+
+staticLibraries := \
+	libchrome \
+	libdisplay \
+	libdvrcommon \
+	libdvrgraphics \
+	libsensor \
+	libpdx_default_transport \
+
+include $(CLEAR_VARS)
+LOCAL_SRC_FILES := $(sourceFiles)
+LOCAL_C_INCLUDES := $(includeFiles)
+LOCAL_CFLAGS := -DGL_GLEXT_PROTOTYPES -DEGL_EGLEXT_PROTOTYPES
+LOCAL_CFLAGS += -Wno-unused-parameter
+# Enable debug options below to show GL errors and use gdb.
+# LOCAL_CFLAGS += -UNDEBUG -DDEBUG -O0 -g
+LOCAL_EXPORT_C_INCLUDE_DIRS := $(includeFiles)
+LOCAL_SHARED_LIBRARIES := $(sharedLibraries)
+LOCAL_STATIC_LIBRARIES := $(staticLibraries)
+LOCAL_MODULE := libeds
+include $(BUILD_STATIC_LIBRARY)
+
+
+testFiles := \
+  tests/eds_app_tests.cpp
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := eds_app_tests
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SRC_FILES := \
+  $(testFiles) \
+
+LOCAL_C_INCLUDES := \
+  $(includeFiles) \
+
+LOCAL_SHARED_LIBRARIES := \
+  libhardware \
+  libsync \
+  $(sharedLibraries) \
+
+LOCAL_STATIC_LIBRARIES := \
+  libgmock_main \
+  libgmock \
+  libdisplay \
+  libeds \
+  libbufferhub \
+  $(staticLibraries) \
+
+include $(BUILD_NATIVE_TEST)
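+
+# A sketch of running the tests on-device (the exact output path varies by
+# product and architecture; this assumes the common nativetest layout):
+#   adb push $OUT/data/nativetest/eds_app_tests/eds_app_tests /data/nativetest/
+#   adb shell /data/nativetest/eds_app_tests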
diff --git a/libs/vr/libeds/composite_hmd.cpp b/libs/vr/libeds/composite_hmd.cpp
new file mode 100644
index 0000000..d29cd65
--- /dev/null
+++ b/libs/vr/libeds/composite_hmd.cpp
@@ -0,0 +1,256 @@
+#include "include/private/dvr/composite_hmd.h"
+
+#include <base/logging.h>
+#include <private/dvr/numeric.h>
+
+namespace android {
+namespace dvr {
+
+CompositeHmd::CompositeHmd(const HeadMountMetrics& head_mount_metrics,
+                           const DisplayMetrics& display_metrics)
+    : head_mount_metrics_(head_mount_metrics),
+      display_metrics_(display_metrics) {
+  MetricsChanged();
+}
+
+float CompositeHmd::GetTargetFrameDuration() const {
+  return display_metrics_.GetFrameDurationSeconds();
+}
+
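+// ComputeDistortedPoint composes three maps per color channel:
+//   normalized screen -> tan-angle -> lens distortion -> normalized texture.
+// ComputeInverseDistortedPoint below applies the inverse of each map in
+// reverse order.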
+vec2 CompositeHmd::ComputeDistortedPoint(EyeType eye, vec2 position,
+                                         RgbColorChannel channel) const {
+  position =
+      TransformPoint(eye_tan_angle_from_norm_screen_matrix_[eye], position);
+  vec2 distorted =
+      head_mount_metrics_.GetColorChannelDistortion(channel).Distort(position);
+  return TransformPoint(eye_norm_texture_from_tan_angle_matrix_[eye],
+                        distorted);
+}
+
+vec2 CompositeHmd::ComputeInverseDistortedPoint(EyeType eye, vec2 position,
+                                                RgbColorChannel channel) const {
+  position = TransformPoint(eye_norm_texture_from_tan_angle_inv_matrix_[eye],
+                            position);
+  vec2 distorted =
+      head_mount_metrics_.GetColorChannelDistortion(channel).DistortInverse(
+          position);
+  return TransformPoint(eye_tan_angle_from_norm_screen_inv_matrix_[eye],
+                        distorted);
+}
+
+void CompositeHmd::ComputeDistortedVertex(EyeType eye, vec2 uv_in,
+                                          vec2* vertex_out,
+                                          vec2* uv_out) const {
+  // The mesh vertices hold the shape of the distortion.
+  vec2 vertex_position = ComputeInverseDistortedPoint(eye, uv_in, kRed);
+  *vertex_out = vec2(vertex_position.x() - 0.5f, vertex_position.y() - 0.5f);
+
+  if (uv_out) {
+    // Compute the texture coordinate for each vertex coordinate.
+    // Red's coordinate is the inverse of the inverse, so skip the calculation
+    // and use uv_in directly.
+    uv_out[kRed] = uv_in;
+    uv_out[kGreen] = ComputeDistortedPoint(eye, vertex_position, kGreen);
+    uv_out[kBlue] = ComputeDistortedPoint(eye, vertex_position, kBlue);
+  }
+}
+
+vec2i CompositeHmd::GetRecommendedRenderTargetSize() const {
+  return recommended_render_target_size_;
+}
+
+Range2i CompositeHmd::GetDisplayRange() const { return display_range_; }
+
+mat4 CompositeHmd::GetEyeFromHeadMatrix(EyeType eye) const {
+  return eye_from_head_matrix_[eye];
+}
+
+FieldOfView CompositeHmd::GetEyeFov(EyeType eye) const { return eye_fov_[eye]; }
+
+Range2i CompositeHmd::GetEyeViewportBounds(EyeType eye) const {
+  return eye_viewport_range_[eye];
+}
+
+void CompositeHmd::SetHeadMountMetrics(
+    const HeadMountMetrics& head_mount_metrics) {
+  // Use the assignment operator to do memberwise copy.
+  head_mount_metrics_ = head_mount_metrics;
+  MetricsChanged();
+}
+
+const HeadMountMetrics& CompositeHmd::GetHeadMountMetrics() const {
+  return head_mount_metrics_;
+}
+
+void CompositeHmd::SetDisplayMetrics(const DisplayMetrics& display_metrics) {
+  // Use the assignment operator to do memberwise copy.
+  display_metrics_ = display_metrics;
+  MetricsChanged();
+}
+
+const DisplayMetrics& CompositeHmd::GetDisplayMetrics() const {
+  return display_metrics_;
+}
+
+void CompositeHmd::MetricsChanged() {
+  // Abbreviations in variable names:
+  //   "vp": viewport
+  //   "ta": tan-angle
+  const HeadMountMetrics& mount = head_mount_metrics_;
+  DisplayMetrics display = display_metrics_;
+
+  if (display.IsPortrait()) {
+    // If we're in portrait mode, toggle the orientation so that all
+    // calculations are done in landscape mode.
+    display.ToggleOrientation();
+  }
+
+  float display_width_meters = display.GetSizeMeters()[0];
+  float display_height_meters = display.GetSizeMeters()[1];
+
+  vec2 pixels_per_meter = vec2(1.0f / display.GetMetersPerPixel()[0],
+                               1.0f / display.GetMetersPerPixel()[1]);
+
+  // virtual_eye_to_screen_dist is the distance from the screen to the eye
+  // after it has been projected through the lens.  This would normally be
+  // slightly different from the distance to the actual eye.
+  float virtual_eye_to_screen_dist = mount.GetVirtualEyeToScreenDistance();
+  float meters_per_tan_angle = virtual_eye_to_screen_dist;
+  vec2 pixels_per_tan_angle = pixels_per_meter * meters_per_tan_angle;
+
+  CHECK_NE(0.0f, display_width_meters);
+  CHECK_NE(0.0f, display_height_meters);
+  CHECK_NE(0.0f, virtual_eye_to_screen_dist);
+
+  // Height of lenses from the bottom of the screen.
+  float lens_y_center = 0;
+  float bottom_dist = 0;
+  float top_dist = 0;
+
+  // bottom_display_dist and top_display_dist represent the distance from the
+  // lens center to the edge of the display.
+  float bottom_display_dist = 0;
+  float top_display_dist = 0;
+  switch (mount.GetVerticalAlignment()) {
+    case HeadMountMetrics::kBottom:
+      lens_y_center =
+          mount.GetTrayToLensDistance() - display.GetBorderSizeMeters();
+      bottom_dist = lens_y_center;
+      top_dist = lens_y_center;
+      bottom_display_dist = lens_y_center;
+      top_display_dist = display_height_meters - lens_y_center;
+      break;
+    case HeadMountMetrics::kCenter:
+      // TODO(hendrikw): This should respect the border size, but since we
+      //                 currently hard code the border size, it would break
+      //                 the distortion on some devices.  Revisit when border
+      //                 size is fixed.
+      lens_y_center = display_height_meters * 0.5f;
+      bottom_dist = lens_y_center;
+      top_dist = lens_y_center;
+      bottom_display_dist = lens_y_center;
+      top_display_dist = lens_y_center;
+      break;
+    case HeadMountMetrics::kTop:
+      lens_y_center = display_height_meters - (mount.GetTrayToLensDistance() -
+                                               display.GetBorderSizeMeters());
+      bottom_dist =
+          mount.GetTrayToLensDistance() - display.GetBorderSizeMeters();
+      top_dist = bottom_dist;
+      bottom_display_dist = lens_y_center;
+      top_display_dist = display_height_meters - lens_y_center;
+      break;
+  }
+
+  float inner_dist = mount.GetScreenCenterToLensDistance();
+  float outer_dist = display_width_meters * 0.5f - inner_dist;
+
+  // We don't take chromatic aberration into account yet for computing FOV,
+  // viewport, etc, so we only use the green channel for now. Note the actual
+  // Distort function *does* implement chromatic aberration.
+  const ColorChannelDistortion& distortion =
+      mount.GetColorChannelDistortion(kGreen);
+
+  vec2 outer_point(outer_dist / virtual_eye_to_screen_dist, 0.0f);
+  vec2 inner_point(inner_dist / virtual_eye_to_screen_dist, 0.0f);
+  vec2 bottom_point(0.0f, bottom_dist / virtual_eye_to_screen_dist);
+  vec2 top_point(0.0f, top_dist / virtual_eye_to_screen_dist);
+
+  float outer_angle = atanf(distortion.Distort(outer_point)[0]);
+  float inner_angle = atanf(distortion.Distort(inner_point)[0]);
+  float bottom_angle = atanf(distortion.Distort(bottom_point)[1]);
+  float top_angle = atanf(distortion.Distort(top_point)[1]);
+
+  for (EyeType eye : {kLeftEye, kRightEye}) {
+    const FieldOfView max_fov = mount.GetEyeMaxFov(eye);
+    float left_angle = (eye == kLeftEye) ? outer_angle : inner_angle;
+    float right_angle = (eye == kLeftEye) ? inner_angle : outer_angle;
+
+    eye_fov_[eye] = FieldOfView(std::min(left_angle, max_fov.GetLeft()),
+                                std::min(right_angle, max_fov.GetRight()),
+                                std::min(bottom_angle, max_fov.GetBottom()),
+                                std::min(top_angle, max_fov.GetTop()));
+
+    vec2 texture_vp_ta_p1 =
+        vec2(-tanf(eye_fov_[eye].GetLeft()), -tanf(eye_fov_[eye].GetBottom()));
+    vec2 texture_vp_ta_p2 =
+        vec2(tanf(eye_fov_[eye].GetRight()), tanf(eye_fov_[eye].GetTop()));
+    vec2 texture_vp_size_ta = texture_vp_ta_p2 - texture_vp_ta_p1;
+
+    vec2 texture_vp_sizef_pixels =
+        texture_vp_size_ta.array() * pixels_per_tan_angle.array();
+
+    vec2i texture_vp_size_pixels =
+        vec2i(static_cast<int32_t>(roundf(texture_vp_sizef_pixels[0])),
+              static_cast<int32_t>(roundf(texture_vp_sizef_pixels[1])));
+    int vp_start_x =
+        (eye == kLeftEye) ? 0 : eye_viewport_range_[kLeftEye].p2[0];
+
+    eye_viewport_range_[eye] =
+        Range2i::FromSize(vec2i(vp_start_x, 0), texture_vp_size_pixels);
+    float left_dist = (eye == kLeftEye) ? outer_dist : inner_dist;
+    float right_dist = (eye == kLeftEye) ? inner_dist : outer_dist;
+    vec2 screen_ta_p1(-left_dist / virtual_eye_to_screen_dist,
+                      -bottom_display_dist / virtual_eye_to_screen_dist);
+    vec2 screen_ta_p2(right_dist / virtual_eye_to_screen_dist,
+                      top_display_dist / virtual_eye_to_screen_dist);
+    vec2 screen_ta_size = screen_ta_p2 - screen_ta_p1;
+
+    // Align the tan angle coordinates to the nearest pixel.  This will ensure
+    // that the optical center doesn't straddle multiple pixels.
+    // TODO(hendrikw): verify that this works correctly for Daydream View.
+    vec2 tan_angle_per_pixel(screen_ta_size.array() /
+                             texture_vp_size_pixels.cast<float>().array());
+    vec2 pixel_p1(screen_ta_p1.array() / tan_angle_per_pixel.array());
+    vec2 pixel_shift(roundf(pixel_p1.x()) - pixel_p1.x(),
+                     roundf(pixel_p1.y()) - pixel_p1.y());
+    screen_ta_p1 +=
+        (tan_angle_per_pixel.array() * pixel_shift.array()).matrix();
+    screen_ta_p2 +=
+        (tan_angle_per_pixel.array() * pixel_shift.array()).matrix();
+
+    // Calculate the transformations needed for the distortions.
+    eye_tan_angle_from_norm_screen_matrix_[eye] =
+        TranslationMatrix(vec2(screen_ta_p1)) *
+        ScaleMatrix(screen_ta_size);
+    eye_tan_angle_from_norm_screen_inv_matrix_[eye] =
+        eye_tan_angle_from_norm_screen_matrix_[eye].inverse();
+
+    eye_norm_texture_from_tan_angle_inv_matrix_[eye] =
+        TranslationMatrix(texture_vp_ta_p1) *
+        ScaleMatrix(texture_vp_size_ta);
+    eye_norm_texture_from_tan_angle_matrix_[eye] =
+        eye_norm_texture_from_tan_angle_inv_matrix_[eye].inverse();
+  }
+  vec2i left_vp_size = eye_viewport_range_[kLeftEye].GetSize();
+  vec2i right_vp_size = eye_viewport_range_[kRightEye].GetSize();
+
+  recommended_render_target_size_ =
+      vec2i(left_vp_size[0] + right_vp_size[0],
+            std::max(left_vp_size[1], right_vp_size[1]));
+
+  display_range_ = Range2i::FromSize(vec2i(0, 0), display.GetSizePixels());
+
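+  // eye_from_head maps head space into each eye's frame. The left eye sits at
+  // -x in head space (the screen-center-to-lens distance is half the
+  // inter-lens separation), so its matrix translates by +x, and vice versa.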
+  eye_from_head_matrix_[kLeftEye] = Eigen::Translation3f(
+      vec3(mount.GetScreenCenterToLensDistance(), 0.0f, 0.0f));
+  eye_from_head_matrix_[kRightEye] = Eigen::Translation3f(
+      vec3(-mount.GetScreenCenterToLensDistance(), 0.0f, 0.0f));
+}
+
+}  // namespace dvr
+}  // namespace android
diff --git a/libs/vr/libeds/cpu_thread_pose_updater.cpp b/libs/vr/libeds/cpu_thread_pose_updater.cpp
new file mode 100644
index 0000000..5b8a734
--- /dev/null
+++ b/libs/vr/libeds/cpu_thread_pose_updater.cpp
@@ -0,0 +1,86 @@
+#include "include/private/dvr/cpu_thread_pose_updater.h"
+
+#include <sys/prctl.h>
+#include <unistd.h>
+
+#define ATRACE_TAG ATRACE_TAG_INPUT
+#include <utils/Trace.h>
+
+#include <private/dvr/clock_ns.h>
+#include <private/dvr/debug.h>
+
+namespace android {
+namespace dvr {
+
+CpuThreadPoseUpdater::CpuThreadPoseUpdater()
+    : stop_request_(false), update_period_us_(0), count_(0) {}
+
+CpuThreadPoseUpdater::~CpuThreadPoseUpdater() { StopAndJoin(); }
+
+void CpuThreadPoseUpdater::Start(volatile RawPosePair* mapped_pose_buffer,
+                                 int period_us) {
+  mapped_pose_buffer_ = mapped_pose_buffer;
+  update_period_us_ = period_us;
+  stop_request_ = false;
+
+  // First buffer is odd (starts at 1), second is even (starts at 2).
+  count_ = 0;
+  mapped_pose_buffer_->pose1.Reset(++count_);
+  mapped_pose_buffer_->pose2.Reset(++count_);
+
+  update_thread_ = std::thread(&CpuThreadPoseUpdater::UpdateThread, this);
+}
+
+void CpuThreadPoseUpdater::StopAndJoin() {
+  stop_request_ = true;
+  if (update_thread_.joinable()) {
+    update_thread_.join();
+  }
+}
+
+void CpuThreadPoseUpdater::UpdateThread() {
+  prctl(PR_SET_NAME, reinterpret_cast<intptr_t>("CpuPoseUpdater"),
+        0, 0, 0);
+
+  ATRACE_NAME(__PRETTY_FUNCTION__);
+  for (;;) {
+    if (stop_request_) {
+      break;
+    }
+
+    ++count_;
+
+    // Choose the writable pose based on whether count is odd or even.
+    volatile RawPose* out_pose = nullptr;
+    if (count_ & 1) {
+      out_pose = &mapped_pose_buffer_->pose1;
+    } else {
+      out_pose = &mapped_pose_buffer_->pose2;
+    }
+
+    {
+      ATRACE_NAME("GetPose");
+      Posef pose = pose_tracker_.GetPose(GetSystemClockNs());
+      out_pose->qx = pose.GetRotation().x();
+      out_pose->qy = pose.GetRotation().y();
+      out_pose->qz = pose.GetRotation().z();
+      out_pose->qw = pose.GetRotation().w();
+      out_pose->px = pose.GetPosition()[0];
+      out_pose->py = pose.GetPosition()[1];
+      out_pose->pz = pose.GetPosition()[2];
+      // Atomically store the count so that it hits memory last:
+      out_pose->count.store(count_, std::memory_order_release);
+    }
+
+    // Sleep to simulate the IMU update process.
+    usleep(update_period_us_);
+    // TODO(jbates) sleep_for returns immediately, we need to fix our toolchain!
+    // int64_t c1 = GetSystemClockNs();
+    // std::this_thread::sleep_for(std::chrono::milliseconds(10));
+    // int64_t c2 = GetSystemClockNs();
+    // fprintf(stderr, "%lld us\n", (long long)(c2 - c1) / 1000);
+  }
+}
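+
+// A sketch of the consumer side of this double buffer (hypothetical, not part
+// of the updater): pick the buffer with the larger count, then re-read the
+// count after copying to detect a torn read.
+//
+//   volatile const RawPose* p = &buffer->pose1;
+//   if (buffer->pose2.count.load(std::memory_order_acquire) >
+//       buffer->pose1.count.load(std::memory_order_acquire)) {
+//     p = &buffer->pose2;
+//   }
+//   const uint32_t count_before = p->count.load(std::memory_order_acquire);
+//   // ... copy qx..qw and px..pz ...
+//   const bool torn =
+//       p->count.load(std::memory_order_acquire) != count_before;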
+
+}  // namespace dvr
+}  // namespace android
diff --git a/libs/vr/libeds/display_metrics.cpp b/libs/vr/libeds/display_metrics.cpp
new file mode 100644
index 0000000..e129395
--- /dev/null
+++ b/libs/vr/libeds/display_metrics.cpp
@@ -0,0 +1,30 @@
+#include "include/private/dvr/display_metrics.h"
+
+namespace android {
+namespace dvr {
+
+DisplayMetrics::DisplayMetrics(vec2i size_pixels, vec2 meters_per_pixel,
+                               float border_size_meters,
+                               float frame_duration_seconds,
+                               DisplayOrientation orientation)
+    : size_pixels_(size_pixels),
+      meters_per_pixel_(meters_per_pixel),
+      border_size_meters_(border_size_meters),
+      frame_duration_seconds_(frame_duration_seconds),
+      orientation_(orientation) {}
+
+void DisplayMetrics::ToggleOrientation() {
+  std::swap(size_pixels_[0], size_pixels_[1]);
+  std::swap(meters_per_pixel_[0], meters_per_pixel_[1]);
+  if (orientation_ == DisplayOrientation::kPortrait)
+    orientation_ = DisplayOrientation::kLandscape;
+  else
+    orientation_ = DisplayOrientation::kPortrait;
+}
+
+DisplayMetrics::DisplayMetrics()
+    : DisplayMetrics(vec2i(0, 0), vec2(0.0f, 0.0f), 0.0f, 0.0f,
+                     DisplayOrientation::kLandscape) {}
+
+}  // namespace dvr
+}  // namespace android
diff --git a/libs/vr/libeds/distortion_renderer.cpp b/libs/vr/libeds/distortion_renderer.cpp
new file mode 100644
index 0000000..a19843f
--- /dev/null
+++ b/libs/vr/libeds/distortion_renderer.cpp
@@ -0,0 +1,793 @@
+#include "include/private/dvr/distortion_renderer.h"
+
+#include <float.h>
+
+#include <string>
+
+#include <utils/Log.h>
+#define ATRACE_TAG ATRACE_TAG_GRAPHICS
+#include <utils/Trace.h>
+
+#include <base/logging.h>
+#include <private/dvr/clock_ns.h>
+#include <private/dvr/composite_hmd.h>
+#include <private/dvr/debug.h>
+#include <private/dvr/graphics/gpu_profiler.h>
+#include <private/dvr/ortho.h>
+#include <private/dvr/sensor_constants.h>
+
+#define STRINGIFY2(s) #s
+#define STRINGIFY(s) STRINGIFY2(s)
+
+#define POSITION_ATTR 0
+#define VIEWPORT_COORD_R_ATTR 1
+#define VIEWPORT_COORD_G_ATTR 2
+#define VIEWPORT_COORD_B_ATTR 3
+
+// Pose data uniform buffer bindings. Must be sequential.
+#define POSE_BINDING 0
+#define POSE_BINDING2 1
+
+// Texture unit bindings. Must be sequential.
+// Things break if we start at binding 0 (samples come back black).
+#define SAMPLER_BINDING 1
+#define SAMPLER_BINDING2 2
+
+#define GLSL_VIGNETTE_FUNC                                       \
+  "float vignette(vec2 texCoords) {\n"                           \
+  "  const float fadeDist = 0.01;\n"                             \
+  "  const float fadeDistInv = 1.0 / fadeDist;\n"                \
+  "  const float inset = 0.02;\n"                                \
+  "  vec2 lowEdge = vec2(inset - fadeDist);\n"                   \
+  "  vec2 highEdge = vec2(1.0 - inset + fadeDist);\n"            \
+  "  vec2 vignetteMin = "                                        \
+  "    clamp(-fadeDistInv * (lowEdge - texCoords), 0.0, 1.0);\n" \
+  "  vec2 vignetteMax = "                                        \
+  "    clamp(fadeDistInv * (highEdge - texCoords), 0.0, 1.0);\n" \
+  "  vec2 vignette = vignetteMin * vignetteMax;\n"               \
+  "  return vignette.x * vignette.y;\n"                          \
+  "}\n"
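+
+// vignette() ramps each screen edge linearly: a coordinate contributes 0 at
+// inset - fadeDist (0.01) from the low edge, reaches full weight at the inset
+// (0.02), and mirrors the same ramp from the high edge at 1.0 - inset.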
+
+namespace {
+
+// If enabled, the pixel shader will blend by reading back the current pixel
+// from the framebuffer.
+// TODO(jbates) With framebuffer read coherency disabled, this seems to perform
+//   well enough. That requires a GL extension, so for now we disable this path.
+constexpr bool kUseFramebufferReadback = false;
+
+static const char* kVertexShaderChromaticAberrationString =
+    "uniform mat4 uProjectionMatrix;\n"
+    "layout(binding = " STRINGIFY(POSE_BINDING) ", std140)\n"
+    "uniform LateLatchData {\n"
+    "  mat4 uTexFromRecommendedViewportMatrix;\n"
+    "};\n"
+    "#ifdef COMPOSITE_LAYER_2\n"
+    "layout(binding = " STRINGIFY(POSE_BINDING2) ", std140)\n"
+    "uniform LateLatchData2 {\n"
+    "  mat4 uTexFromRecommendedViewportMatrix2;\n"
+    "};\n"
+    "#endif\n"
+    "uniform vec4 uTexXMinMax;\n"
+    "layout(location = " STRINGIFY(POSITION_ATTR) ") in vec2 aPosition;\n"
+    "layout(location = " STRINGIFY(VIEWPORT_COORD_R_ATTR)
+           ") in vec2 aViewportCoordsR;\n"
+    "layout(location = " STRINGIFY(VIEWPORT_COORD_G_ATTR)
+           ") in vec2 aViewportCoordsG;\n"
+    "layout(location = " STRINGIFY(VIEWPORT_COORD_B_ATTR)
+           ") in vec2 aViewportCoordsB;\n"
+    "mediump out vec4 vTexCoordsRG;\n"
+    "mediump out vec2 vTexCoordsB;\n"
+    "#ifdef COMPOSITE_LAYER_2\n"
+    "mediump out vec4 vTexCoordsRG2;\n"
+    "mediump out vec2 vTexCoordsB2;\n"
+    "#endif\n"
+    "mediump out vec3 vVignette;\n"
+    "\n" GLSL_VIGNETTE_FUNC
+    "void main(void) {\n"
+    "  vVignette.r = vignette(aViewportCoordsR);\n"
+    "  vVignette.g = vignette(aViewportCoordsG);\n"
+    "  vVignette.b = vignette(aViewportCoordsB);\n"
+    "  vec4 redTexCoords = (uTexFromRecommendedViewportMatrix * \n"
+    "                       vec4(aViewportCoordsR, 0., 1.));\n"
+    "  vec4 greenTexCoords = (uTexFromRecommendedViewportMatrix * \n"
+    "                         vec4(aViewportCoordsG, 0., 1.));\n"
+    "  vec4 blueTexCoords = (uTexFromRecommendedViewportMatrix * \n"
+    "                        vec4(aViewportCoordsB, 0., 1.));\n"
+    "  vTexCoordsRG.xy = redTexCoords.xy / redTexCoords.w;\n"
+    "  vTexCoordsRG.zw = greenTexCoords.xy / greenTexCoords.w;\n"
+    "  vTexCoordsB = blueTexCoords.xy / blueTexCoords.w;\n"
+    "  vTexCoordsRG.x = clamp(vTexCoordsRG.x, uTexXMinMax.x, uTexXMinMax.y);\n"
+    "  vTexCoordsRG.z = clamp(vTexCoordsRG.z, uTexXMinMax.x, uTexXMinMax.y);\n"
+    "  vTexCoordsB.x = clamp(vTexCoordsB.x, uTexXMinMax.x, uTexXMinMax.y);\n"
+    "#ifdef COMPOSITE_LAYER_2\n"
+    "  redTexCoords = (uTexFromRecommendedViewportMatrix2 * \n"
+    "                  vec4(aViewportCoordsR, 0., 1.));\n"
+    "  greenTexCoords = (uTexFromRecommendedViewportMatrix2 * \n"
+    "                    vec4(aViewportCoordsG, 0., 1.));\n"
+    "  blueTexCoords = (uTexFromRecommendedViewportMatrix2 * \n"
+    "                   vec4(aViewportCoordsB, 0., 1.));\n"
+    "  vTexCoordsRG2.xy = redTexCoords.xy / redTexCoords.w;\n"
+    "  vTexCoordsRG2.zw = greenTexCoords.xy / greenTexCoords.w;\n"
+    "  vTexCoordsB2 = blueTexCoords.xy / blueTexCoords.w;\n"
+    "  vTexCoordsRG2.x = clamp(vTexCoordsRG2.x,\n"
+    "                          uTexXMinMax.z, uTexXMinMax.w);\n"
+    "  vTexCoordsRG2.z = clamp(vTexCoordsRG2.z, uTexXMinMax.z,\n"
+    "                          uTexXMinMax.w);\n"
+    "  vTexCoordsB2.x = clamp(vTexCoordsB2.x, uTexXMinMax.z, uTexXMinMax.w);\n"
+    "#endif\n"
+    "  gl_Position = uProjectionMatrix * vec4(aPosition, 0., 1.);\n"
+    "}\n";
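+
+// Note that the red and green texture coordinates share a single vec4 varying
+// (vTexCoordsRG.xy and .zw) to save an interpolant slot; blue keeps its own
+// vec2.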
+
+static const char* kFragmentShaderChromaticAberrationString =
+    "#ifdef GL_ES\n"
+    "precision mediump float;\n"
+    "#endif\n"
+    " \n"
+    "layout(binding = " STRINGIFY(SAMPLER_BINDING) ")\n"
+    "uniform sampler2D uDistortionTexture; \n"
+    "mediump in vec4 vTexCoordsRG;\n"
+    "mediump in vec2 vTexCoordsB;\n"
+    "#ifdef COMPOSITE_LAYER_2\n"
+    "layout(binding = " STRINGIFY(SAMPLER_BINDING2) ")\n"
+    "uniform sampler2D uDistortionTexture2; \n"
+    "mediump in vec4 vTexCoordsRG2;\n"
+    "mediump in vec2 vTexCoordsB2;\n"
+    "#endif\n"
+    "mediump in vec3 vVignette;\n"
+    "#ifdef BLEND_WITH_PREVIOUS_LAYER \n"
+    "inout vec4 fragColor; \n"
+    "#else \n"
+    "out vec4 fragColor; \n"
+    "#endif \n"
+    " \n"
+    "void main(void) { \n"
+    "  vec4 ra = texture(uDistortionTexture, vTexCoordsRG.xy); \n"
+    "  vec4 ga = texture(uDistortionTexture, vTexCoordsRG.zw); \n"
+    "  vec4 ba = texture(uDistortionTexture, vTexCoordsB); \n"
+    "#ifdef BLEND_WITH_PREVIOUS_LAYER \n"
+    "  vec3 alpha1 = vec3(ra.a, ga.a, ba.a); \n"
+    "  vec3 color = (vec3(1.0) - alpha1) * fragColor.rgb + \n"
+    "               alpha1 * vec3(ra.r, ga.g, ba.b); \n"
+    "#else // BLEND_WITH_PREVIOUS_LAYER \n"
+    "  vec3 color = vec3(ra.r, ga.g, ba.b); \n"
+    "#endif // BLEND_WITH_PREVIOUS_LAYER \n"
+    "#ifdef COMPOSITE_LAYER_2 \n"
+    "  // Alpha blend layer 2 onto layer 1. \n"
+    "  vec4 ra2 = texture(uDistortionTexture2, vTexCoordsRG2.xy); \n"
+    "  vec4 ga2 = texture(uDistortionTexture2, vTexCoordsRG2.zw); \n"
+    "  vec4 ba2 = texture(uDistortionTexture2, vTexCoordsB2); \n"
+    "  vec3 color2 = vec3(ra2.r, ga2.g, ba2.b); \n"
+    "  vec3 alpha2 = vec3(ra2.a, ga2.a, ba2.a); \n"
+    "  color = (vec3(1.0) - alpha2) * color + alpha2 * color2; \n"
+    "#endif \n"
+    "#ifdef ALPHA_VIGNETTE\n"
+    "  fragColor = vec4(color, vVignette.b * ga.a); \n"
+    "#else // ALPHA_VIGNETTE\n"
+    "  fragColor = vec4(vVignette.rgb * color, ga.a); \n"
+    "#endif // ALPHA_VIGNETTE\n"
+    "} \n";
+
+static const char* kVertexShaderNoChromaticAberrationString =
+    "uniform mat4 uProjectionMatrix;\n"
+    "layout(binding = " STRINGIFY(POSE_BINDING) ", std140)\n"
+    "uniform LateLatchData {\n"
+    "  mat4 uTexFromRecommendedViewportMatrix;\n"
+    "};\n"
+    "#ifdef COMPOSITE_LAYER_2\n"
+    "layout(binding = " STRINGIFY(POSE_BINDING2) ", std140)\n"
+    "uniform LateLatchData2 {\n"
+    "  mat4 uTexFromRecommendedViewportMatrix2;\n"
+    "};\n"
+    "#endif\n"
+    "uniform vec4 uTexXMinMax;\n"
+    "layout(location = " STRINGIFY(POSITION_ATTR) ") in vec2 aPosition;\n"
+    "layout(location = " STRINGIFY(VIEWPORT_COORD_G_ATTR)
+           ") in vec2 aViewportCoords;\n"
+    "mediump out vec2 vTexCoords;\n"
+    "#ifdef COMPOSITE_LAYER_2\n"
+    "mediump out vec2 vTexCoords2;\n"
+    "#endif\n"
+    "mediump out vec3 vVignette;\n"
+    "\n" GLSL_VIGNETTE_FUNC
+    "void main(void) {\n"
+    "  float fVignette = vignette(aViewportCoords);\n"
+    "  vVignette = vec3(fVignette, fVignette, fVignette);\n"
+    "  vec4 texCoords = (uTexFromRecommendedViewportMatrix * \n"
+    "                    vec4(aViewportCoords, 0., 1.));\n"
+    "  vTexCoords = texCoords.xy / texCoords.w;\n"
+    "  vTexCoords.x = clamp(vTexCoords.x, uTexXMinMax.x, uTexXMinMax.y);\n"
+    "#ifdef COMPOSITE_LAYER_2\n"
+    "  texCoords = (uTexFromRecommendedViewportMatrix2 * \n"
+    "               vec4(aViewportCoords, 0., 1.));\n"
+    "  vTexCoords2 = texCoords.xy / texCoords.w;\n"
+    "  vTexCoords2.x = clamp(vTexCoords2.x, uTexXMinMax.z, uTexXMinMax.w);\n"
+    "#endif\n"
+    "  gl_Position = uProjectionMatrix * vec4(aPosition, 0., 1.);\n"
+    "}\n";
+
+static const char* kFragmentShaderNoChromaticAberrationString =
+    "#ifdef GL_ES\n"
+    "precision mediump float;\n"
+    "#endif\n"
+    " \n"
+    "layout(binding = " STRINGIFY(SAMPLER_BINDING) ")\n"
+    "uniform sampler2D uDistortionTexture; \n"
+    "mediump in vec2 vTexCoords;\n"
+    "#ifdef COMPOSITE_LAYER_2\n"
+    "layout(binding = " STRINGIFY(SAMPLER_BINDING2) ")\n"
+    "uniform sampler2D uDistortionTexture2; \n"
+    "mediump in vec2 vTexCoords2;\n"
+    "#endif\n"
+    "mediump in vec3 vVignette;\n"
+    "out vec4 fragColor;\n"
+    " \n"
+    "void main(void) { \n"
+    "  vec4 color = texture(uDistortionTexture, vTexCoords); \n"
+    "#ifdef COMPOSITE_LAYER_2 \n"
+    "  // Alpha blend layer 2 onto layer 1. \n"
+    "  vec4 color2 = texture(uDistortionTexture2, vTexCoords2); \n"
+    "  float alpha2 = color2.a; \n"
+    "  color.rgb = (1.0 - alpha2) * color.rgb + alpha2 * color2.rgb; \n"
+    "#endif \n"
+    "  fragColor = vec4(vVignette * color.rgb, color.a); \n"
+    "} \n";
+
+static const char* kVertexShaderSimpleVideoQuadString =
+    "uniform mat4 uProjectionMatrix;\n"
+    "layout(binding = " STRINGIFY(POSE_BINDING) ", std140)\n"
+    "uniform LateLatchData {\n"
+    "  mat4 uEdsCorrection;\n"
+    "};\n"
+    "uniform mat4 uTexFromEyeMatrix;\n"
+    "uniform mat4 uEyeFromViewportMatrix;\n"
+    "layout(location = " STRINGIFY(POSITION_ATTR) ") in vec2 aPosition;\n"
+    "layout(location = " STRINGIFY(VIEWPORT_COORD_G_ATTR)
+           ") in vec2 aViewportCoords;\n"
+    "mediump out vec2 vTexCoords;\n"
+    "void main(void) {\n"
+    "  mat4 m = uTexFromEyeMatrix * inverse(uEdsCorrection) * uEyeFromViewportMatrix;\n"
+    "  mat3 uTexFromViewportMatrix = inverse(mat3(m[0].xyw, m[1].xyw, m[3].xyw)); \n"
+    "  vec3 texCoords = uTexFromViewportMatrix * vec3(aViewportCoords, 1.0);\n"
+    "  vTexCoords = texCoords.xy / texCoords.z;\n"
+    "  gl_Position = uProjectionMatrix * vec4(aPosition, 0.0, 1.0);\n"
+    "}\n";
+
+static const char* kFragmentShaderSimpleVideoQuadString =
+    "#extension GL_OES_EGL_image_external_essl3 : enable\n"
+    " \n"
+    "#ifdef GL_ES\n"
+    "precision mediump float;\n"
+    "#endif\n"
+    " \n"
+    "layout(binding = " STRINGIFY(SAMPLER_BINDING) ")\n"
+    "uniform samplerExternalOES uDistortionTexture; \n"
+    "mediump in vec2 vTexCoords;\n"
+    "out vec4 fragColor;\n"
+    " \n"
+    "void main(void) { \n"
+    "  if (clamp(vTexCoords, 0.0, 1.0) != vTexCoords) { \n"
+    "    fragColor = vec4(0.0, 0.0, 0.0, 0.0); \n"
+    "  } else { \n"
+    "    fragColor = texture(uDistortionTexture, vTexCoords); \n"
+    "  } \n"
+    "} \n";
+
+}  // anonymous namespace
+
+namespace android {
+namespace dvr {
+
+// Note that converting from Clip Space ([-1,1]^3) to Viewport Space
+// for one eye ([0,1]x[0,1]) requires dividing by 2 in x and y.
+const mat4 DistortionRenderer::kViewportFromClipMatrix =
+    Eigen::Translation3f(vec3(0.5f, 0.5f, 0)) *
+    Eigen::DiagonalMatrix<float, 3>(vec3(0.5f, 0.5f, 1.0f));
+
+const mat4 DistortionRenderer::kClipFromViewportMatrix =
+    Eigen::DiagonalMatrix<float, 3>(vec3(2.0f, 2.0f, 1.0f)) *
+    Eigen::Translation3f(vec3(-0.5f, -0.5f, 0));
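+
+// Sanity check: kViewportFromClipMatrix maps clip (-1, -1) to viewport (0, 0)
+// and clip (1, 1) to viewport (1, 1); kClipFromViewportMatrix inverts this,
+// e.g. viewport (0.5, 0.5) maps back to clip (0, 0).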
+
+void DistortionRenderer::EdsShader::load(const char* vertex,
+                                         const char* fragment, int num_layers,
+                                         bool use_alpha_vignette,
+                                         float rotation, bool flip_vertical,
+                                         bool blend_with_previous_layer) {
+  std::string vert_builder = "#version 310 es\n";
+  std::string frag_builder = "#version 310 es\n";
+  if (blend_with_previous_layer && kUseFramebufferReadback) {
+    frag_builder += "#extension GL_EXT_shader_framebuffer_fetch : require\n";
+  }
+
+  if (num_layers == 2) {
+    vert_builder += "#define COMPOSITE_LAYER_2\n";
+    frag_builder += "#define COMPOSITE_LAYER_2\n";
+  } else {
+    CHECK_EQ(num_layers, 1);
+  }
+  if (blend_with_previous_layer) {
+    // Check for unsupported shader combinations:
+    CHECK_EQ(num_layers, 1);
+    CHECK_EQ(use_alpha_vignette, false);
+    if (kUseFramebufferReadback)
+      frag_builder += "#define BLEND_WITH_PREVIOUS_LAYER\n";
+  }
+  if (use_alpha_vignette) {
+    vert_builder += "#define ALPHA_VIGNETTE\n";
+    frag_builder += "#define ALPHA_VIGNETTE\n";
+  }
+
+  vert_builder += vertex;
+  frag_builder += fragment;
+  pgm.Link(vert_builder, frag_builder);
+  CHECK(pgm.IsUsable());
+
+  pgm.Use();
+
+  uProjectionMatrix =
+      glGetUniformLocation(pgm.GetProgram(), "uProjectionMatrix");
+  uTexFromEyeMatrix =
+      glGetUniformLocation(pgm.GetProgram(), "uTexFromEyeMatrix");
+  uEyeFromViewportMatrix =
+      glGetUniformLocation(pgm.GetProgram(), "uEyeFromViewportMatrix");
+  uTexXMinMax = glGetUniformLocation(pgm.GetProgram(), "uTexXMinMax");
+  CHECK_GL();
+
+  float vertical_multiply = flip_vertical ? -1.0 : 1.0;
+  mat4 projectionMatrix = OrthoMatrix(-0.5f, 0.5f, vertical_multiply * -0.5f,
+                                      vertical_multiply * 0.5f, -1.0f, 1.0f);
+
+  // Rotate the mesh into the screen's orientation.
+  // TODO(hendrikw): Once the display is finalized, and perhaps not portrait,
+  //                 look into removing this matrix altogether.
+  projectionMatrix =
+      projectionMatrix * Eigen::AngleAxisf(rotation, vec3::UnitZ());
+
+  CHECK(sizeof(mat4) == 4 * 4 * 4);
+  glUniformMatrix4fv(uProjectionMatrix, 1, false, projectionMatrix.data());
+}
+
+DistortionRenderer::DistortionRenderer(
+    const CompositeHmd& hmd, vec2i display_size, int distortion_mesh_resolution,
+    bool flip_texture_horizontally, bool flip_texture_vertically,
+    bool separated_eye_buffers, bool eds_enabled, bool late_latch_enabled)
+    : shader_type_(kChromaticAberrationCorrection),
+      eds_enabled_(eds_enabled),
+      chromatic_aberration_correction_enabled_(true),
+      use_alpha_vignette_(false),
+      distortion_mesh_resolution_(distortion_mesh_resolution),
+      last_distortion_texture_id_(0),
+      app_texture_target_(GL_TEXTURE_2D),
+      display_size_(display_size),
+      separated_eye_buffers_(separated_eye_buffers) {
+  ATRACE_NAME("DistortionRenderer::DistortionRenderer");
+
+  float device_rotation = 0.0;
+
+  if (eds_enabled_) {
+    // Late latch must be on if eds_enabled_ is true.
+    if (!late_latch_enabled) {
+      LOG(ERROR) << "Cannot enable EDS without late latch. "
+                 << "Force-enabling late latch.";
+      late_latch_enabled = true;
+    }
+  }
+
+  // TODO(hendrikw): Look into moving this logic into DisplayMetrics.
+  if (hmd.GetDisplayMetrics().IsPortrait()) {
+    device_rotation = -M_PI / 2.0f;
+  }
+
+  // Create shader programs.
+  shaders_[kNoChromaticAberrationCorrection].load(
+      kVertexShaderNoChromaticAberrationString,
+      kFragmentShaderNoChromaticAberrationString, 1, false, device_rotation,
+      flip_texture_horizontally, false);
+  shaders_[kNoChromaticAberrationCorrectionTwoLayers].load(
+      kVertexShaderNoChromaticAberrationString,
+      kFragmentShaderNoChromaticAberrationString, 2, false, device_rotation,
+      flip_texture_horizontally, false);
+  shaders_[kChromaticAberrationCorrection].load(
+      kVertexShaderChromaticAberrationString,
+      kFragmentShaderChromaticAberrationString, 1, false, device_rotation,
+      flip_texture_horizontally, false);
+  shaders_[kChromaticAberrationCorrectionTwoLayers].load(
+      kVertexShaderChromaticAberrationString,
+      kFragmentShaderChromaticAberrationString, 2, false, device_rotation,
+      flip_texture_horizontally, false);
+  shaders_[kChromaticAberrationCorrectionAlphaVignette].load(
+      kVertexShaderChromaticAberrationString,
+      kFragmentShaderChromaticAberrationString, 1, true, device_rotation,
+      flip_texture_horizontally, false);
+  shaders_[kChromaticAberrationCorrectionAlphaVignetteTwoLayers].load(
+      kVertexShaderChromaticAberrationString,
+      kFragmentShaderChromaticAberrationString, 2, true, device_rotation,
+      flip_texture_horizontally, false);
+  shaders_[kChromaticAberrationCorrectionWithBlend].load(
+      kVertexShaderChromaticAberrationString,
+      kFragmentShaderChromaticAberrationString, 1, false, device_rotation,
+      flip_texture_horizontally, true);
+  shaders_[kSimpleVideoQuad].load(
+      kVertexShaderSimpleVideoQuadString,
+      kFragmentShaderSimpleVideoQuadString, 1, false, device_rotation,
+      flip_texture_horizontally, true);
+  CHECK_GL();
+
+  mat4 tex_from_recommended_viewport_matrix[2][2][2];
+  for (int eye = 0; eye < 2; ++eye) {
+    // Near and far plane don't actually matter for the clip_from_eye_matrix
+    // below since it is only used (for EDS) to transform coordinates for
+    // which the Z has been dropped.
+    static const float kNear = 0.1f, kFar = 100.0f;
+    const FieldOfView& fov =
+        (eye == kLeftEye ? hmd.GetEyeFov(kLeftEye) : hmd.GetEyeFov(kRightEye));
+    mat4 c_clip_from_eye_matrix = fov.GetProjectionMatrix(kNear, kFar);
+    mat4 c_eye_from_clip_matrix = c_clip_from_eye_matrix.inverse();
+
+    // Compute tex_from_recommended_viewport_matrix.
+
+    // flip_texture_vertically defines the default flip behavior.
+    // do_flip[0] should be the default, while do_flip[1] should be the
+    // inverse of the default.
+    int do_flip[2] = {flip_texture_vertically ? 1 : 0,
+                      flip_texture_vertically ? 0 : 1};
+    for (int flip = 0; flip < 2; ++flip) {
+      vec2 flip_scale(1.0f, do_flip[flip] ? -1.0f : 1.0f);
+      vec2 flip_offset(0.0f, do_flip[flip] ? 1.0f : 0.0f);
+
+      for (int separate_eye = 0; separate_eye < 2; ++separate_eye) {
+        vec2 viewport_corner_offset = (eye == kLeftEye || separate_eye)
+                                          ? vec2(0.0f, 0.0f)
+                                          : vec2(0.5f, 0.0f);
+        const vec2 txy = viewport_corner_offset + flip_offset;
+        const vec2 scalexy = vec2(separate_eye ? 1.0f : 0.5f, 1.0f);
+        tex_from_recommended_viewport_matrix[eye][flip][separate_eye] =
+            Eigen::Translation3f(vec3(txy.x(), txy.y(), 0.0f)) *
+            Eigen::DiagonalMatrix<float, 3>(vec3(flip_scale.x() * scalexy.x(),
+                                                 flip_scale.y(), scalexy.y()));
+
+        tex_from_eye_matrix_[eye][flip][separate_eye] =
+            tex_from_recommended_viewport_matrix[eye][flip][separate_eye] *
+            kViewportFromClipMatrix * c_clip_from_eye_matrix;
+      }
+    }
+
+    eye_from_viewport_matrix_[eye] =
+        c_eye_from_clip_matrix * kClipFromViewportMatrix;
+  }
+
+  // Create UBO for setting the EDS matrix to identity when EDS is disabled.
+  glGenBuffers(2 * 2 * 2, &uTexFromRecommendedViewportMatrix[0][0][0]);
+  for (int eye = 0; eye < 2; ++eye) {
+    for (int flip = 0; flip < 2; ++flip) {
+      for (int separate_eye = 0; separate_eye < 2; ++separate_eye) {
+        glBindBuffer(
+            GL_UNIFORM_BUFFER,
+            uTexFromRecommendedViewportMatrix[eye][flip][separate_eye]);
+        glBufferData(GL_UNIFORM_BUFFER, sizeof(mat4), 0, GL_STATIC_DRAW);
+        CHECK_GL();
+        mat4* mat = static_cast<mat4*>(glMapBufferRange(
+            GL_UNIFORM_BUFFER, 0, sizeof(mat4), GL_MAP_WRITE_BIT));
+        CHECK_GL();
+        *mat = tex_from_recommended_viewport_matrix[eye][flip][separate_eye];
+        glUnmapBuffer(GL_UNIFORM_BUFFER);
+      }
+    }
+  }
+  glBindBuffer(GL_UNIFORM_BUFFER, 0);
+
+  // Create distortion meshes and associated GL resources.
+  glGenBuffers(2, mesh_vbo_);
+  glGenVertexArrays(2, mesh_vao_);
+  glGenBuffers(2, mesh_ibo_);
+  RecomputeDistortion(hmd);
+
+  SetDisplaySize(display_size);
+
+  if (hmd.GetDisplayMetrics().IsPortrait()) {
+    eye_viewport_origin_[0] =
+        vec2i(0, flip_texture_horizontally ? 0 : display_size_[1] / 2);
+    eye_viewport_origin_[1] =
+        vec2i(0, flip_texture_horizontally ? display_size_[1] / 2 : 0);
+    eye_viewport_size_ = vec2i(display_size_[0], display_size_[1] / 2);
+  } else {
+    eye_viewport_origin_[0] = vec2i(0, 0);
+    eye_viewport_origin_[1] = vec2i(display_size_[0] / 2, 0);
+    eye_viewport_size_ = vec2i(display_size_[0] / 2, display_size_[1]);
+  }
+
+  CHECK_GL();
+}
+
+DistortionRenderer::~DistortionRenderer() {
+  glDeleteBuffers(2 * 2 * 2, &uTexFromRecommendedViewportMatrix[0][0][0]);
+  glDeleteBuffers(2, mesh_vbo_);
+  glDeleteVertexArrays(2, mesh_vao_);
+  glDeleteBuffers(2, mesh_ibo_);
+}
+
+void DistortionRenderer::ApplyDistortionCorrectionToTexture(
+    EyeType eye, const GLuint* texture_ids, const bool* vertical_flip,
+    const bool* separate_eye, const int* late_latch_layer, int num_textures,
+    bool blend_with_previous_layer, bool do_gl_state_prep) {
+  ATRACE_NAME(__PRETTY_FUNCTION__);
+
+  bool use_gl_blend = use_alpha_vignette_ ||
+                      (blend_with_previous_layer && !kUseFramebufferReadback);
+  if (use_gl_blend) {
+    glEnable(GL_BLEND);
+    glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
+  }
+  DrawEye(eye, texture_ids, vertical_flip, separate_eye, late_latch_layer,
+          num_textures, blend_with_previous_layer, do_gl_state_prep);
+  if (use_gl_blend) {
+    glDisable(GL_BLEND);
+  }
+  CHECK_GL();
+}
+
+void DistortionRenderer::DrawVideoQuad(EyeType eye, int layer_i,
+                                       GLuint texture_id,
+                                       const mat4& transform) {
+  shaders_[kSimpleVideoQuad].use();
+
+  shaders_[kSimpleVideoQuad].SetTexFromEyeTransform(
+      tex_from_eye_matrix_[eye][0][1]);
+  shaders_[kSimpleVideoQuad].SetEyeFromViewportTransform(
+      transform * kClipFromViewportMatrix);
+
+  if (eds_enabled_) {
+    // Bind late latch view-projection UBO that is produced by AddEdsLateLatch.
+    late_latch_[layer_i]->BindUniformBuffer(
+        POSE_BINDING, LateLatch::kViewMatrix, eye);
+    CHECK_GL();
+  } else {
+    // When EDS is disabled we just set the matrix here with no pose offset.
+    glBindBufferBase(GL_UNIFORM_BUFFER, POSE_BINDING + layer_i,
+                     uTexFromRecommendedViewportMatrix[eye][0][1]);
+    CHECK_GL();
+  }
+
+  glActiveTexture(GL_TEXTURE0 + SAMPLER_BINDING);
+  glBindTexture(GL_TEXTURE_EXTERNAL_OES, texture_id);
+  CHECK_GL();
+
+  glDrawElements(GL_TRIANGLE_STRIP, mesh_node_[eye].indices.size(),
+                 GL_UNSIGNED_SHORT, nullptr);
+
+  CHECK_GL();
+}
+
+void DistortionRenderer::DoLateLatch(uint32_t target_vsync_count,
+                                     const uint32_t* render_buffer_index,
+                                     const GLuint* render_pose_buffer_objects,
+                                     const bool* vertical_flip,
+                                     const bool* separate_eye,
+                                     int num_textures) {
+  if (eds_enabled_) {
+    LateLatchInput data;
+    memset(&data, 0, sizeof(data));
+    for (int ti = 0; ti < num_textures; ++ti) {
+      if (late_latch_[ti] == nullptr)
+        late_latch_[ti].reset(new LateLatch(false));
+
+      int flip_index = vertical_flip[ti] ? 1 : 0;
+      int separate_eye_i = separate_eye[ti] ? 1 : 0;
+      // Copy data into late latch input struct.
+      for (int eye = 0; eye < 2; ++eye) {
+        data.eds_mat1[eye] =
+            tex_from_eye_matrix_[eye][flip_index][separate_eye_i];
+        data.eds_mat2[eye] = eye_from_viewport_matrix_[eye];
+      }
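+      // The async pose ring is indexed by vsync count; assuming
+      // kPoseAsyncBufferIndexMask is ring_size - 1 for a power-of-two ring,
+      // this selects the slot holding the pose predicted for the target vsync.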
+      data.pose_index = target_vsync_count & kPoseAsyncBufferIndexMask;
+      data.render_pose_index = render_buffer_index[ti];
+
+      late_latch_[ti]->AddEdsLateLatch(data, render_pose_buffer_objects[ti]);
+    }
+  }
+}
+
+void DistortionRenderer::PrepGlState(EyeType eye) {
+  glViewport(eye_viewport_origin_[eye].x(), eye_viewport_origin_[eye].y(),
+             eye_viewport_size_.x(), eye_viewport_size_.y());
+
+  glBindVertexArray(mesh_vao_[eye]);
+  CHECK_GL();
+
+  glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mesh_ibo_[eye]);
+  CHECK_GL();
+
+  if (!eds_enabled_) {
+    glMemoryBarrier(GL_UNIFORM_BARRIER_BIT);
+  }
+}
+
+void DistortionRenderer::ResetGlState(int num_textures) {
+  glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
+  glBindBuffer(GL_ARRAY_BUFFER, 0);
+  glBindVertexArray(0);
+  if (eds_enabled_) {
+    for (int ti = 0; ti < num_textures; ++ti)
+      glBindBufferBase(GL_UNIFORM_BUFFER, POSE_BINDING + ti, 0);
+  } else {
+    glBindBuffer(GL_UNIFORM_BUFFER, 0);
+  }
+
+  CHECK_GL();
+
+  // Unbind all texture inputs.
+  for (int ti = 0; ti < num_textures; ++ti) {
+    glActiveTexture(GL_TEXTURE0 + SAMPLER_BINDING + ti);
+    glBindTexture(app_texture_target_, 0);
+  }
+  glActiveTexture(GL_TEXTURE0);
+}
+
+void DistortionRenderer::DrawEye(EyeType eye, const GLuint* texture_ids,
+                                 const bool* vertical_flip,
+                                 const bool* separate_eye,
+                                 const int* late_latch_layer, int num_textures,
+                                 bool blend_with_previous_layer,
+                                 bool do_gl_state_prep) {
+  if (do_gl_state_prep)
+    PrepGlState(eye);
+
+  if (num_textures > kMaxLayers) {
+    LOG(ERROR) << "Too many textures for DistortionRenderer";
+    num_textures = kMaxLayers;
+  }
+
+  CHECK(num_textures == 1 || num_textures == 2);
+
+  if (num_textures == 2) {
+    if (chromatic_aberration_correction_enabled_) {
+      if (use_alpha_vignette_) {
+        shader_type_ = kChromaticAberrationCorrectionAlphaVignetteTwoLayers;
+      } else {
+        shader_type_ = kChromaticAberrationCorrectionTwoLayers;
+      }
+    } else {
+      shader_type_ = kNoChromaticAberrationCorrectionTwoLayers;
+    }
+  } else {
+    if (chromatic_aberration_correction_enabled_) {
+      if (blend_with_previous_layer) {
+        shader_type_ = kChromaticAberrationCorrectionWithBlend;
+      } else if (use_alpha_vignette_) {
+        shader_type_ = kChromaticAberrationCorrectionAlphaVignette;
+      } else {
+        shader_type_ = kChromaticAberrationCorrection;
+      }
+    } else {
+      shader_type_ = kNoChromaticAberrationCorrection;
+    }
+  }
+  shaders_[shader_type_].use();
+
+  for (int ti = 0; ti < num_textures; ++ti) {
+    int flip_index = vertical_flip[ti] ? 1 : 0;
+    if (eds_enabled_) {
+      // Bind late latch view-projection UBO that is produced by
+      // AddEdsLateLatch.
+      late_latch_[late_latch_layer[ti]]->BindUniformBuffer(
+          POSE_BINDING + ti, LateLatch::kViewProjMatrix, eye);
+      CHECK_GL();
+    } else {
+      // When EDS is disabled we just set the matrix here with no pose offset.
+      // With app late-latching, we can't know the pose that the app used
+      // because it's in the app's framebuffer.
+      int separate_eye_i = separate_eye[ti] ? 1 : 0;
+      glBindBufferBase(
+          GL_UNIFORM_BUFFER, POSE_BINDING + ti,
+          uTexFromRecommendedViewportMatrix[eye][flip_index][separate_eye_i]);
+      CHECK_GL();
+    }
+
+    glActiveTexture(GL_TEXTURE0 + SAMPLER_BINDING + ti);
+    glBindTexture(app_texture_target_, texture_ids[ti]);
+    CHECK_GL();
+  }
+
+  // Prevents left eye data from bleeding into right eye and vice-versa.
+  vec2 layer_min_max[kMaxLayers];
+  for (int i = 0; i < kMaxLayers; ++i)
+    layer_min_max[i] = vec2(0.0f, 0.0f);
+  for (int ti = 0; ti < num_textures; ++ti) {
+    if (separate_eye[ti]) {
+      layer_min_max[ti] = vec2(0.0f, 1.0f);  // Use the whole texture.
+    } else if (eye == kLeftEye) {
+      layer_min_max[ti] = vec2(0.0f, 0.499f);
+    } else {
+      layer_min_max[ti] = vec2(0.501f, 1.0f);
+    }
+  }
+  // The second layer stores its x min and max in the z,w slots of the vec4.
+  vec4 xTexMinMax(layer_min_max[0].x(), layer_min_max[0].y(),
+                  layer_min_max[1].x(), layer_min_max[1].y());
+
+  glUniform4fv(shaders_[shader_type_].uTexXMinMax, 1, &xTexMinMax[0]);
+  CHECK_GL();
+
+  glDrawElements(GL_TRIANGLE_STRIP, mesh_node_[eye].indices.size(),
+                 GL_UNSIGNED_SHORT, nullptr);
+  CHECK_GL();
+  if (do_gl_state_prep)
+    ResetGlState(num_textures);
+}
+
+void DistortionRenderer::SetDisplaySize(vec2i display_size) {
+  display_size_ = display_size;
+}
+
+void DistortionRenderer::SetEdsEnabled(bool enabled) { eds_enabled_ = enabled; }
+
+void DistortionRenderer::RecomputeDistortion(const CompositeHmd& hmd) {
+  using std::placeholders::_1;
+  using std::placeholders::_2;
+  using std::placeholders::_3;
+  using std::placeholders::_4;
+  DistortionFunction distortion_function =
+      std::bind(&CompositeHmd::ComputeDistortedVertex, &hmd, _1, _2, _3, _4);
+
+  for (int i = 0; i < 2; ++i) {
+    mesh_node_[i] =
+        BuildDistortionMesh(static_cast<EyeType>(i),
+                            distortion_mesh_resolution_, distortion_function);
+
+    glBindVertexArray(mesh_vao_[i]);
+
+    glBindBuffer(GL_ARRAY_BUFFER, mesh_vbo_[i]);
+    glBufferData(GL_ARRAY_BUFFER,
+                 sizeof(EdsVertex) * mesh_node_[i].vertices.size(),
+                 &mesh_node_[i].vertices.front(), GL_STATIC_DRAW);
+
+    glEnableVertexAttribArray(POSITION_ATTR);
+    glEnableVertexAttribArray(VIEWPORT_COORD_R_ATTR);
+    glEnableVertexAttribArray(VIEWPORT_COORD_G_ATTR);
+    glEnableVertexAttribArray(VIEWPORT_COORD_B_ATTR);
+
+    glVertexAttribPointer(
+        POSITION_ATTR, 2, GL_FLOAT, GL_FALSE, sizeof(EdsVertex),
+        reinterpret_cast<void*>(offsetof(EdsVertex, position)));
+
+    glVertexAttribPointer(
+        VIEWPORT_COORD_R_ATTR, 2, GL_FLOAT, GL_FALSE, sizeof(EdsVertex),
+        reinterpret_cast<void*>(offsetof(EdsVertex, red_viewport_coords)));
+
+    glVertexAttribPointer(
+        VIEWPORT_COORD_G_ATTR, 2, GL_FLOAT, GL_FALSE, sizeof(EdsVertex),
+        reinterpret_cast<void*>(offsetof(EdsVertex, green_viewport_coords)));
+
+    glVertexAttribPointer(
+        VIEWPORT_COORD_B_ATTR, 2, GL_FLOAT, GL_FALSE, sizeof(EdsVertex),
+        reinterpret_cast<void*>(offsetof(EdsVertex, blue_viewport_coords)));
+
+    glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, mesh_ibo_[i]);
+    glBufferData(GL_ELEMENT_ARRAY_BUFFER,
+                 sizeof(uint16_t) * mesh_node_[i].indices.size(),
+                 &mesh_node_[i].indices.front(), GL_STATIC_DRAW);
+    CHECK_GL();
+  }
+  glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
+  glBindBuffer(GL_ARRAY_BUFFER, 0);
+
+  glBindVertexArray(0);
+}
+
+bool DistortionRenderer::GetLastEdsPose(LateLatchOutput* out_data,
+                                        int layer_id) const {
+  if (layer_id >= kMaxLayers) {
+    LOG(ERROR) << "Accessing invalid layer " << layer_id;
+    return false;
+  }
+
+  if (late_latch_[layer_id] != nullptr) {
+    late_latch_[layer_id]->CaptureOutputData(out_data);
+    return true;
+  } else {
+    LOG(ERROR) << "Late latch shader not enabled.";
+    return false;
+  }
+}
+
+}  // namespace dvr
+}  // namespace android
diff --git a/libs/vr/libeds/eds.cpp b/libs/vr/libeds/eds.cpp
new file mode 100644
index 0000000..8af5b27
--- /dev/null
+++ b/libs/vr/libeds/eds.cpp
@@ -0,0 +1,35 @@
+#include <dvr/eds.h>
+
+#include <private/dvr/graphics/vr_gl_extensions.h>
+#include <private/dvr/late_latch.h>
+#include <private/dvr/types.h>
+
+// TODO(jbates) delete this file and eds.h
+
+extern "C" int dvrEdsInit(bool with_late_latch) { return 0; }
+
+extern "C" void dvrEdsDeinit() {}
+
+extern "C" int dvrEdsCapturePoseAsync(int eye, uint32_t target_vsync_count,
+                                      const float* projection_matrix,
+                                      const float* eye_from_head_matrix,
+                                      const float* pose_offset_matrix) {
+  return 0;
+}
+
+extern "C" int dvrEdsBindPose(int eye, uint32_t ubo_binding, intptr_t offset,
+                              ssize_t size) {
+  return 0;
+}
+
+extern "C" int dvrEdsBlitPose(int eye, int viewport_width,
+                              int viewport_height) {
+  return 0;
+}
+
+extern "C" int dvrEdsBlitPoseFromCpu(int eye, int viewport_width,
+                                     int viewport_height,
+                                     const float* pose_quaternion,
+                                     const float* pose_position) {
+  return 0;
+}
diff --git a/libs/vr/libeds/eds_mesh.cpp b/libs/vr/libeds/eds_mesh.cpp
new file mode 100644
index 0000000..2c7dc2f
--- /dev/null
+++ b/libs/vr/libeds/eds_mesh.cpp
@@ -0,0 +1,136 @@
+#include "include/private/dvr/eds_mesh.h"
+
+#include <math.h>
+
+#include <base/logging.h>
+#include <private/dvr/types.h>
+
+namespace {
+
+using android::dvr::EdsVertex;
+using android::dvr::EyeType;
+using android::dvr::DistortionFunction;
+using android::dvr::vec2;
+
+// Computes the vertices for a distortion mesh with resolution |resolution| and
+// the distortion provided by |distortion_function|, storing them in
+// |vertices|.
+static void ComputeDistortionMeshVertices(
+    EdsVertex* vertices, int resolution,
+    const DistortionFunction& distortion_function, EyeType eye) {
+  for (int row = 0; row < resolution; row++) {
+    for (int col = 0; col < resolution; col++) {
+      const float x_norm =
+          static_cast<float>(col) / (static_cast<float>(resolution - 1U));
+      const float y_norm =
+          static_cast<float>(row) / (static_cast<float>(resolution - 1U));
+
+      const vec2 xy_norm(x_norm, y_norm);
+      const size_t index = col * resolution + row;
+
+      // Evaluate distortion function to get the new coordinates for each color
+      // channel. The distortion function returns the new coordinates relative
+      // to a full viewport with 0 <= x <= 1 for each eye.
+      vec2 coords[3];
+      distortion_function(eye, xy_norm, &vertices[index].position, coords);
+
+      // Store distortion mapping in texture coordinates.
+      vertices[index].red_viewport_coords = coords[0];
+      vertices[index].green_viewport_coords = coords[1];
+      vertices[index].blue_viewport_coords = coords[2];
+    }
+  }
+}
+
+// Computes the triangle strip indices for a distortion mesh with resolution
+// |resolution| and stores them in |indices|.
+static void ComputeDistortionMeshIndices(uint16_t* indices, int resolution) {
+  // The following strip method has been used in the Cardboard SDK
+  // (java/com/google/vrtoolkit/cardboard/DistortionRenderer.java) and has
+  // originally been described at:
+  //
+  // http://dan.lecocq.us/wordpress/2009/12/25/triangle-strip-for-grids-a-construction/
+  //
+  // For a grid with 4 rows and 4 columns of vertices, the strip would
+  // look like:
+  //                             ↻
+  //         0    -    4    -    8    -   12
+  //         ↓    ↗    ↓    ↗    ↓    ↗    ↓
+  //         1    -    5    -    9    -   13
+  //         ↓    ↖    ↓    ↖    ↓    ↖    ↓
+  //         2    -    6    -   10    -   14
+  //         ↓    ↗    ↓    ↗    ↓    ↗    ↓
+  //         3    -    7    -   11    -   15
+  //                   ↺
+  //
+  // Note the little circular arrows next to 7 and 8 that indicate
+  // repeating that vertex once so as to produce degenerate triangles.
+  //
+  // To facilitate scanline racing, the vertex order is left to right.
+
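+  // For example, with resolution = 4 (the grid above) the emitted strip is:
+  //   0,4, 1,5, 2,6, 3,7, 7, 7,11, 6,10, 5,9, 4,8, 8, 8,12, 9,13, 10,14, 11,15
+  // (26 indices, matching resolution * (2 * resolution - 1) - 2).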
+  int16_t index_offset = 0;
+  int16_t vertex_offset = 0;
+  for (int row = 0; row < resolution - 1; ++row) {
+    if (row > 0) {
+      indices[index_offset] = indices[index_offset - 1];
+      ++index_offset;
+    }
+    for (int col = 0; col < resolution; ++col) {
+      if (col > 0) {
+        if (row % 2 == 0) {
+          // Move right on even rows.
+          ++vertex_offset;
+        } else {
+          --vertex_offset;
+        }
+      }
+      // The cast to uint16_t is safe: |vertex_offset| starts at zero, grows
+      // by |resolution| after every row, and on odd rows the decrements (at
+      // most resolution - 1 of them) never exceed what previous rows added,
+      // so it cannot drop below zero.
+      indices[index_offset++] = static_cast<uint16_t>(vertex_offset);
+      indices[index_offset++] = static_cast<uint16_t>(
+          vertex_offset + static_cast<int16_t>(resolution));
+    }
+    vertex_offset =
+        static_cast<int16_t>(static_cast<int>(resolution) + vertex_offset);
+  }
+}
+
+}  // anonymous namespace
+
+namespace android {
+namespace dvr {
+
+// Builds a distortion mesh of resolution |resolution| for |eye| using the
+// distortion provided by |distortion_function|.
+EdsMesh BuildDistortionMesh(EyeType eye, int resolution,
+                            const DistortionFunction& distortion_function) {
+  CHECK_GT(resolution, 2);
+
+  // Number of indices produced by the strip method
+  // (see comment in ComputeDistortionMeshIndices):
+  //
+  //     1 vertex per triangle
+  //     2 triangles per quad, (rows - 1) * (cols - 1) quads
+  //     2 vertices at the start of each row for the first triangle
+  //     1 extra vertex per row (except first and last) for a
+  //       degenerate triangle
+  //
+  const uint16_t index_count =
+      static_cast<uint16_t>(resolution * (2 * resolution - 1U) - 2U);
+  const uint16_t vertex_count = static_cast<uint16_t>(resolution * resolution);
+
+  EdsMesh mesh;
+  mesh.vertices.resize(vertex_count);
+  mesh.indices.resize(index_count);
+
+  // Populate vertex and index buffer.
+  ComputeDistortionMeshVertices(&mesh.vertices[0], resolution,
+                                distortion_function, eye);
+  ComputeDistortionMeshIndices(&mesh.indices[0], resolution);
+
+  return mesh;
+}
+
+}  // namespace dvr
+}  // namespace android
diff --git a/libs/vr/libeds/include/CPPLINT.cfg b/libs/vr/libeds/include/CPPLINT.cfg
new file mode 100644
index 0000000..2f8a3c0
--- /dev/null
+++ b/libs/vr/libeds/include/CPPLINT.cfg
@@ -0,0 +1 @@
+filter=-build/header_guard
diff --git a/libs/vr/libeds/include/dvr/eds.h b/libs/vr/libeds/include/dvr/eds.h
new file mode 100644
index 0000000..37b1297
--- /dev/null
+++ b/libs/vr/libeds/include/dvr/eds.h
@@ -0,0 +1,150 @@
+#ifndef ANDROID_DVR_EDS_H_
+#define ANDROID_DVR_EDS_H_
+
+#include <stdbool.h>
+#include <stdint.h>
+#include <sys/cdefs.h>
+#include <sys/types.h>
+
+__BEGIN_DECLS
+
+// This struct aligns with GLSL uniform blocks with std140 layout.
+// std140 allows padding between certain types, so padding must be explicitly
+// added as struct members.
+struct __attribute__((__packed__)) DvrLateLatchData {
+  // Column-major order.
+  float view_proj_matrix[16];
+  // Column-major order.
+  float view_matrix[16];
+  float pose_quaternion[4];
+  float pose_position[4];
+};
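+
+// For illustration, a GLSL interface block matching the struct above might
+// look like the following sketch (the block name, member names and binding
+// point are hypothetical):
+//
+//   layout(binding = 0, std140) uniform DvrLateLatchData {
+//     mat4 uViewProjMatrix;   // view_proj_matrix (column-major)
+//     mat4 uViewMatrix;       // view_matrix (column-major)
+//     vec4 uPoseQuaternion;   // pose_quaternion
+//     vec4 uPosePosition;     // pose_position
+//   };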
+
+//
+// These APIs are not thread safe and must be called on a single thread with an
+// actively bound GL context corresponding to a display surface.
+//
+
+// Prepares EDS and Late Latching system. Idempotent if called more than once.
+// The target GL context must be created and bound.
+//
+// If |with_late_latch| is true, a thread will be created that asynchronously
+// updates the pose in memory.
+//
+// The following GL states are modified as follows:
+// glBindBuffer(GL_ARRAY_BUFFER, 0);
+// glBindBuffer(GL_UNIFORM_BUFFER, 0);
+//
+// Returns 0 on success, negative error code on failure.
+// Check GL errors with glGetError for other error conditions.
+int dvrEdsInit(bool with_late_latch);
+
+// Stops and destroys the EDS Late Latching system.
+void dvrEdsDeinit();
+
+// Submits a GL draw command that captures the latest head pose into a uniform
+// buffer object. This should be called twice per frame, before the app begins
+// drawing for each eye.
+// For each eye, a later call to dvrEdsBlitPose will write this pose into
+// the application framebuffer corner so that the EDS service knows what pose
+// the frame was rendered with.
+//
+// |eye| is 0 for left eye and 1 for right eye.
+//
+// The following GL states are modified as follows:
+// glUseProgram(0);
+// glBindBuffer(GL_UNIFORM_BUFFER, 0);
+// glBindBufferBase(GL_TRANSFORM_FEEDBACK_BUFFER, 0, id);
+// glDisable(GL_RASTERIZER_DISCARD);
+//
+// Returns 0 on success, negative error code on failure:
+//   EPERM - dvrEdsInit(true) was not called.
+// Check GL errors with glGetError for other error conditions.
+int dvrEdsCapturePoseAsync(int eye, uint32_t target_vsync_count,
+                           const float* projection_matrix,
+                           const float* eye_from_head_matrix,
+                           const float* pose_offset_matrix);
+
+// Binds the late-latch output data as a GL_UNIFORM_BUFFER so that your vertex
+// shaders can use the latest head pose. For example, to bind just the
+// view_matrix from the output:
+//
+// dvrEdsBindPose(eye, BINDING,
+//                       offsetof(DvrLateLatchData, view_matrix),
+//                       sizeof(DvrLateLatchData::view_matrix));
+//
+// Or more commonly, bind the view projection matrix:
+//
+// dvrEdsBindPose(eye, BINDING,
+//                       offsetof(DvrLateLatchData, view_proj_matrix),
+//                       sizeof(DvrLateLatchData::view_proj_matrix));
+//
+// BINDING in the above examples is the binding location of the uniform
+// interface block in the GLSL shader.
+//
+// Shader example (3 would be the |ubo_binding| passed to this function):
+//  layout(binding = 3, std140) uniform LateLatchData {
+//    mat4 uViewProjection;
+//  };
+//
+// |eye| is 0 for left eye and 1 for right eye.
+//
+// The following GL states are modified as follows:
+// glBindBuffer(GL_UNIFORM_BUFFER, ...);
+// glBindBufferRange(GL_UNIFORM_BUFFER, ...);
+//
+// To clear the binding, call glBindBuffer(GL_UNIFORM_BUFFER, 0);
+//
+// Returns 0 on success, negative error code on failure:
+//   EPERM - dvrEdsInit(true) was not called.
+// Check GL errors with glGetError for other error conditions.
+int dvrEdsBindPose(int eye, uint32_t ubo_binding, intptr_t offset,
+                   ssize_t size);
+
+// DEPRECATED
+//
+// Blits the pose captured previously into the currently bound framebuffer.
+// The current framebuffer is assumed to be the default framebuffer 0, the
+// surface that will be sent to the display and have EDS and lens warp applied
+// to it.
+//
+// |eye| is 0 for left eye and 1 for right eye.
+// |viewport_width| is the width of the viewport for this eye, which is
+//                  usually half the width of the framebuffer.
+// |viewport_height| is the height of the viewport for this eye, which is
+//                   usually the height of the framebuffer.
+//
+// The following GL states are modified as follows:
+// glUseProgram(0);
+// glBindBuffer(GL_UNIFORM_BUFFER, 0);
+// glBindBufferRange(GL_UNIFORM_BUFFER, 23, ...);
+//
+// Returns 0 on success, negative error code on failure:
+//   EPERM - dvrEdsInit was not called.
+// Check GL errors with glGetError for other error conditions.
+int dvrEdsBlitPose(int eye, int viewport_width, int viewport_height);
+
+// DEPRECATED
+//
+// Same as dvrEdsBlitPose except that the pose is provided as a
+// parameter instead of getting it from dvrEdsBindPose. This is for
+// applications that want EDS but do not want late-latching.
+//
+// |pose_quaternion| should point to 4 floats that represent a quaternion.
+// |pose_position| should point to 3 floats that represent x,y,z position.
+//
+// GL states are modified as follows:
+// glUseProgram(0);
+// glBindBuffer(GL_UNIFORM_BUFFER, 0);
+// glBindBufferBase(GL_UNIFORM_BUFFER, 23, ...);
+//
+// Returns 0 on success, negative error code on failure:
+//   EPERM - dvrEdsInit was not called.
+// Check GL errors with glGetError for other error conditions.
+int dvrEdsBlitPoseFromCpu(int eye, int viewport_width, int viewport_height,
+                          const float* pose_quaternion,
+                          const float* pose_position);
+
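+// For illustration, a typical per-frame flow using the functions above might
+// look like the following sketch (the matrix arguments and the binding point
+// of 3 are hypothetical; GL context setup is omitted):
+//
+//   dvrEdsInit(true /* with_late_latch */);
+//   ...
+//   for (int eye = 0; eye < 2; ++eye) {
+//     dvrEdsCapturePoseAsync(eye, target_vsync_count, projection_matrix,
+//                            eye_from_head_matrix, pose_offset_matrix);
+//     dvrEdsBindPose(eye, 3, offsetof(DvrLateLatchData, view_proj_matrix),
+//                    sizeof(DvrLateLatchData::view_proj_matrix));
+//     // ... issue draw calls for this eye ...
+//   }
+//   ...
+//   dvrEdsDeinit();
+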
+__END_DECLS
+
+#endif  // ANDROID_DVR_EDS_H_
diff --git a/libs/vr/libeds/include/private/dvr/color_channel_distortion.h b/libs/vr/libeds/include/private/dvr/color_channel_distortion.h
new file mode 100644
index 0000000..4e612cd
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/color_channel_distortion.h
@@ -0,0 +1,30 @@
+#ifndef ANDROID_DVR_COLOR_CHANNEL_DISTORTION_H_
+#define ANDROID_DVR_COLOR_CHANNEL_DISTORTION_H_
+
+#include <private/dvr/types.h>
+
+namespace android {
+namespace dvr {
+
+// ColorChannelDistortion encapsulates the way one color channel (wavelength)
+// is distorted optically when an image is viewed through a lens.
+class ColorChannelDistortion {
+ public:
+  virtual ~ColorChannelDistortion() {}
+
+  // Given a 2d point p, returns the corresponding distorted point.
+  // The units of both the input and output points are tan-angle units,
+  // which can be computed as the distance on the screen divided by
+  // distance from the virtual eye to the screen.  For both the input
+  // and output points, the intersection of the optical axis of the lens
+  // with the screen defines the origin, the x axis points right, and
+  // the y axis points up.
+  virtual vec2 Distort(vec2 p) const = 0;
+
+  virtual vec2 DistortInverse(vec2 p) const = 0;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_COLOR_CHANNEL_DISTORTION_H_
diff --git a/libs/vr/libeds/include/private/dvr/composite_hmd.h b/libs/vr/libeds/include/private/dvr/composite_hmd.h
new file mode 100644
index 0000000..70727e0
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/composite_hmd.h
@@ -0,0 +1,89 @@
+#ifndef ANDROID_DVR_COMPOSITE_HMD_H_
+#define ANDROID_DVR_COMPOSITE_HMD_H_
+
+#include <private/dvr/display_metrics.h>
+#include <private/dvr/head_mount_metrics.h>
+#include <private/dvr/types.h>
+
+namespace android {
+namespace dvr {
+
+// An intermediate structure composed of a head mount (described by
+// HeadMountMetrics) and a display (described by DisplayMetrics).
+class CompositeHmd {
+ public:
+  // Constructs a new CompositeHmd given a HeadMountMetrics and a
+  // DisplayMetrics.
+  CompositeHmd(const HeadMountMetrics& head_mount_metrics,
+               const DisplayMetrics& display_metrics);
+
+  CompositeHmd(CompositeHmd&& composite_hmd) = delete;
+  CompositeHmd(const CompositeHmd& composite_hmd) = delete;
+  CompositeHmd& operator=(CompositeHmd&& composite_hmd) = delete;
+  CompositeHmd& operator=(const CompositeHmd& composite_hmd) = delete;
+
+  // Headset metadata.
+  float GetTargetFrameDuration() const;
+  void ComputeDistortedVertex(EyeType eye, vec2 uv_in, vec2* vertex_out,
+                              vec2* uv_out) const;
+
+  // Eye-unspecific view accessors.
+  vec2i GetRecommendedRenderTargetSize() const;
+  Range2i GetDisplayRange() const;
+
+  // Eye-specific view accessors.
+  mat4 GetEyeFromHeadMatrix(EyeType eye) const;
+  FieldOfView GetEyeFov(EyeType eye) const;
+  Range2i GetEyeViewportBounds(EyeType eye) const;
+
+  // Set HeadMountMetrics and recompute everything that depends on
+  // HeadMountMetrics.
+  void SetHeadMountMetrics(const HeadMountMetrics& head_mount_metrics);
+
+  // Returns a reference to the |head_mount_metrics_| member.
+  const HeadMountMetrics& GetHeadMountMetrics() const;
+
+  // Set DisplayMetrics and recompute everything that depends on DisplayMetrics.
+  void SetDisplayMetrics(const DisplayMetrics& display_metrics);
+
+  // Returns a reference to the current display metrics.
+  const DisplayMetrics& GetDisplayMetrics() const;
+
+  // Compute the distorted point for a single channel.
+  vec2 ComputeDistortedPoint(EyeType eye, vec2 position,
+                             RgbColorChannel channel) const;
+
+  // Compute the inverse distorted point for a single channel.
+  vec2 ComputeInverseDistortedPoint(EyeType eye, vec2 position,
+                                    RgbColorChannel channel) const;
+
+ private:
+  FieldOfView eye_fov_[2];
+  Range2i eye_viewport_range_[2];
+  mat4 eye_from_head_matrix_[2];
+  Range2i display_range_;
+  vec2i recommended_render_target_size_;
+
+  // Per-eye scale and translation to convert from normalized Screen Space
+  // ([0:1]x[0:1]) to tan-angle space.
+  mat3 eye_tan_angle_from_norm_screen_matrix_[2];
+  mat3 eye_tan_angle_from_norm_screen_inv_matrix_[2];
+
+  // Per-eye scale and translation to convert from tan-angle space to normalized
+  // Texture Space ([0:1]x[0:1]).
+  mat3 eye_norm_texture_from_tan_angle_matrix_[2];
+  mat3 eye_norm_texture_from_tan_angle_inv_matrix_[2];
+
+  HeadMountMetrics head_mount_metrics_;
+  DisplayMetrics display_metrics_;
+
+  // Called by SetHeadMountMetrics/SetDisplayMetrics after metrics get changed.
+  // This function will update head_mount_metrics_/display_metrics_ based on the
+  // metrics supplied in the above two methods.
+  void MetricsChanged();
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_COMPOSITE_HMD_H_
diff --git a/libs/vr/libeds/include/private/dvr/cpu_thread_pose_updater.h b/libs/vr/libeds/include/private/dvr/cpu_thread_pose_updater.h
new file mode 100644
index 0000000..6a2c8a6
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/cpu_thread_pose_updater.h
@@ -0,0 +1,48 @@
+#ifndef ANDROID_DVR_CPU_THREAD_POSE_UPDATER_H_
+#define ANDROID_DVR_CPU_THREAD_POSE_UPDATER_H_
+
+#include <atomic>
+#include <thread>
+
+#include <private/dvr/lucid_pose_tracker.h>
+#include <private/dvr/raw_pose.h>
+
+namespace android {
+namespace dvr {
+
+// Temporary version of the pose updater that uses a CPU thread to update
+// the pose buffer. Note that once started, this thread runs indefinitely.
+class CpuThreadPoseUpdater {
+ public:
+  CpuThreadPoseUpdater();
+  ~CpuThreadPoseUpdater();
+
+  // Start the thread to update the given buffer with the given number of
+  // microseconds between updates.
+  void Start(volatile RawPosePair* mapped_pose_buffer, int period_us);
+
+  void StopAndJoin();
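+
+  // For illustration (the buffer and update period are hypothetical):
+  //   CpuThreadPoseUpdater updater;
+  //   updater.Start(mapped_pose_buffer, 1000);  // ~1 kHz updates
+  //   ...
+  //   updater.StopAndJoin();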
+
+ private:
+  void UpdateThread();
+
+  volatile RawPosePair* mapped_pose_buffer_;
+
+  // Pose update thread.
+  std::thread update_thread_;
+
+  volatile bool stop_request_;
+
+  // Update period in microseconds.
+  int update_period_us_;
+
+  // Current pose count, used to avoid writing to the same buffer that is being
+  // read by the GPU.
+  uint32_t count_;
+  LucidPoseTracker pose_tracker_;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_CPU_THREAD_POSE_UPDATER_H_
diff --git a/libs/vr/libeds/include/private/dvr/display_metrics.h b/libs/vr/libeds/include/private/dvr/display_metrics.h
new file mode 100644
index 0000000..87d9d04
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/display_metrics.h
@@ -0,0 +1,79 @@
+#ifndef ANDROID_DVR_DISPLAY_METRICS_H_
+#define ANDROID_DVR_DISPLAY_METRICS_H_
+
+#include <private/dvr/types.h>
+
+namespace android {
+namespace dvr {
+
+enum class DisplayOrientation { kPortrait, kLandscape };
+
+// DisplayMetrics encapsulates metrics describing a display to be used
+// with a head mount to create a head mounted display.
+class DisplayMetrics {
+ public:
+  DisplayMetrics();
+  // Constructs a DisplayMetrics given a display size in pixels,
+  // meters per pixel, border size in meters, and frame duration in
+  // seconds.
+  //
+  // size_pixels The size of the display in pixels.
+  // meters_per_pixel The meters per pixel in each dimension.
+  // border_size_meters The size of the border around the display
+  //     in meters.  When the device sits on a surface in the proper
+  //     orientation this is the distance from the surface to the edge
+  //     of the display.
+  // frame_duration_seconds The duration in seconds of each frame
+  //     (i.e., 1 / framerate).
+  DisplayMetrics(vec2i size_pixels, vec2 meters_per_pixel,
+                 float border_size_meters, float frame_duration_seconds,
+                 DisplayOrientation orientation);
+
+  // Gets the size of the display in physical pixels (not logical pixels).
+  vec2i GetSizePixels() const { return size_pixels_; }
+
+  DisplayOrientation GetOrientation() const { return orientation_; }
+  bool IsPortrait() const {
+    return orientation_ == DisplayOrientation::kPortrait;
+  }
+
+  // Gets the size of the display in meters.
+  vec2 GetSizeMeters() const {
+    return vec2(static_cast<float>(size_pixels_[0]),
+                static_cast<float>(size_pixels_[1]))
+               .array() *
+           meters_per_pixel_.array();
+  }
+
+  // Gets the meters per pixel.
+  vec2 GetMetersPerPixel() const { return meters_per_pixel_; }
+
+  // Gets the size of the border around the display.
+  // For a phone in landscape position this would be the distance from
+  // the bottom edge of the phone to the bottom of the screen.
+  float GetBorderSizeMeters() const { return border_size_meters_; }
+
+  // Gets the frame duration in seconds for the display.
+  float GetFrameDurationSeconds() const { return frame_duration_seconds_; }
+
+  // Toggles the orientation and swaps all of the settings such that the
+  // display is being held in the other orientation.
+  void ToggleOrientation();
+
+  // Override the meters per pixel.
+  void SetMetersPerPixel(const vec2& meters_per_pixel) {
+    meters_per_pixel_ = meters_per_pixel;
+  }
+
+ private:
+  vec2i size_pixels_;
+  vec2 meters_per_pixel_;
+  float border_size_meters_;
+  float frame_duration_seconds_;
+  DisplayOrientation orientation_;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_DISPLAY_METRICS_H_
diff --git a/libs/vr/libeds/include/private/dvr/distortion_renderer.h b/libs/vr/libeds/include/private/dvr/distortion_renderer.h
new file mode 100644
index 0000000..e1c8114
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/distortion_renderer.h
@@ -0,0 +1,233 @@
+#ifndef ANDROID_DVR_DISTORTION_RENDERER_H_
+#define ANDROID_DVR_DISTORTION_RENDERER_H_
+
+#include <EGL/egl.h>
+#include <GLES2/gl2.h>
+#include <array>
+#include <functional>
+
+#include <private/dvr/eds_mesh.h>
+#include <private/dvr/graphics/shader_program.h>
+#include <private/dvr/late_latch.h>
+#include <private/dvr/lucid_pose_tracker.h>
+#include <private/dvr/render_texture_params.h>
+#include <private/dvr/types.h>
+
+namespace android {
+namespace dvr {
+
+class CompositeHmd;
+
+// Encapsulates the rendering operations to correct for the HMD's lens
+// distortion.
+class DistortionRenderer {
+ public:
+  static constexpr int kMaxLayers = 2;
+  static constexpr int kMaxLatchedLayers = 4;
+
+  static const mat4 kViewportFromClipMatrix;
+  static const mat4 kClipFromViewportMatrix;
+
+  // Creates a distortion renderer for the given distortion function.
+  //
+  // distortion_function the black-box distortion function to apply.
+  // display_size the resolution of the output of the distortion renderer.
+  // distortion_mesh_resolution the amount of subdivision in the
+  //     distortion mesh.
+  DistortionRenderer(const CompositeHmd& hmd, vec2i display_size,
+                     int distortion_mesh_resolution,
+                     bool flip_texture_horizontally,
+                     bool flip_texture_vertically, bool separated_eye_buffers,
+                     bool eds_enabled, bool late_latch_enabled);
+  ~DistortionRenderer();
+
+  // Returns the distortion factor array for the distortion function that was
+  // passed in at creation time. The distortion factor array contains the
+  // magnification factor induced by the distortion mesh at every vertex. There
+  // is one entry per vertex, and entries are ordered in row-major order. The
+  // array contains the magnification averaged over both eyes.
+  const std::vector<float>& GetDistortionFactorArray();
+
+  // |render_pose_buffer_object| is the per-texture pose array buffer object.
+  // |render_buffer_index| is the per-texture index into the pose array buffer
+  //                       object. This selects which pose was rendered into the
+  //                       corresponding texture.
+  void DoLateLatch(uint32_t target_vsync_count,
+                   const uint32_t* render_buffer_index,
+                   const GLuint* render_pose_buffer_objects,
+                   const bool* vertical_flip, const bool* separate_eye,
+                   int num_textures);
+
+  // Convenience method that does no flipping.
+  void DoLateLatch(uint32_t target_vsync_count,
+                   const uint32_t* render_buffer_index,
+                   const GLuint* render_pose_buffer_objects, int num_textures) {
+    bool flip[kMaxLayers] = {false};
+    bool separate[kMaxLayers] = {separated_eye_buffers_};
+    DoLateLatch(target_vsync_count, render_buffer_index,
+                render_pose_buffer_objects, flip, separate, num_textures);
+  }
+
+  void PrepGlState(EyeType eye);
+  void ResetGlState(int num_textures);
+
+  // Applies distortion correction to the given textures by rendering into the
+  // current output target.
+  //
+  // eye Which eye is being corrected.
+  // texture_ids The OpenGL texture IDs of the texture layers.
+  // texture_sizes Dimensions of the corresponding textures.
+  // vertical_flip Whether to flip each input texture vertically.
+  // separate_eye Whether the corresponding texture is a separate texture for
+  //              left and right eyes. If false, it is a shared texture with
+  //              the left view on the left half and right on the right half.
+  // late_latch_layer Which late latch layer index to use for each texture.
+  //     Typically this is just {0, 1} unless blend_with_previous_layer is used.
+  // num_textures Number of textures in texture_ids and texture_sizes.
+  // blend_with_previous_layer If enabled, blend this single layer with the
+  //     existing framebuffer contents.
+  void ApplyDistortionCorrectionToTexture(
+      EyeType eye, const GLuint* texture_ids, const bool* vertical_flip,
+      const bool* separate_eye, const int* late_latch_layer, int num_textures,
+      bool blend_with_previous_layer, bool do_gl_state_prep);
+
+  // Convenience method that does no flipping.
+  void ApplyDistortionCorrectionToTexture(EyeType eye,
+                                          const GLuint* texture_ids,
+                                          int num_textures) {
+    bool flip[kMaxLayers] = {false};
+    bool separate[kMaxLayers] = {separated_eye_buffers_,
+                                 separated_eye_buffers_};
+    int latch_layer[kMaxLayers] = {0, 1};
+    ApplyDistortionCorrectionToTexture(eye, texture_ids, flip, separate,
+                                       latch_layer, num_textures, false, true);
+  }
+
+  // Draw a video quad based on the given video texture by rendering into the
+  // current output target.
+  //
+  // eye Which eye is being corrected.
+  // layer_id Which compositor layer the video mesh should be drawn into.
+  // texture_id The OpenGL texture ID of the video texture.
+  // transform The transformation matrix that transforms the video mesh to its
+  //           desired eye space position for the target eye.
+  void DrawVideoQuad(EyeType eye, int layer_id, GLuint texture_id,
+                     const mat4& transform);
+
+  // Modifies the size of the output display. This is the number of physical
+  // pixels per dimension covered by the display on the output device. Calling
+  // this method is cheap; it only updates the state table of the two
+  // eye-specific mesh nodes.
+  void SetDisplaySize(vec2i size);
+
+  void SetEdsEnabled(bool enabled);
+  void SetChromaticAberrationCorrectionEnabled(bool enabled) {
+    chromatic_aberration_correction_enabled_ = enabled;
+  }
+  void SetUseAlphaVignette(bool enabled) { use_alpha_vignette_ = enabled; }
+
+  bool GetLastEdsPose(LateLatchOutput* out_data, int layer_id = 0) const;
+
+ private:
+  enum ShaderProgramType {
+    kNoChromaticAberrationCorrection,
+    kNoChromaticAberrationCorrectionTwoLayers,
+    kChromaticAberrationCorrection,
+    kChromaticAberrationCorrectionTwoLayers,
+    kChromaticAberrationCorrectionAlphaVignette,
+    kChromaticAberrationCorrectionAlphaVignetteTwoLayers,
+    kChromaticAberrationCorrectionWithBlend,
+    kSimpleVideoQuad,
+    kNumShaderPrograms,
+  };
+
+  struct EdsShader {
+    EdsShader() {}
+    ~EdsShader() {
+    }
+
+    void load(const char* vertex, const char* fragment, int num_layers,
+              bool use_alpha_vignette, float rotation, bool flip_vertical,
+              bool blend_with_previous_layer);
+    void use() { pgm.Use(); }
+
+    // Used by the distortion renderer to update uTexFromEyeMatrix and
+    // uEyeFromViewportMatrix with the given transform matrix.
+    void SetTexFromEyeTransform(const mat4& transform) {
+      glUniformMatrix4fv(uTexFromEyeMatrix, 1, false, transform.data());
+    }
+
+    void SetEyeFromViewportTransform(const mat4& transform) {
+      glUniformMatrix4fv(uEyeFromViewportMatrix, 1, false, transform.data());
+    }
+
+    ShaderProgram pgm;
+
+    // Texture variables, named to match shader strings for convenience.
+    GLint uProjectionMatrix;
+    GLint uTexFromEyeMatrix;
+    GLint uEyeFromViewportMatrix;
+    GLint uTexXMinMax;
+  };
+
+  void DrawEye(EyeType eye, const GLuint* texture_ids,
+               const bool* vertical_flip, const bool* separate_eye,
+               const int* late_latch_layer, int num_textures,
+               bool blend_with_previous_layer, bool do_gl_state_prep);
+
+  // Called when the CompositeHmd is updated; recomputes the distortion mesh
+  // vertices and the distortion factor array.
+  void RecomputeDistortion(const CompositeHmd& hmd);
+
+  // Per-eye, per flip, per separate eye mode buffers for setting EDS matrix
+  // when EDS is disabled.
+  GLuint uTexFromRecommendedViewportMatrix[2][2][2];
+
+  // Distortion mesh for each eye.
+  EdsMesh mesh_node_[2];
+  // VBO (vertex buffer object) for distortion mesh vertices.
+  GLuint mesh_vbo_[2];
+  // VAO (vertex array object) for distortion mesh vertex array data.
+  GLuint mesh_vao_[2];
+  // IBO (index buffer object) for distortion mesh indices.
+  GLuint mesh_ibo_[2];
+
+  EdsShader shaders_[kNumShaderPrograms];
+
+  // Enum to indicate which shader program is being used.
+  ShaderProgramType shader_type_;
+
+  bool eds_enabled_;
+  bool chromatic_aberration_correction_enabled_;
+  bool use_alpha_vignette_;
+
+  // Tracks the distortion mesh resolution currently in use. When the
+  // CompositeHmd is updated, the distortion mesh vertices/factor array are
+  // recomputed with the resolution stored here.
+  int distortion_mesh_resolution_;
+
+  // The OpenGL ID of the last texture passed to
+  // ApplyDistortionCorrectionToTexture().
+  GLuint last_distortion_texture_id_;
+
+  // GL texture 2D target for application texture.
+  GLint app_texture_target_;
+
+  // Precomputed matrices for EDS and viewport transforms.
+  mat4 tex_from_eye_matrix_[2][2][2];
+  mat4 eye_from_viewport_matrix_[2];
+
+  // Eye viewport locations.
+  vec2i eye_viewport_origin_[2];
+  vec2i eye_viewport_size_;
+
+  vec2i display_size_;
+
+  std::unique_ptr<LateLatch> late_latch_[kMaxLatchedLayers];
+  bool separated_eye_buffers_;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_DISTORTION_RENDERER_H_
diff --git a/libs/vr/libeds/include/private/dvr/eds_mesh.h b/libs/vr/libeds/include/private/dvr/eds_mesh.h
new file mode 100644
index 0000000..d2c901e
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/eds_mesh.h
@@ -0,0 +1,38 @@
+#ifndef ANDROID_DVR_EDS_MESH_H_
+#define ANDROID_DVR_EDS_MESH_H_
+
+#include <stdint.h>
+#include <functional>
+#include <vector>
+
+#include <private/dvr/types.h>
+
+namespace android {
+namespace dvr {
+
+struct EdsVertex {
+  vec2 position;
+  vec2 red_viewport_coords;
+  vec2 green_viewport_coords;
+  vec2 blue_viewport_coords;
+};
+
+struct EdsMesh {
+  std::vector<EdsVertex> vertices;
+  std::vector<uint16_t> indices;
+};
+
+// A distortion function takes a point in the range [0..1, 0..1] and returns,
+// via its out-parameters, the vertex position and the three distorted points
+// for the separate R, G and B channels.
+typedef std::function<void(EyeType, vec2, vec2*, vec2*)> DistortionFunction;
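+//
+// For illustration, an identity distortion function might be written as the
+// following sketch (assuming the second out-pointer receives three vec2s, one
+// per color channel):
+//
+//   DistortionFunction identity = [](EyeType, vec2 uv, vec2* vertex_out,
+//                                    vec2* uv_out) {
+//     *vertex_out = uv;
+//     uv_out[0] = uv_out[1] = uv_out[2] = uv;
+//   };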
+
+// Builds a distortion mesh of resolution |resolution| using
+// the distortion provided by |distortion_function| for |eye|.
+EdsMesh BuildDistortionMesh(EyeType eye, int resolution,
+                            const DistortionFunction& distortion_function);
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_EDS_MESH_H_
diff --git a/libs/vr/libeds/include/private/dvr/head_mount_metrics.h b/libs/vr/libeds/include/private/dvr/head_mount_metrics.h
new file mode 100644
index 0000000..f3e63a6
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/head_mount_metrics.h
@@ -0,0 +1,134 @@
+#ifndef ANDROID_DVR_HEAD_MOUNT_METRICS_H_
+#define ANDROID_DVR_HEAD_MOUNT_METRICS_H_
+
+#include <array>
+
+#include <private/dvr/color_channel_distortion.h>
+#include <private/dvr/types.h>
+
+namespace android {
+namespace dvr {
+
+// HeadMountMetrics encapsulates metrics describing a head mount to be used
+// with a display to create a head mounted display.
+class HeadMountMetrics {
+ public:
+  // The vertical point on the HMD from which the lens distance is measured.
+  enum VerticalAlignment { kBottom = 0, kCenter = 1, kTop = 2 };
+
+  enum EyeOrientation {
+    kCCW0Degrees = 0,
+    kCCW90Degrees = 1,
+    kCCW180Degrees = 2,
+    kCCW270Degrees = 3,
+    kCCW0DegreesMirrored = 4,
+    kCCW90DegreesMirrored = 5,
+    kCCW180DegreesMirrored = 6,
+    kCCW270DegreesMirrored = 7,
+
+    // Rotations that consist of an odd number of 90 degree rotations will swap
+    // the height and width of any bounding boxes/viewports. This bit informs
+    // any viewport manipulating code to perform the appropriate transformation.
+    kRightAngleBit = 0x01,
+    // Viewports are represented as four floating point values (four half
+    // angles). Rotating this structure can be done through a shift operation.
+    // This mask extracts the rotation portion of the orientation.
+    kRotationMask = 0x03,
+    // This mask specifies whether the output is mirrored.
+    kMirroredBit = 0x04
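+
+    // For example, kCCW90DegreesMirrored (= 5) has a rotation portion of
+    // (5 & kRotationMask) = 1, i.e. 90 degrees CCW, and has both
+    // kRightAngleBit and kMirroredBit set.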
+  };
+
+  HeadMountMetrics(
+      float inter_lens_distance, float tray_to_lens_distance,
+      float virtual_eye_to_screen_distance,
+      VerticalAlignment vertical_alignment, const FieldOfView& left_eye_max_fov,
+      const FieldOfView& right_eye_max_fov,
+      const std::shared_ptr<ColorChannelDistortion>& red_distortion,
+      const std::shared_ptr<ColorChannelDistortion>& green_distortion,
+      const std::shared_ptr<ColorChannelDistortion>& blue_distortion,
+      EyeOrientation left_eye_orientation, EyeOrientation right_eye_orientation,
+      float screen_center_to_lens_distance)
+      : inter_lens_distance_(inter_lens_distance),
+        tray_to_lens_distance_(tray_to_lens_distance),
+        virtual_eye_to_screen_distance_(virtual_eye_to_screen_distance),
+        screen_center_to_lens_distance_(screen_center_to_lens_distance),
+        vertical_alignment_(vertical_alignment),
+        eye_max_fov_({{left_eye_max_fov, right_eye_max_fov}}),
+        color_channel_distortion_(
+            {{red_distortion, green_distortion, blue_distortion}}),
+        supports_chromatic_aberration_correction_(true),
+        eye_orientation_({{left_eye_orientation, right_eye_orientation}}) {
+    // If we're missing the green or blue distortions, assume that we don't
+    // correct for chromatic aberration.
+    if (!green_distortion || !blue_distortion) {
+      color_channel_distortion_[1] = red_distortion;
+      color_channel_distortion_[2] = red_distortion;
+      supports_chromatic_aberration_correction_ = false;
+    }
+  }
+
+  // Returns the distance in meters between the optical centers of the two
+  // lenses.
+  float GetInterLensDistance() const { return inter_lens_distance_; }
+
+  // Returns the distance in meters from the "tray" upon which the display
+  // rests to the optical center of a lens.
+  float GetTrayToLensDistance() const { return tray_to_lens_distance_; }
+
+  // Returns the distance in meters from the virtual eye to the screen.
+  // See http://go/vr-distortion-correction for an explanation of what
+  // this distance is.
+  float GetVirtualEyeToScreenDistance() const {
+    return virtual_eye_to_screen_distance_;
+  }
+
+  // Returns the horizontal distance from the center of the screen to the center
+  // of the lens, in meters.
+  float GetScreenCenterToLensDistance() const {
+    return screen_center_to_lens_distance_;
+  }
+
+  // Returns the vertical alignment of the HMD.  The tray-to-lens distance
+  // is relative to this position.  Exception: if the alignment is kCenter,
+  // then the offset has no meaning.
+  VerticalAlignment GetVerticalAlignment() const { return vertical_alignment_; }
+
+  // Returns the given eye's maximum field of view visible through the lens.
+  // The actual rendered field of view will be limited by this and also by
+  // the size of the screen.
+  const FieldOfView& GetEyeMaxFov(EyeType eye) const {
+    return eye_max_fov_[eye];
+  }
+
+  // Returns the ColorChannelDistortion object representing the distortion
+  // caused by the lenses for the given color channel.
+  const ColorChannelDistortion& GetColorChannelDistortion(
+      RgbColorChannel channel) const {
+    return *color_channel_distortion_[channel];
+  }
+
+  bool supports_chromatic_aberration_correction() const {
+    return supports_chromatic_aberration_correction_;
+  }
+
+  EyeOrientation GetEyeOrientation(EyeType eye) const {
+    return eye_orientation_[eye];
+  }
+
+ private:
+  float inter_lens_distance_;
+  float tray_to_lens_distance_;
+  float virtual_eye_to_screen_distance_;
+  float screen_center_to_lens_distance_;
+  VerticalAlignment vertical_alignment_;
+  std::array<FieldOfView, 2> eye_max_fov_;
+  std::array<std::shared_ptr<ColorChannelDistortion>, 3>
+      color_channel_distortion_;
+  bool supports_chromatic_aberration_correction_;
+  std::array<EyeOrientation, 2> eye_orientation_;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_HEAD_MOUNT_METRICS_H_
diff --git a/libs/vr/libeds/include/private/dvr/identity_distortion.h b/libs/vr/libeds/include/private/dvr/identity_distortion.h
new file mode 100644
index 0000000..b9c5cf6
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/identity_distortion.h
@@ -0,0 +1,23 @@
+#ifndef ANDROID_DVR_IDENTITY_DISTORTION_H_
+#define ANDROID_DVR_IDENTITY_DISTORTION_H_
+
+#include <private/dvr/color_channel_distortion.h>
+
+namespace android {
+namespace dvr {
+
+// Provides an identity distortion operation if running the device without any
+// lenses.
+class IdentityDistortion : public ColorChannelDistortion {
+ public:
+  IdentityDistortion() {}
+
+  vec2 Distort(vec2 p) const override { return p; }
+
+  vec2 DistortInverse(vec2 p) const override { return p; }
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_IDENTITY_DISTORTION_H_
diff --git a/libs/vr/libeds/include/private/dvr/lookup_radial_distortion.h b/libs/vr/libeds/include/private/dvr/lookup_radial_distortion.h
new file mode 100644
index 0000000..56fc5db
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/lookup_radial_distortion.h
@@ -0,0 +1,31 @@
+#ifndef ANDROID_DVR_LOOKUP_RADIAL_DISTORTION_H_
+#define ANDROID_DVR_LOOKUP_RADIAL_DISTORTION_H_
+
+#include <vector>
+
+#include <private/dvr/color_channel_distortion.h>
+
+namespace android {
+namespace dvr {
+
+// LookupRadialDistortion implements a radial distortion using a lookup
+// vector of tan(angle) -> multiplier entries.  This can use measured data
+// directly.
+class LookupRadialDistortion : public ColorChannelDistortion {
+ public:
+  // lookup.x = tan(angle), lookup.y = distance from center multiplier.
+  explicit LookupRadialDistortion(const vec2* lookup, size_t count);
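+  //
+  // For illustration (the measured values here are hypothetical):
+  //   static const vec2 kLookup[] = {{0.0f, 1.0f}, {0.5f, 1.1f}, {1.0f, 1.3f}};
+  //   LookupRadialDistortion distortion(kLookup, 3);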
+
+  vec2 Distort(vec2 p) const override;
+  vec2 DistortInverse(vec2 p) const override;
+
+ private:
+  float DistortionFactor(float r) const;
+  float DistortRadius(float r) const;
+
+  std::vector<vec2> lookup_;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_LOOKUP_RADIAL_DISTORTION_H_
diff --git a/libs/vr/libeds/include/private/dvr/lucid_metrics.h b/libs/vr/libeds/include/private/dvr/lucid_metrics.h
new file mode 100644
index 0000000..0e4ada4
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/lucid_metrics.h
@@ -0,0 +1,22 @@
+#ifndef ANDROID_DVR_LUCID_METRICS_H_
+#define ANDROID_DVR_LUCID_METRICS_H_
+
+#include <private/dvr/display_metrics.h>
+#include <private/dvr/head_mount_metrics.h>
+#include <private/dvr/types.h>
+
+namespace android {
+namespace dvr {
+
+HeadMountMetrics CreateHeadMountMetrics();
+HeadMountMetrics CreateHeadMountMetrics(const FieldOfView& l_fov,
+                                        const FieldOfView& r_fov);
+HeadMountMetrics CreateUndistortedHeadMountMetrics();
+HeadMountMetrics CreateUndistortedHeadMountMetrics(const FieldOfView& l_fov,
+                                                   const FieldOfView& r_fov);
+DisplayMetrics CreateDisplayMetrics(vec2i screen_size);
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_LUCID_METRICS_H_
diff --git a/libs/vr/libeds/include/private/dvr/lucid_pose_tracker.h b/libs/vr/libeds/include/private/dvr/lucid_pose_tracker.h
new file mode 100644
index 0000000..4ceda5a
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/lucid_pose_tracker.h
@@ -0,0 +1,49 @@
+#ifndef ANDROID_DVR_LUCID_POSE_TRACKER_H_
+#define ANDROID_DVR_LUCID_POSE_TRACKER_H_
+
+#include <memory>
+
+#include <dvr/pose_client.h>
+
+#include <private/dvr/types.h>
+
+namespace android {
+namespace dvr {
+
+// Provides pose tracking via the system pose service.
+class LucidPoseTracker {
+ public:
+  // When set, the pose service is ignored and the given pose is always returned
+  // by GetPose. As long as this is called before any LucidPoseTracker is
+  // used, the pose service will not be created.
+  // Threading: this is not thread safe.
+  static void SetPoseOverride(const Posef& pose);
+
+  // Reset prior override pose.
+  static void ClearPoseOverride();
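+  //
+  // For illustration (the override pose is hypothetical):
+  //   LucidPoseTracker::SetPoseOverride(Posef());  // freeze at identity
+  //   ... run code that calls GetPose() ...
+  //   LucidPoseTracker::ClearPoseOverride();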
+
+  LucidPoseTracker();
+  ~LucidPoseTracker();
+
+  // Currently GetPose() will ignore timestamp_ns and always return the most
+  // recent orientation.
+  // TODO(stefanus): support prediction.
+  Posef GetPose(uint64_t timestamp_ns);
+
+ private:
+  static bool is_override_pose_;
+  static Posef override_pose_;
+
+  DvrPose* pose_client_;
+
+  // The most recent pose.
+  Posef latest_pose_;
+
+  // The time stamp corresponding to when latest_pose_ was last updated.
+  uint64_t latest_timestamp_ns_;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_LUCID_POSE_TRACKER_H_
diff --git a/libs/vr/libeds/include/private/dvr/polynomial_radial_distortion.h b/libs/vr/libeds/include/private/dvr/polynomial_radial_distortion.h
new file mode 100644
index 0000000..8f080aa
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/polynomial_radial_distortion.h
@@ -0,0 +1,60 @@
+#ifndef ANDROID_DVR_POLYNOMIAL_RADIAL_DISTORTION_H_
+#define ANDROID_DVR_POLYNOMIAL_RADIAL_DISTORTION_H_
+
+#include <vector>
+
+#include <private/dvr/color_channel_distortion.h>
+
+namespace android {
+namespace dvr {
+
+// PolynomialRadialDistortion implements a radial distortion based on
+// a set of coefficients describing a polynomial function.
+// See http://en.wikipedia.org/wiki/Distortion_(optics).
+//
+// Unless otherwise stated, the units used in this class are tan-angle units
+// which can be computed as distance on the screen divided by distance from the
+// virtual eye to the screen.
+class PolynomialRadialDistortion : public ColorChannelDistortion {
+ public:
+  // Construct a PolynomialRadialDistortion with coefficients for
+  // the radial distortion equation:
+  //
+  //   p' = p (1 + K1 r^2 + K2 r^4 + ... + Kn r^(2n))
+  //
+  // where r is the distance in tan-angle units from the optical center,
+  // p the input point and p' the output point.
+  // The provided vector contains the coefficients for the even monomials
+  // in the distortion equation: coefficients[0] is K1, coefficients[1] is K2,
+  // etc.  Thus the polynomial used for distortion has degree
+  // (2 * coefficients.size()).
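+  //
+  // For example, with hypothetical coefficients {0.2f, 0.1f}, a point at
+  // radius r = 0.5 (in tan-angle units) is scaled by
+  // 1 + 0.2 * 0.5^2 + 0.1 * 0.5^4 = 1.05625.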
+  explicit PolynomialRadialDistortion(const std::vector<float>& coefficients);
+
+  // Given the square of a radius (the radius measuring distance from the
+  // optical axis of the lens), returns the distortion factor for that radius.
+  float DistortionFactor(float r_squared) const;
+
+  // Given a radius (measuring distance from the optical axis of the lens),
+  // returns the corresponding distorted radius.
+  float DistortRadius(float r) const;
+
+  // Given a 2d point p, returns the corresponding distorted point.
+  // The units of both the input and output points are tan-angle units,
+  // which can be computed as the distance on the screen divided by the
+  // distance from the virtual eye to the screen.  The optical axis
+  // of the lens defines the origin for both input and output points.
+  vec2 Distort(vec2 p) const override;
+
+  // Given a 2d point p, returns the point that would need to be passed to
+  // Distort to get point p (approximately).
+  vec2 DistortInverse(vec2 p) const override;
+
+  // Returns the distortion coefficients.
+  const std::vector<float>& GetCoefficients() const;
+
+ private:
+  std::vector<float> coefficients_;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_POLYNOMIAL_RADIAL_DISTORTION_H_
diff --git a/libs/vr/libeds/include/private/dvr/raw_pose.h b/libs/vr/libeds/include/private/dvr/raw_pose.h
new file mode 100644
index 0000000..7058f1a
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/raw_pose.h
@@ -0,0 +1,54 @@
+#ifndef ANDROID_DVR_RAW_POSE_H_
+#define ANDROID_DVR_RAW_POSE_H_
+
+#include <atomic>
+
+namespace android {
+namespace dvr {
+
+// POD raw data of a head pose with a count field for read consistency checking.
+// Warning: The layout of this struct and RawPosePair are specific to match the
+// corresponding buffer type in the shader in late_latch.cpp.
+struct RawPose {
+  void Reset(uint32_t new_count) volatile {
+    qx = qy = qz = 0.0f;
+    qw = 1.0f;
+    px = py = pz = 0.0f;
+    count = new_count;
+  }
+
+  float qx, qy, qz, qw;
+  float px, py, pz;
+  std::atomic<uint32_t> count;
+};
+
+// RawPosePair is used for lock-free writing at about 1 kHz by the CPU/DSP
+// and reading by the GPU. At creation time, pose1 is given count = 1 and
+// pose2 is given count = 2.
+//
+// The lock-free write pattern is:
+// - write to pose with least count.
+// - memory write barrier.
+// - write count = count + 2.
+//
+// For reads, there is an important assumption about the GPU: it generally
+// processes things contiguously, without arbitrary preemptions that save and
+// restore full cache states. In other words, if the GPU is preempted and then
+// later resumed, any data that was read from memory before the preemption will
+// be re-read from memory after resume. This allows the following read trick to
+// work:
+// - read the full RawPosePair into a shader.
+// - select the pose with the newest count.
+//
+// The older pose may be partially written by the async stores from CPU/DSP, but
+// because of the memory barrier and GPU characteristics, the highest count pose
+// should always be a fully consistent RawPose.
+struct RawPosePair {
+  RawPose pose1;
+  RawPose pose2;
+};
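+
+// For illustration, a writer following the pattern above might look like the
+// following sketch (|pair| and |new_pose| are hypothetical; synchronization
+// details beyond the release fence are omitted):
+//
+//   volatile RawPose* target = (pair->pose1.count < pair->pose2.count)
+//                                  ? &pair->pose1
+//                                  : &pair->pose2;
+//   uint32_t next_count = target->count + 2;
+//   target->qx = new_pose.qx;  // ... store the remaining pose fields ...
+//   std::atomic_thread_fence(std::memory_order_release);
+//   target->count = next_count;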
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_RAW_POSE_H_
diff --git a/libs/vr/libeds/include/private/dvr/render_texture_params.h b/libs/vr/libeds/include/private/dvr/render_texture_params.h
new file mode 100644
index 0000000..71aebef
--- /dev/null
+++ b/libs/vr/libeds/include/private/dvr/render_texture_params.h
@@ -0,0 +1,55 @@
+#ifndef ANDROID_DVR_RENDER_TEXTURE_PARAMS_H_
+#define ANDROID_DVR_RENDER_TEXTURE_PARAMS_H_
+
+#include <private/dvr/types.h>
+
+namespace android {
+namespace dvr {
+
+// Encapsulates information about the render texture, including the size
+// of the render texture and the left/right viewports that define the
+// portion each eye renders onto. This struct is passed to PresentFrame
+// every frame before the client actually draws the scene.
+struct RenderTextureParams {
+  RenderTextureParams() {}
+
+  RenderTextureParams(vec2i target_texture_size,
+                      const Range2i& eye_viewport_bounds_left,
+                      const Range2i& eye_viewport_bounds_right,
+                      const FieldOfView& eye_fov_left,
+                      const FieldOfView& eye_fov_right)
+      : texture_size(target_texture_size) {
+    eye_viewport_bounds[kLeftEye] = eye_viewport_bounds_left;
+    eye_viewport_bounds[kRightEye] = eye_viewport_bounds_right;
+    eye_fov[kLeftEye] = eye_fov_left;
+    eye_fov[kRightEye] = eye_fov_right;
+  }
+
+  explicit RenderTextureParams(vec2i target_texture_size,
+                               const FieldOfView& eye_fov_left,
+                               const FieldOfView& eye_fov_right) {
+    texture_size = target_texture_size;
+    eye_viewport_bounds[0] = Range2i::FromSize(
+        vec2i(0, 0), vec2i(texture_size[0] / 2, texture_size[1]));
+    eye_viewport_bounds[1] =
+        Range2i::FromSize(vec2i(texture_size[0] / 2, 0),
+                          vec2i(texture_size[0] / 2, texture_size[1]));
+
+    eye_fov[kLeftEye] = eye_fov_left;
+    eye_fov[kRightEye] = eye_fov_right;
+  }
+
+  // The render texture size.
+  vec2i texture_size;
+
+  // The viewport bounds on the render texture for each eye.
+  Range2i eye_viewport_bounds[2];
+
+  // The field of view for each eye in degrees.
+  FieldOfView eye_fov[2];
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_RENDER_TEXTURE_PARAMS_H_
diff --git a/libs/vr/libeds/lookup_radial_distortion.cpp b/libs/vr/libeds/lookup_radial_distortion.cpp
new file mode 100644
index 0000000..2cee863
--- /dev/null
+++ b/libs/vr/libeds/lookup_radial_distortion.cpp
@@ -0,0 +1,47 @@
+#include "include/private/dvr/lookup_radial_distortion.h"
+
+namespace android {
+namespace dvr {
+
+LookupRadialDistortion::LookupRadialDistortion(const vec2* lookup, size_t count)
+    : lookup_(lookup, lookup + count) {}
+
+float LookupRadialDistortion::DistortionFactor(float r) const {
+  for (size_t i = 1; i < lookup_.size(); ++i) {
+    if (lookup_[i].x() > r) {
+      float t =
+          (r - lookup_[i - 1].x()) / (lookup_[i].x() - lookup_[i - 1].x());
+      return lookup_[i - 1].y() + t * (lookup_[i].y() - lookup_[i - 1].y());
+    }
+  }
+  return lookup_.back().y();
+}
+
+float LookupRadialDistortion::DistortRadius(float r) const {
+  return r * DistortionFactor(r);
+}
+
+vec2 LookupRadialDistortion::Distort(vec2 p) const {
+  return p * DistortionFactor(p.norm());
+}
+
+vec2 LookupRadialDistortion::DistortInverse(vec2 p) const {
+  // Secant method.
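+  // Iteratively refines r such that DistortRadius(r) == |radius|, starting
+  // from two initial guesses bracketing the undistorted radius, then scales
+  // p by the ratio of the undistorted to the distorted radius.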
+  const float radius = p.norm();
+  float r0 = radius / 0.9f;
+  float r1 = radius * 0.9f;
+  float r2;
+  float dr0 = radius - DistortRadius(r0);
+  float dr1;
+  while (fabsf(r1 - r0) > 0.0001f /* 0.1 mm */) {
+    dr1 = radius - DistortRadius(r1);
+    r2 = r1 - dr1 * ((r1 - r0) / (dr1 - dr0));
+    r0 = r1;
+    r1 = r2;
+    dr0 = dr1;
+  }
+  return (r1 / radius) * p;
+}
+
+}  // namespace dvr
+}  // namespace android
diff --git a/libs/vr/libeds/lucid_metrics.cpp b/libs/vr/libeds/lucid_metrics.cpp
new file mode 100644
index 0000000..690c326
--- /dev/null
+++ b/libs/vr/libeds/lucid_metrics.cpp
@@ -0,0 +1,327 @@
+#include "include/private/dvr/display_metrics.h"
+#include <private/dvr/head_mount_metrics.h>
+#include <private/dvr/identity_distortion.h>
+#include <private/dvr/lookup_radial_distortion.h>
+#include <private/dvr/lucid_metrics.h>
+#include <private/dvr/types.h>
+
+namespace {
+
+// These numbers are specific to the OnePlus One and therefore
+// temporary until we advance to the next Lucid development platform.
+
+// Head mount metrics for Lucid A00
+static const float kDefaultInterLensDistance = 0.064f;  // 64mm
+static const float kDefaultTrayToLensDistance = 0.035f;
+static const float kDefaultVirtualEyeToScreenDistance = 0.042f;
+static const android::dvr::HeadMountMetrics::VerticalAlignment
+    kDefaultVerticalAlignment = android::dvr::HeadMountMetrics::kCenter;
+static const float kDefaultFovHalfAngleInsideH = 43.7f * M_PI / 180.0f;
+static const float kDefaultFovHalfAngleOutsideH = 47.8f * M_PI / 180.0f;
+static const float kDefaultFovHalfAngleV = 54.2f * M_PI / 180.0f;
+
+// Screen size in meters for Lucid (Nexus 6 display in portrait mode).
+static const android::dvr::vec2 kScreenSizeInMeters(0.0742177f, 0.131943f);
+
+// Border size in meters for the OnePlus One.
+static const float kScreenBorderSize = 0.004f;
+
+// Refresh rate.
+static const float kScreenRefreshRate = 60.0f;
+
+// Lucid display orientation is portrait.
+static const android::dvr::DisplayOrientation kDisplayOrientation =
+    android::dvr::DisplayOrientation::kPortrait;
+
+}  // anonymous namespace
+
+namespace android {
+namespace dvr {
+
+// The distortion lookup tables were generated via a raytraced lens simulation.
+// Please see the following spreadsheet for the full calculations:
+// https://docs.google.com/a/google.com/spreadsheets/d/
+//       15cfHmCw5mHVOQ1rAJxMhta4q0e8zzcUDka1nRkfl7pY/edit?usp=sharing
+LookupRadialDistortion* GetBlueDistortionLookup() {
+  // clang-format off
+  vec2 kBlueDistortionLookup[] = {
+    {0.00000000000f, 1.00000000000f},
+    {0.01888626190f, 1.00096958278f},
+    {0.03777223810f, 1.00133301793f},
+    {0.05665761905f, 1.00193985168f},
+    {0.07554214286f, 1.00279048731f},
+    {0.09442542857f, 1.00388751781f},
+    {0.11330704762f, 1.00523363045f},
+    {0.13218657143f, 1.00683149424f},
+    {0.15106340476f, 1.00868516849f},
+    {0.16993695238f, 1.01079861126f},
+    {0.18880640476f, 1.01317712726f},
+    {0.20767092857f, 1.01582607321f},
+    {0.22652945238f, 1.01875203063f},
+    {0.24538078571f, 1.02196207850f},
+    {0.26422352381f, 1.02546421601f},
+    {0.28305602381f, 1.02926737969f},
+    {0.30187640476f, 1.03338139216f},
+    {0.32068252381f, 1.03781702504f},
+    {0.33947190476f, 1.04258620905f},
+    {0.35824171429f, 1.04770206653f},
+    {0.37698869048f, 1.05317909331f},
+    {0.39570916667f, 1.05903306635f},
+    {0.41439900000f, 1.06528124790f},
+    {0.43305350000f, 1.07194257391f},
+    {0.45166738095f, 1.07903777957f},
+    {0.47023471429f, 1.08658953759f},
+    {0.48874897619f, 1.09462239798f},
+    {0.50720285714f, 1.10316330018f},
+    {0.52558835714f, 1.11224144183f},
+    {0.54389669048f, 1.12188861421f},
+    {0.56211826190f, 1.13213939967f},
+    {0.58024261905f, 1.14303145047f},
+    {0.59825847619f, 1.15460566091f},
+    {0.61615335714f, 1.16690711338f},
+    {0.63391345238f, 1.17998560444f},
+    {0.65152300000f, 1.19389708987f},
+    {0.66896328571f, 1.20870580446f},
+    {0.68621100000f, 1.22448751087f},
+    {0.70323578571f, 1.24133415620f},
+    {0.71999716667f, 1.25935962776f},
+    {0.73643969048f, 1.27870875648f},
+    {0.75250778571f, 1.29953256670f},
+    {0.76817614286f, 1.32193822000f},
+    {0.78342009524f, 1.34604270338f},
+    {0.79828314286f, 1.37185833833f},
+    {0.81267376190f, 1.39964322604f},
+    {0.82656559524f, 1.42955958262f},
+    {0.83983054762f, 1.46196539657f},
+    {0.85234333333f, 1.49724142650f},
+    {0.86394971429f, 1.53585530271f},
+    {0.87422461905f, 1.57881139444f},
+    {0.88382583095f, 1.62091537826f},
+    {0.89571361286f, 1.67610209261f},
+    {0.90490389167f, 1.72118819668f},
+    {0.91526452143f, 1.77496904481f},
+    {0.92651365452f, 1.83722833673f},
+    {0.93437489976f, 1.88337590145f},
+    {0.94654105500f, 1.95937892848f},
+    {0.95476685095f, 2.01469745492f},
+    {0.96720383310f, 2.10451495481f},
+    {0.97546726405f, 2.16904926656f},
+    {0.98774046786f, 2.27302748020f},
+    {0.99579206762f, 2.34720582421f},
+    {1.00763328857f, 2.46603526105f},
+    {1.01533118405f, 2.55049232288f},
+    {1.02287120929f, 2.63936582235f}
+  };
+  // clang-format on
+  return new LookupRadialDistortion(
+      kBlueDistortionLookup, sizeof(kBlueDistortionLookup) / sizeof(vec2));
+}
+
+LookupRadialDistortion* GetGreenDistortionLookup() {
+  // clang-format off
+  vec2 kGreenDistortionLookup[] = {
+    {0.00000000000f, 1.00000000000f},
+    {0.01898883333f, 1.00000000000f},
+    {0.03797750000f, 1.00000000000f},
+    {0.05696585714f, 1.00000000000f},
+    {0.07595369048f, 1.00000000000f},
+    {0.09494078571f, 1.00000000000f},
+    {0.11392685714f, 1.00000000000f},
+    {0.13291157143f, 1.00000000000f},
+    {0.15189450000f, 1.00176560670f},
+    {0.17087511905f, 1.00384553961f},
+    {0.18985280952f, 1.00618614484f},
+    {0.20882680952f, 1.00879302066f},
+    {0.22779623810f, 1.01167234096f},
+    {0.24675997619f, 1.01483135203f},
+    {0.26571680952f, 1.01827767641f},
+    {0.28466519048f, 1.02202026825f},
+    {0.30360342857f, 1.02606859705f},
+    {0.32252950000f, 1.03043334057f},
+    {0.34144104762f, 1.03512630376f},
+    {0.36033538095f, 1.04016038545f},
+    {0.37920942857f, 1.04554970984f},
+    {0.39805966667f, 1.05130981266f},
+    {0.41688209524f, 1.05745768999f},
+    {0.43567214286f, 1.06401204155f},
+    {0.45442473810f, 1.07099310305f},
+    {0.47313411905f, 1.07842314596f},
+    {0.49179388095f, 1.08632639514f},
+    {0.51039692857f, 1.09472920992f},
+    {0.52893538095f, 1.10366038032f},
+    {0.54740061905f, 1.11315113705f},
+    {0.56578326190f, 1.12323535769f},
+    {0.58407300000f, 1.13395008040f},
+    {0.60225871429f, 1.14533547370f},
+    {0.62032809524f, 1.15743581542f},
+    {0.63826750000f, 1.17030000749f},
+    {0.65606135714f, 1.18398295206f},
+    {0.67369107143f, 1.19854780583f},
+    {0.69113350000f, 1.21406895255f},
+    {0.70835842857f, 1.23063670464f},
+    {0.72532545238f, 1.24836302903f},
+    {0.74197478571f, 1.26739777609f},
+    {0.75822164286f, 1.28793886907f},
+    {0.77407361905f, 1.31003521318f},
+    {0.78948523810f, 1.33383710115f},
+    {0.80448471429f, 1.35938255065f},
+    {0.81901733333f, 1.38686361242f},
+    {0.83305214286f, 1.41644808409f},
+    {0.84646438095f, 1.44848277406f},
+    {0.85912733333f, 1.48334485259f},
+    {0.87088369048f, 1.52149970074f},
+    {0.88131250000f, 1.56392750036f},
+    {0.89105132929f, 1.60552684742f},
+    {0.90312479476f, 1.66002695068f},
+    {0.91244067452f, 1.70458805205f},
+    {0.92297971714f, 1.75767475825f},
+    {0.93440940905f, 1.81916050294f},
+    {0.94237194976f, 1.86478635937f},
+    {0.95471202405f, 1.93989738862f},
+    {0.96305355738f, 1.99457325750f},
+    {0.97567372071f, 2.08333293385f},
+    {0.98407229071f, 2.14708073108f},
+    {0.99653762071f, 2.24981649552f},
+    {1.00471276167f, 2.32311751786f},
+    {1.01672394000f, 2.44057411530f},
+    {1.02452363381f, 2.52407947994f},
+    {1.03216732667f, 2.61194301580f}
+  };
+  // clang-format on
+  return new LookupRadialDistortion(
+      kGreenDistortionLookup, sizeof(kGreenDistortionLookup) / sizeof(vec2));
+}
+
+LookupRadialDistortion* GetRedDistortionLookup() {
+  // clang-format off
+  vec2 kRedDistortionLookup[] = {
+    {0.00000000000f, 1.00000000000f},
+    {0.01906776190f, 1.00000000000f},
+    {0.03813547619f, 1.00000000000f},
+    {0.05720304762f, 1.00000000000f},
+    {0.07627040476f, 1.00000000000f},
+    {0.09533740476f, 1.00000000000f},
+    {0.11440385714f, 1.00000000000f},
+    {0.13346952381f, 1.00000000000f},
+    {0.15253409524f, 1.00000000000f},
+    {0.17159714286f, 1.00000000000f},
+    {0.19065814286f, 1.00053530030f},
+    {0.20971645238f, 1.00310924426f},
+    {0.22877123810f, 1.00595236192f},
+    {0.24782154762f, 1.00907150786f},
+    {0.26686623810f, 1.01247435420f},
+    {0.28590388095f, 1.01616968529f},
+    {0.30493288095f, 1.02016688932f},
+    {0.32395133333f, 1.02447646681f},
+    {0.34295697619f, 1.02911011406f},
+    {0.36194726190f, 1.03408046560f},
+    {0.38091921429f, 1.03940151599f},
+    {0.39986942857f, 1.04508858434f},
+    {0.41879402381f, 1.05115843585f},
+    {0.43768857143f, 1.05762946333f},
+    {0.45654809524f, 1.06452169646f},
+    {0.47536695238f, 1.07185711363f},
+    {0.49413888095f, 1.07965956927f},
+    {0.51285690476f, 1.08795508025f},
+    {0.53151326190f, 1.09677206014f},
+    {0.55009952381f, 1.10614118417f},
+    {0.56860633333f, 1.11609607621f},
+    {0.58702361905f, 1.12667304464f},
+    {0.60534028571f, 1.13791190276f},
+    {0.62354421429f, 1.14985618930f},
+    {0.64162188095f, 1.16255413653f},
+    {0.65955780952f, 1.17605992962f},
+    {0.67733352381f, 1.19043584317f},
+    {0.69492602381f, 1.20575517508f},
+    {0.71230514286f, 1.22210708787f},
+    {0.72943057143f, 1.23960199799f},
+    {0.74623921429f, 1.25839340501f},
+    {0.76262400000f, 1.27871385661f},
+    {0.77861754762f, 1.30056919119f},
+    {0.79415866667f, 1.32413401001f},
+    {0.80926385714f, 1.34946540639f},
+    {0.82390640476f, 1.37670655635f},
+    {0.83805190476f, 1.40602920817f},
+    {0.85157807143f, 1.43777181543f},
+    {0.86435700000f, 1.47230885729f},
+    {0.87622914286f, 1.51010361811f},
+    {0.88677650000f, 1.55211817236f},
+    {0.89663317738f, 1.59330127207f},
+    {0.90883197952f, 1.64729627820f},
+    {0.91827594357f, 1.69138814689f},
+    {0.92892199405f, 1.74398939784f},
+    {0.94047261548f, 1.80490554711f},
+    {0.94852659262f, 1.85009630648f},
+    {0.96099790167f, 1.92451421938f},
+    {0.96945317500f, 1.97863645920f},
+    {0.98221554286f, 2.06656418112f},
+    {0.99069599476f, 2.12974390154f},
+    {1.00331392976f, 2.23149730290f},
+    {1.01157138762f, 2.30414058939f},
+    {1.02372409452f, 2.42049694265f},
+    {1.03162992905f, 2.50318810924f},
+    {1.03934762000f, 2.59027212626f}
+  };
+  // clang-format on
+  return new LookupRadialDistortion(
+      kRedDistortionLookup, sizeof(kRedDistortionLookup) / sizeof(vec2));
+}
+
+HeadMountMetrics CreateHeadMountMetrics(const FieldOfView& l_fov,
+                                        const FieldOfView& r_fov) {
+  std::shared_ptr<ColorChannelDistortion> default_distortion_r(
+      GetRedDistortionLookup());
+  std::shared_ptr<ColorChannelDistortion> default_distortion_g(
+      GetGreenDistortionLookup());
+  std::shared_ptr<ColorChannelDistortion> default_distortion_b(
+      GetBlueDistortionLookup());
+
+  return HeadMountMetrics(
+      kDefaultInterLensDistance, kDefaultTrayToLensDistance,
+      kDefaultVirtualEyeToScreenDistance, kDefaultVerticalAlignment, l_fov,
+      r_fov, default_distortion_r, default_distortion_g, default_distortion_b,
+      HeadMountMetrics::EyeOrientation::kCCW0Degrees,
+      HeadMountMetrics::EyeOrientation::kCCW0Degrees,
+      kDefaultInterLensDistance / 2.0f);
+}
+
+HeadMountMetrics CreateHeadMountMetrics() {
+  FieldOfView l_fov(kDefaultFovHalfAngleOutsideH, kDefaultFovHalfAngleInsideH,
+                    kDefaultFovHalfAngleV, kDefaultFovHalfAngleV);
+  FieldOfView r_fov(kDefaultFovHalfAngleInsideH, kDefaultFovHalfAngleOutsideH,
+                    kDefaultFovHalfAngleV, kDefaultFovHalfAngleV);
+
+  return CreateHeadMountMetrics(l_fov, r_fov);
+}
+
+DisplayMetrics CreateDisplayMetrics(vec2i screen_size) {
+  vec2 meters_per_pixel(
+      kScreenSizeInMeters[0] / static_cast<float>(screen_size[0]),
+      kScreenSizeInMeters[1] / static_cast<float>(screen_size[1]));
+  return DisplayMetrics(screen_size, meters_per_pixel, kScreenBorderSize,
+                        1000.0f / kScreenRefreshRate, kDisplayOrientation);
+}
+
+HeadMountMetrics CreateUndistortedHeadMountMetrics() {
+  FieldOfView l_fov(kDefaultFovHalfAngleOutsideH, kDefaultFovHalfAngleInsideH,
+                    kDefaultFovHalfAngleV, kDefaultFovHalfAngleV);
+  FieldOfView r_fov(kDefaultFovHalfAngleInsideH, kDefaultFovHalfAngleOutsideH,
+                    kDefaultFovHalfAngleV, kDefaultFovHalfAngleV);
+  return CreateUndistortedHeadMountMetrics(l_fov, r_fov);
+}
+
+HeadMountMetrics CreateUndistortedHeadMountMetrics(const FieldOfView& l_fov,
+                                                   const FieldOfView& r_fov) {
+  auto distortion_all = std::make_shared<IdentityDistortion>();
+
+  return HeadMountMetrics(kDefaultInterLensDistance, kDefaultTrayToLensDistance,
+                          kDefaultVirtualEyeToScreenDistance,
+                          kDefaultVerticalAlignment, l_fov, r_fov,
+                          distortion_all, distortion_all, distortion_all,
+                          HeadMountMetrics::EyeOrientation::kCCW0Degrees,
+                          HeadMountMetrics::EyeOrientation::kCCW0Degrees,
+                          kDefaultInterLensDistance / 2.0f);
+}
+
+}  // namespace dvr
+}  // namespace android
diff --git a/libs/vr/libeds/lucid_pose_tracker.cpp b/libs/vr/libeds/lucid_pose_tracker.cpp
new file mode 100644
index 0000000..c321bb0
--- /dev/null
+++ b/libs/vr/libeds/lucid_pose_tracker.cpp
@@ -0,0 +1,90 @@
+#include "include/private/dvr/lucid_pose_tracker.h"
+
+#define LOG_TAG "LucidPoseTracker"
+#include <cutils/log.h>
+
+#include <private/dvr/clock_ns.h>
+
+namespace android {
+namespace dvr {
+
+bool LucidPoseTracker::is_override_pose_ = false;
+Posef LucidPoseTracker::override_pose_ = Posef();
+
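+// Global override that pins GetPose() to a fixed pose, e.g. so tests can run
+// against deterministic pose data.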
+void LucidPoseTracker::SetPoseOverride(const Posef& pose) {
+  is_override_pose_ = true;
+  override_pose_ = pose;
+}
+
+void LucidPoseTracker::ClearPoseOverride() {
+  is_override_pose_ = false;
+  override_pose_ = Posef();
+}
+
+LucidPoseTracker::LucidPoseTracker() : pose_client_(nullptr) {}
+
+LucidPoseTracker::~LucidPoseTracker() {
+  if (pose_client_) {
+    dvrPoseDestroy(pose_client_);
+  }
+}
+
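+// Returns the most recent pose reported by the pose service. Note that
+// |timestamp_ns| is not yet used for prediction (see the TODO below); the
+// returned pose is stamped with the current system clock instead.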
+Posef LucidPoseTracker::GetPose(uint64_t timestamp_ns) {
+  if (is_override_pose_) {
+    return override_pose_;
+  }
+
+  if (!pose_client_) {
+    pose_client_ = dvrPoseCreate();
+
+    if (!pose_client_) {
+      ALOGE("No pose service, returning identity pose");
+      return Posef();
+    }
+  }
+
+  DvrPoseState state;
+  dvrPosePoll(pose_client_, &state);
+
+  const vec4 head_rotation_in_start_quat(
+      state.head_from_start_rotation.x, state.head_from_start_rotation.y,
+      state.head_from_start_rotation.z, state.head_from_start_rotation.w);
+
+  // When the pose service hasn't computed a pose yet, it returns a zero
+  // quaternion; just use the identity rotation in that case.
+  // TODO(stefanus): Find a better way to signal and check this.
+  if (head_rotation_in_start_quat.squaredNorm() < 0.5f) {
+    latest_pose_.SetRotation(quat::Identity());
+  } else {
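+    // Reorder components: DvrPoseState stores the quaternion as x, y, z, w,
+    // while the quat constructor takes w first.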
+    latest_pose_.SetRotation(
+        quat(head_rotation_in_start_quat.w(), head_rotation_in_start_quat.x(),
+             head_rotation_in_start_quat.y(), head_rotation_in_start_quat.z())
+            .normalized());
+  }
+
+  const vec3 head_position_in_start(state.head_from_start_translation.x,
+                                    state.head_from_start_translation.y,
+                                    state.head_from_start_translation.z);
+  latest_pose_.SetPosition(head_position_in_start);
+
+  latest_timestamp_ns_ = GetSystemClockNs();
+
+  // PoseState pose_state;
+  // pose_state.timestamp_ns = latest_timestamp_ns_;
+  // pose_state.sensor_from_start_rotation =
+  //    ion::math::Rotationd::FromQuaternion(ion::math::Vector4d(
+  //        state.head_from_start_rotation.x, state.head_from_start_rotation.y,
+  //        state.head_from_start_rotation.z,
+  //        state.head_from_start_rotation.w));
+  //// TODO(stefanus): Determine the first derivative of the rotation and set it
+  //// here.
+  // pose_state.sensor_from_start_rotation_velocity =
+  // ion::math::Vector3d::Zero();
+
+  // TODO(stefanus): perform prediction.
+
+  return latest_pose_;
+}
+
+}  // namespace dvr
+}  // namespace android
diff --git a/libs/vr/libeds/polynomial_radial_distortion.cpp b/libs/vr/libeds/polynomial_radial_distortion.cpp
new file mode 100644
index 0000000..fa01bb4
--- /dev/null
+++ b/libs/vr/libeds/polynomial_radial_distortion.cpp
@@ -0,0 +1,53 @@
+#include "include/private/dvr/polynomial_radial_distortion.h"
+
+namespace android {
+namespace dvr {
+
+PolynomialRadialDistortion::PolynomialRadialDistortion(
+    const std::vector<float>& coefficients)
+    : coefficients_(coefficients) {}
+
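+// Evaluates the radial distortion polynomial
+//   f(r^2) = 1 + k1*r^2 + k2*r^4 + ... + kn*r^(2n),
+// where k1..kn are the configured coefficients.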
+float PolynomialRadialDistortion::DistortionFactor(float r_squared) const {
+  float r_factor = 1.0f;
+  float distortion_factor = 1.0f;
+
+  for (float ki : coefficients_) {
+    r_factor *= r_squared;
+    distortion_factor += ki * r_factor;
+  }
+
+  return distortion_factor;
+}
+
+float PolynomialRadialDistortion::DistortRadius(float r) const {
+  return r * DistortionFactor(r * r);
+}
+
+vec2 PolynomialRadialDistortion::Distort(vec2 p) const {
+  return p * DistortionFactor(p.squaredNorm());
+}
+
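+// Numerically inverts Distort(): solves DistortRadius(r) == |p| for r and
+// rescales p by r / |p|. With hypothetical coefficients such as
+// {0.22f, 0.24f}, DistortInverse(Distort(p)) should recover p to roughly the
+// iteration tolerance below.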
+vec2 PolynomialRadialDistortion::DistortInverse(vec2 p) const {
+  // Secant method.
+  const float radius = p.norm();
+  if (radius == 0.0f)
+    return p;  // Avoid dividing by zero below when p is at the origin.
+  float r0 = radius / 0.9f;
+  float r1 = radius * 0.9f;
+  float r2;
+  float dr0 = radius - DistortRadius(r0);
+  float dr1;
+  while (fabsf(r1 - r0) > 0.0001f /* 0.1 mm */) {
+    dr1 = radius - DistortRadius(r1);
+    r2 = r1 - dr1 * ((r1 - r0) / (dr1 - dr0));
+    r0 = r1;
+    r1 = r2;
+    dr0 = dr1;
+  }
+  return (r1 / radius) * p;
+}
+
+const std::vector<float>& PolynomialRadialDistortion::GetCoefficients() const {
+  return coefficients_;
+}
+
+}  // namespace dvr
+}  // namespace android
diff --git a/libs/vr/libeds/tests/eds_app_tests.cpp b/libs/vr/libeds/tests/eds_app_tests.cpp
new file mode 100644
index 0000000..1742736
--- /dev/null
+++ b/libs/vr/libeds/tests/eds_app_tests.cpp
@@ -0,0 +1,141 @@
+#include <EGL/egl.h>
+#include <GLES2/gl2.h>
+
+#include <base/logging.h>
+#include <dvr/graphics.h>
+#include <dvr/pose_client.h>
+#include <gtest/gtest.h>
+#include <private/dvr/graphics/shader_program.h>
+#include <private/dvr/types.h>
+
+namespace {
+
+#define POSE_BINDING 0
+
+#ifndef STRINGIFY
+#define STRINGIFY2(s) #s
+#define STRINGIFY(s) STRINGIFY2(s)
+#endif
+
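+// Minimal vertex shader: reads the late-latched view-projection matrix from
+// the uniform block bound at POSE_BINDING and positions a full-screen quad's
+// corners from gl_VertexID, so no vertex buffers are needed.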
+static const char g_vert_shader[] =
+    "layout(binding = " STRINGIFY(POSE_BINDING) ", std140)\n"
+    "uniform LateLatchData {\n"
+    "  mat4 uViewProjection;\n"
+    "};\n"
+    "void main() {\n"
+    "  vec2 verts[4];\n"
+    "  verts[0] = vec2(-1, -1);\n"
+    "  verts[1] = vec2(-1, 1);\n"
+    "  verts[2] = vec2(1, -1);\n"
+    "  verts[3] = vec2(1, 1);\n"
+    "  gl_Position = uViewProjection * vec4(verts[gl_VertexID], 0.0, 1.0);\n"
+    "}\n";
+
+static const char g_frag_shader[] =
+    "precision mediump float;\n"
+    "out vec4 outColor;\n"
+    "void main() {\n"
+    "  outColor = vec4(1.0);\n"
+    "}\n";
+
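+// Creates a graphics context with distortion enabled (disable_warp = 0) and
+// late latching requested, returning the allocated surface size through the
+// out parameters.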
+DvrGraphicsContext* CreateContext(int* surface_width, int* surface_height) {
+  DvrGraphicsContext* context = nullptr;
+  int display_width = 0, display_height = 0;
+  float inter_lens_meters = 0.0f;
+  float left_fov[4] = {0.0f};
+  float right_fov[4] = {0.0f};
+  int disable_warp = 0;
+  int enable_late_latch = 1;
+  DvrSurfaceParameter surface_params[] = {
+      DVR_SURFACE_PARAMETER_IN(DISABLE_DISTORTION, disable_warp),
+      DVR_SURFACE_PARAMETER_IN(ENABLE_LATE_LATCH, enable_late_latch),
+      DVR_SURFACE_PARAMETER_OUT(DISPLAY_WIDTH, &display_width),
+      DVR_SURFACE_PARAMETER_OUT(DISPLAY_HEIGHT, &display_height),
+      DVR_SURFACE_PARAMETER_OUT(SURFACE_WIDTH, surface_width),
+      DVR_SURFACE_PARAMETER_OUT(SURFACE_HEIGHT, surface_height),
+      DVR_SURFACE_PARAMETER_OUT(INTER_LENS_METERS, &inter_lens_meters),
+      DVR_SURFACE_PARAMETER_OUT(LEFT_FOV_LRBT, left_fov),
+      DVR_SURFACE_PARAMETER_OUT(RIGHT_FOV_LRBT, right_fov),
+      DVR_SURFACE_PARAMETER_LIST_END,
+  };
+  dvrGraphicsContextCreate(surface_params, &context);
+  return context;
+}
+
+}  // namespace
+
+TEST(EdsAppTests, EdsWithLateLatch) {
+  int surface_width = 0, surface_height = 0;
+  DvrGraphicsContext* context = CreateContext(&surface_width, &surface_height);
+  ASSERT_NE(nullptr, context);
+
+  android::dvr::ShaderProgram shader(g_vert_shader, g_frag_shader);
+
+  for (int i = 0; i < 5; ++i) {
+    DvrFrameSchedule schedule;
+    dvrGraphicsWaitNextFrame(context, 0, &schedule);
+
+    const auto ident_mat = android::dvr::mat4::Identity();
+    const float* ident_mats[] = { ident_mat.data(), ident_mat.data() };
+    GLuint late_latch_buffer_id = 0;
+    int ret = dvrBeginRenderFrameLateLatch(context, 0, schedule.vsync_count, 2,
+                                           ident_mats, ident_mats, ident_mats,
+                                           &late_latch_buffer_id);
+    EXPECT_EQ(0, ret);
+    for (int eye = 0; eye < 2; ++eye) {
+      if (eye == 0)
+        glViewport(0, 0, surface_width / 2, surface_height);
+      else
+        glViewport(surface_width / 2, 0, surface_width / 2, surface_height);
+
+      glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
+      shader.Use();
+
+      // Bind late latch pose matrix buffer.
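+      // Each eye's view-projection matrix sits at a fixed offset within the
+      // late latch buffer; bind just that mat4 (16 floats) at POSE_BINDING so
+      // the shader reads the freshly latched pose.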
+      glBindBufferRange(
+          GL_UNIFORM_BUFFER, POSE_BINDING, late_latch_buffer_id,
+          offsetof(DvrGraphicsLateLatchData, view_proj_matrix[eye]),
+          16 * sizeof(float));
+
+      // TODO(jbates): use transform feedback here to grab the vertex output
+      // and verify that it received late-latch pose data. Combine this with
+      // mocked pose data to verify that late-latching is working.
+      glDrawArrays(GL_POINTS, 0, 4);
+    }
+    dvrPresent(context);
+  }
+
+  glFinish();
+  dvrGraphicsContextDestroy(context);
+}
+
+TEST(EdsAppTests, EdsWithoutLateLatch) {
+  int surface_width = 0, surface_height = 0;
+  DvrGraphicsContext* context = CreateContext(&surface_width, &surface_height);
+  ASSERT_NE(nullptr, context);
+  DvrPose* client = dvrPoseCreate();
+  ASSERT_NE(nullptr, client);
+
+  for (int i = 0; i < 5; ++i) {
+    DvrFrameSchedule schedule;
+    dvrGraphicsWaitNextFrame(context, 0, &schedule);
+    DvrPoseAsync pose;
+    int ret = dvrPoseGet(client, schedule.vsync_count, &pose);
+    ASSERT_EQ(0, ret);
+
+    ret = dvrBeginRenderFrameEds(context, pose.orientation, pose.translation);
+    EXPECT_EQ(0, ret);
+    for (int eye = 0; eye < 2; ++eye) {
+      if (eye == 0)
+        glViewport(0, 0, surface_width / 2, surface_height);
+      else
+        glViewport(surface_width / 2, 0, surface_width / 2, surface_height);
+
+      glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
+    }
+    dvrPresent(context);
+  }
+
+  dvrPoseDestroy(client);
+  dvrGraphicsContextDestroy(context);
+}