Add DaydreamVR native libraries and services

Upstreaming the main VR system components from master-dreamos-dev
into goog/master.

Bug: None
Test: `m -j32` succeeds. Sailfish boots and basic_vr sample app works
Change-Id: I853015872afc443aecee10411ef2d6b79184d051
diff --git a/libs/vr/libdisplay/include/CPPLINT.cfg b/libs/vr/libdisplay/include/CPPLINT.cfg
new file mode 100644
index 0000000..2f8a3c0
--- /dev/null
+++ b/libs/vr/libdisplay/include/CPPLINT.cfg
@@ -0,0 +1 @@
+filter=-build/header_guard
diff --git a/libs/vr/libdisplay/include/dvr/graphics.h b/libs/vr/libdisplay/include/dvr/graphics.h
new file mode 100644
index 0000000..50d2754
--- /dev/null
+++ b/libs/vr/libdisplay/include/dvr/graphics.h
@@ -0,0 +1,475 @@
+#ifndef DVR_GRAPHICS_H_
+#define DVR_GRAPHICS_H_
+
+#include <EGL/egl.h>
+#include <sys/cdefs.h>
+
+#ifdef __ARM_NEON
+#include <arm_neon.h>
+#else
+#ifndef __FLOAT32X4T_86
+#define __FLOAT32X4T_86
+typedef float float32x4_t __attribute__ ((__vector_size__ (16)));
+typedef struct float32x4x4_t { float32x4_t val[4]; } float32x4x4_t;
+#endif
+#endif
+
+#ifndef VK_USE_PLATFORM_ANDROID_KHR
+#define VK_USE_PLATFORM_ANDROID_KHR 1
+#endif
+#include <vulkan/vulkan.h>
+
+__BEGIN_DECLS
+
+// Create a stereo surface that will be lens-warped by the system.
+EGLNativeWindowType dvrCreateWarpedDisplaySurface(int* display_width,
+                                                  int* display_height);
+EGLNativeWindowType dvrCreateDisplaySurface(void);
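+
+// For example, a lens-warped stereo surface might be created as follows (a
+// sketch only; variable names are illustrative):
+//   int display_width = 0, display_height = 0;
+//   EGLNativeWindowType window =
+//       dvrCreateWarpedDisplaySurface(&display_width, &display_height);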
+
+// Display surface parameters used to specify display surface options.
+enum {
+  DVR_SURFACE_PARAMETER_NONE = 0,
+  // WIDTH
+  DVR_SURFACE_PARAMETER_WIDTH_IN,
+  // HEIGHT
+  DVR_SURFACE_PARAMETER_HEIGHT_IN,
+  // DISABLE_DISTORTION
+  DVR_SURFACE_PARAMETER_DISABLE_DISTORTION_IN,
+  // DISABLE_STABILIZATION
+  DVR_SURFACE_PARAMETER_DISABLE_STABILIZATION_IN,
+  // Disable chromatic aberration correction
+  DVR_SURFACE_PARAMETER_DISABLE_CAC_IN,
+  // ENABLE_LATE_LATCH: Enable late latching of pose data for application
+  // GPU shaders.
+  DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN,
+  // VISIBLE
+  DVR_SURFACE_PARAMETER_VISIBLE_IN,
+  // Z_ORDER
+  DVR_SURFACE_PARAMETER_Z_ORDER_IN,
+  // EXCLUDE_FROM_BLUR
+  DVR_SURFACE_PARAMETER_EXCLUDE_FROM_BLUR_IN,
+  // BLUR_BEHIND
+  DVR_SURFACE_PARAMETER_BLUR_BEHIND_IN,
+  // DISPLAY_WIDTH
+  DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT,
+  // DISPLAY_HEIGHT
+  DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT,
+  // SURFACE_WIDTH: Returns width of allocated surface buffer.
+  DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT,
+  // SURFACE_HEIGHT: Returns height of allocated surface buffer.
+  DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT,
+  // INTER_LENS_METERS: Returns float value in meters, the distance between
+  // lenses.
+  DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT,
+  // LEFT_FOV_LRBT: Return storage must have room for array of 4 floats (in
+  // radians). The layout is left, right, bottom, top as indicated by LRBT.
+  DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT,
+  // RIGHT_FOV_LRBT: Return storage must have room for array of 4 floats (in
+  // radians). The layout is left, right, bottom, top as indicated by LRBT.
+  DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT,
+  // VSYNC_PERIOD: Returns the period of the display refresh (in
+  // nanoseconds per refresh), as a 64-bit unsigned integer.
+  DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT,
+  // SURFACE_TEXTURE_TARGET_TYPE: Returns the type of texture used as the render
+  // target.
+  DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT,
+  // SURFACE_TEXTURE_TARGET_ID: Returns the texture ID used as the render
+  // target.
+  DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT,
+  // Whether the surface needs to be flipped vertically before display. Default
+  // is 0.
+  DVR_SURFACE_PARAMETER_VERTICAL_FLIP_IN,
+  // A bool indicating whether or not to create a GL context for the surface.
+  // 0: don't create a context
+  // Non-zero: create a context.
+  // Default is 1.
+  // If this value is 0, there must be a GLES 3.2 or greater context bound on
+  // the current thread at the time dvrGraphicsContextCreate is called.
+  DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN,
+  // Specify one of DVR_SURFACE_GEOMETRY_*.
+  DVR_SURFACE_PARAMETER_GEOMETRY_IN,
+  // FORMAT: One of DVR_SURFACE_FORMAT_RGBA_8888 or DVR_SURFACE_FORMAT_RGB_565.
+  // Default is DVR_SURFACE_FORMAT_RGBA_8888.
+  DVR_SURFACE_PARAMETER_FORMAT_IN,
+  // GRAPHICS_API: One of DVR_GRAPHICS_API_GLES or DVR_GRAPHICS_API_VULKAN.
+  // Default is GLES.
+  DVR_SURFACE_PARAMETER_GRAPHICS_API_IN,
+  // VK_INSTANCE: In Vulkan mode, the application creates a VkInstance and
+  // passes it in.
+  DVR_SURFACE_PARAMETER_VK_INSTANCE_IN,
+  // VK_PHYSICAL_DEVICE: In Vulkan mode, the application passes in the
+  // PhysicalDevice handle corresponding to the logical device passed to
+  // VK_DEVICE.
+  DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN,
+  // VK_DEVICE: In Vulkan mode, the application creates a VkDevice and
+  // passes it in.
+  DVR_SURFACE_PARAMETER_VK_DEVICE_IN,
+  // VK_PRESENT_QUEUE: In Vulkan mode, the application selects a
+  // presentation-compatible VkQueue and passes it in.
+  DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN,
+  // VK_PRESENT_QUEUE_FAMILY: In Vulkan mode, the application passes in the
+  // index of the queue family containing the VkQueue passed to
+  // VK_PRESENT_QUEUE.
+  DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN,
+  // VK_SWAPCHAIN_IMAGE_COUNT: In Vulkan mode, the number of swapchain images
+  // will be returned here.
+  DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT,
+  // VK_SWAPCHAIN_IMAGE_FORMAT: In Vulkan mode, the VkFormat of the swapchain
+  // images will be returned here.
+  DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT,
+};
+
+enum {
+  // Default surface type. One wide buffer with the left eye view in the left
+  // half and the right eye view in the right half.
+  DVR_SURFACE_GEOMETRY_SINGLE,
+  // Separate buffers, one per eye. The width parameters still refer to the
+  // total width (2 * eye view width).
+  DVR_SURFACE_GEOMETRY_SEPARATE_2,
+};
+
+// Surface format. Gvr only supports RGBA_8888 and RGB_565 for now, so those are
+// the only formats we provide here.
+enum {
+  DVR_SURFACE_FORMAT_RGBA_8888,
+  DVR_SURFACE_FORMAT_RGB_565,
+};
+
+enum {
+  // Graphics contexts are created for OpenGL ES client applications by default.
+  DVR_GRAPHICS_API_GLES,
+  // Create the graphics context for Vulkan client applications.
+  DVR_GRAPHICS_API_VULKAN,
+};
+
+#define DVR_SURFACE_PARAMETER_IN(name, value) \
+  { DVR_SURFACE_PARAMETER_##name##_IN, (value), NULL }
+#define DVR_SURFACE_PARAMETER_OUT(name, value) \
+  { DVR_SURFACE_PARAMETER_##name##_OUT, 0, (value) }
+#define DVR_SURFACE_PARAMETER_LIST_END \
+  { DVR_SURFACE_PARAMETER_NONE, 0, NULL }
+
+struct DvrSurfaceParameter {
+  int32_t key;
+  int64_t value;
+  void* value_out;
+};
+
+// This is a convenience struct to hold the relevant information of the HMD
+// lenses.
+struct DvrLensInfo {
+  float inter_lens_meters;
+  float left_fov[4];
+  float right_fov[4];
+};
+
+// Creates a display surface with the given parameters. The list of parameters
+// is terminated with an entry where key == DVR_SURFACE_PARAMETER_NONE.
+// For example, the parameters array could be built as follows:
+//   int display_width = 0, display_height = 0;
+//   int surface_width = 0, surface_height = 0;
+//   float inter_lens_meters = 0.0f;
+//   float left_fov[4] = {0.0f};
+//   float right_fov[4] = {0.0f};
+//   int disable_warp = 0;
+//   DvrSurfaceParameter surface_params[] = {
+//       DVR_SURFACE_PARAMETER_IN(DISABLE_DISTORTION, disable_warp),
+//       DVR_SURFACE_PARAMETER_OUT(DISPLAY_WIDTH, &display_width),
+//       DVR_SURFACE_PARAMETER_OUT(DISPLAY_HEIGHT, &display_height),
+//       DVR_SURFACE_PARAMETER_OUT(SURFACE_WIDTH, &surface_width),
+//       DVR_SURFACE_PARAMETER_OUT(SURFACE_HEIGHT, &surface_height),
+//       DVR_SURFACE_PARAMETER_OUT(INTER_LENS_METERS, &inter_lens_meters),
+//       DVR_SURFACE_PARAMETER_OUT(LEFT_FOV_LRBT, left_fov),
+//       DVR_SURFACE_PARAMETER_OUT(RIGHT_FOV_LRBT, right_fov),
+//       DVR_SURFACE_PARAMETER_LIST_END,
+//   };
+EGLNativeWindowType dvrCreateDisplaySurfaceExtended(
+    struct DvrSurfaceParameter* parameters);
+
+int dvrGetNativeDisplayDimensions(int* native_width, int* native_height);
+
+int dvrGetDisplaySurfaceInfo(EGLNativeWindowType win, int* width, int* height,
+                             int* format);
+
+// NOTE: Only call the functions below on windows created with the API above.
+
+// Sets the visibility of the display surface based on the boolean evaluation
+// of |visible|.
+void dvrDisplaySurfaceSetVisible(EGLNativeWindowType window, int visible);
+
+// Sets the application z-order of the display surface. Higher values display on
+// top of lower values.
+void dvrDisplaySurfaceSetZOrder(EGLNativeWindowType window, int z_order);
+
+// Post the next buffer early. This allows the application to race with either
+// the async EDS process or the scanline for applications that are not using
+// system distortion. When this is called, the next buffer in the queue is
+// posted for display. It is up to the application to kick its GPU rendering
+// work in time. If the rendering is incomplete there will be significant,
+// undesirable tearing artifacts.
+// It is not recommended to use this feature with system distortion.
+void dvrDisplayPostEarly(EGLNativeWindowType window);
+
+// Opaque struct that represents a graphics context, the texture swap chain,
+// and surfaces.
+typedef struct DvrGraphicsContext DvrGraphicsContext;
+
+// Create the graphics context.
+int dvrGraphicsContextCreate(struct DvrSurfaceParameter* parameters,
+                             DvrGraphicsContext** return_graphics_context);
+
+// Destroy the graphics context.
+void dvrGraphicsContextDestroy(DvrGraphicsContext* graphics_context);
+
+// For every frame a schedule is decided by the system compositor. A sample
+// schedule for two frames is shown below.
+//
+// |                        |                        |
+// |-----------------|------|-----------------|------|
+// |                        |                        |
+// V0                A1     V1                A2     V2
+//
+// V0, V1, and V2 are display vsync events. Vsync events are uniquely identified
+// throughout the DVR system by a vsync count maintained by the system
+// compositor.
+//
+// A1 and A2 indicate when the application should finish rendering its frame,
+// including all GPU work. Under normal circumstances the scheduled finish
+// time will be set a few milliseconds before the vsync time, to give the
+// compositor time to perform distortion and EDS on the app's buffer. For apps
+// that don't use system distortion the scheduled frame finish time will be
+// closer to the vsync time. Other factors can also affect the scheduled frame
+// finish time, e.g. whether or not the System UI is being displayed.
+typedef struct DvrFrameSchedule {
+  // vsync_count is used as a frame identifier.
+  uint32_t vsync_count;
+
+  // The time when the app should finish rendering its frame, including all GPU
+  // work.
+  int64_t scheduled_frame_finish_ns;
+} DvrFrameSchedule;
+
+// Sleep until it's time to render the next frame. This should be the first
+// function called as part of an app's render loop, which normally looks like
+// this:
+//
+// while (1) {
+//   DvrFrameSchedule schedule;
+//   dvrGraphicsWaitNextFrame(..., &schedule); // Sleep until it's time to
+//                                             // render the next frame
+//   pose = dvrPoseGet(schedule.vsync_count);
+//   dvrBeginRenderFrame(...);
+//   <render a frame using the pose>
+//   dvrPresent(...); // Post the buffer
+// }
+//
+// |start_delay_ns| adjusts how long this function blocks the app from starting
+// its next frame. If |start_delay_ns| is 0, the function waits until the
+// scheduled frame finish time for the current frame, which gives the app one
+// full vsync period to render the next frame. If the app needs less than a full
+// vsync period to render the frame, pass in a non-zero |start_delay_ns| to
+// delay the start of frame rendering further. For example, if the vsync period
+// is 11.1ms and the app takes 6ms to render a frame, consider setting this to
+// 5ms (note that the value is in nanoseconds, so 5,000,000ns) so that the app
+// finishes the frame closer to the scheduled frame finish time. Delaying the
+// start of rendering allows the app to use a more up-to-date pose for
+// rendering.
+// |start_delay_ns| must be a positive value or 0. If you're unsure what to set
+// for |start_delay_ns|, use 0.
+//
+// |out_next_frame_schedule| is an output parameter that will contain the
+// schedule for the next frame. It can be null. This function returns a negative
+// error code on failure.
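+//
+// As an illustrative sketch (the 5 ms delay here is a made-up value, not a
+// recommendation for any particular device):
+//   DvrFrameSchedule schedule;
+//   int ret = dvrGraphicsWaitNextFrame(graphics_context,
+//                                      5 * 1000 * 1000 /* 5 ms */, &schedule);
+//   if (ret < 0) {
+//     // Handle the error.
+//   }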
+int dvrGraphicsWaitNextFrame(DvrGraphicsContext* graphics_context,
+                             int64_t start_delay_ns,
+                             DvrFrameSchedule* out_next_frame_schedule);
+
+// Prepares the graphics context's texture for rendering.  This function should
+// be called once for each frame, ideally immediately before the first GL call
+// on the framebuffer which wraps the surface texture.
+//
+// For GL contexts, GL states are modified as follows by this function:
+// glBindTexture(GL_TEXTURE_2D, 0);
+//
+// @param[in] graphics_context The DvrGraphicsContext.
+// @param[in] render_pose_orientation Head pose orientation that rendering for
+//            this frame will be based off of. This must be an unmodified value
+//            from DvrPoseAsync, returned by dvrPoseGet.
+// @param[in] render_pose_translation Head pose translation that rendering for
+//            this frame will be based off of. This must be an unmodified value
+//            from DvrPoseAsync, returned by dvrPoseGet.
+// @return 0 on success or a negative error code on failure.
+// Check GL errors with glGetError for other error conditions.
+int dvrBeginRenderFrameEds(DvrGraphicsContext* graphics_context,
+                           float32x4_t render_pose_orientation,
+                           float32x4_t render_pose_translation);
+int dvrBeginRenderFrameEdsVk(DvrGraphicsContext* graphics_context,
+                             float32x4_t render_pose_orientation,
+                             float32x4_t render_pose_translation,
+                             VkSemaphore acquire_semaphore,
+                             VkFence acquire_fence,
+                             uint32_t* swapchain_image_index,
+                             VkImageView* swapchain_image_view);
+// Same as dvrBeginRenderFrameEds, but with no EDS (asynchronous reprojection).
+//
+// For GL contexts, GL states are modified as follows by this function:
+// glBindTexture(GL_TEXTURE_2D, 0);
+//
+// @param[in] graphics_context The DvrGraphicsContext.
+// @return 0 on success or a negative error code on failure.
+// Check GL errors with glGetError for other error conditions.
+int dvrBeginRenderFrame(DvrGraphicsContext* graphics_context);
+int dvrBeginRenderFrameVk(DvrGraphicsContext* graphics_context,
+                          VkSemaphore acquire_semaphore, VkFence acquire_fence,
+                          uint32_t* swapchain_image_index,
+                          VkImageView* swapchain_image_view);
+
+// Maximum number of views per surface buffer (for multiview, multi-eye, etc.).
+#define DVR_GRAPHICS_SURFACE_MAX_VIEWS 4
+
+// Output data format of late latch shader. The application can bind all or part
+// of this data with the buffer ID returned by dvrBeginRenderFrameLateLatch.
+// This struct is compatible with std140 layout for use from shaders.
+struct __attribute__((__packed__)) DvrGraphicsLateLatchData {
+  // Column-major order.
+  float view_proj_matrix[DVR_GRAPHICS_SURFACE_MAX_VIEWS][16];
+  // Column-major order.
+  float view_matrix[DVR_GRAPHICS_SURFACE_MAX_VIEWS][16];
+  // Quaternion for pose orientation from start space.
+  float pose_orientation[4];
+  // Pose translation from start space.
+  float pose_translation[4];
+};
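+
+// As a sketch of how an application might bind part of the late latch output
+// (|ubo_binding| and |late_latch_buffer_id| are hypothetical; the buffer ID
+// comes from dvrBeginRenderFrameLateLatch below):
+//   glBindBufferRange(GL_UNIFORM_BUFFER, ubo_binding, late_latch_buffer_id,
+//                     offsetof(DvrGraphicsLateLatchData, view_proj_matrix),
+//                     sizeof(float) * 16 * DVR_GRAPHICS_SURFACE_MAX_VIEWS);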
+
+// Begin render frame with late latching of pose data. This kicks off a compute
+// shader that will read the latest head pose and then compute and output
+// matrices that can be used by application shaders.
+//
+// Matrices are computed with the following pseudo code.
+//   Pose pose = getLateLatchPose();
+//   out.pose_orientation = pose.orientation;
+//   out.pose_translation = pose.translation;
+//   mat4 head_from_center = ComputeInverseMatrix(pose);
+//   for each view:
+//     out.viewMatrix[view] =
+//         eye_from_head_matrices[view] * head_from_center *
+//         pose_offset_matrices[view];
+//     out.viewProjMatrix[view] =
+//         projection_matrices[view] * out.viewMatrix[view];
+//
+// For GL contexts, GL states are modified as follows by this function:
+// glBindTexture(GL_TEXTURE_2D, 0);
+// glBindBufferBase(GL_SHADER_STORAGE_BUFFER, 0, 0);
+// glBindBufferBase(GL_SHADER_STORAGE_BUFFER, 1, 0);
+// glBindBufferBase(GL_SHADER_STORAGE_BUFFER, 2, 0);
+// glBindBufferBase(GL_SHADER_STORAGE_BUFFER, 3, 0);
+// glUseProgram(0);
+//
+// @param[in] graphics_context The DvrGraphicsContext.
+// @param[in] flags Specify 0.
+// @param[in] target_vsync_count The target vsync count that this frame will
+//            display at. This is used for pose prediction.
+// @param[in] num_views Number of matrices in each of the following matrix array
+//            parameters. Typically 2 for left and right eye views. Maximum is
+//            DVR_GRAPHICS_SURFACE_MAX_VIEWS.
+// @param[in] projection_matrices Array of pointers to |num_views| matrices with
+//            column-major layout. These are the application projection
+//            matrices.
+// @param[in] eye_from_head_matrices Array of pointers to |num_views| matrices
+//            with column-major layout. See pseudo code for how these are used.
+// @param[in] pose_offset_matrices Array of pointers to |num_views| matrices
+//            with column-major layout. See pseudo code for how these are used.
+// @param[out] out_late_latch_buffer_id The GL buffer ID of the output buffer
+//             of type DvrGraphicsLateLatchData.
+// @return 0 on success or a negative error code on failure.
+// Check GL errors with glGetError for other error conditions.
+int dvrBeginRenderFrameLateLatch(DvrGraphicsContext* graphics_context,
+                                 uint32_t flags, uint32_t target_vsync_count,
+                                 int num_views,
+                                 const float** projection_matrices,
+                                 const float** eye_from_head_matrices,
+                                 const float** pose_offset_matrices,
+                                 uint32_t* out_late_latch_buffer_id);
+
+// Present a frame for display.
+// This call is normally non-blocking, unless the internal buffer queue is full.
+// @return 0 on success or a negative error code on failure.
+int dvrPresent(DvrGraphicsContext* graphics_context);
+int dvrPresentVk(DvrGraphicsContext* graphics_context,
+                 VkSemaphore submit_semaphore, uint32_t swapchain_image_index);
+
+// Post the next buffer early. This allows the application to race with either
+// the async EDS process or the scanline for applications that are not using
+// system distortion. When this is called, the next buffer in the queue is
+// posted for display. It is up to the application to kick its GPU rendering
+// work in time. If the rendering is incomplete there will be significant,
+// undesirable tearing artifacts.
+// It is not recommended to use this feature with system distortion.
+void dvrGraphicsPostEarly(DvrGraphicsContext* graphics_context);
+
+// Used to retrieve frame measurement timings from dvrGetFrameScheduleResults().
+typedef struct DvrFrameScheduleResult {
+  // vsync_count is used as a frame identifier.
+  uint32_t vsync_count;
+
+  // The app's scheduled frame finish time.
+  int64_t scheduled_frame_finish_ns;
+
+  // The difference (in nanoseconds) between the scheduled finish time and the
+  // actual finish time.
+  //
+  // A value of +2ms for frame_finish_offset_ns indicates the app's frame was
+  // late and may have been skipped by the compositor for that vsync. A value of
+  // -1ms indicates the app's frame finished just ahead of schedule, as
+  // desired. A value of -6ms indicates the app's frame finished well ahead of
+  // schedule for that vsync. In that case the app may have unnecessary visual
+  // latency. Consider using the start_delay_ns parameter in
+  // dvrGraphicsWaitNextFrame() to align the app's frame finish time closer to
+  // the scheduled finish time.
+  int64_t frame_finish_offset_ns;
+} DvrFrameScheduleResult;
+
+// Retrieve the latest frame schedule results for the app. To collect all the
+// results this should be called each frame. The results for each frame are
+// returned only once.
+// The number of results written to |results| is returned on success, or a
+// negative error code on failure.
+// |graphics_context| is the context to retrieve frame schedule results for.
+// |results| is an array that will contain the frame schedule results.
+// |result_count| is the size of the |results| array. It's recommended to pass
+// in an array with 2 elements to ensure results for all frames are collected.
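+//
+// A minimal polling sketch (placement within the frame loop is up to the app):
+//   DvrFrameScheduleResult results[2];
+//   int count = dvrGetFrameScheduleResults(graphics_context, results, 2);
+//   for (int i = 0; i < count; ++i) {
+//     // Inspect results[i].frame_finish_offset_ns here.
+//   }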
+int dvrGetFrameScheduleResults(DvrGraphicsContext* graphics_context,
+                               DvrFrameScheduleResult* results,
+                               int result_count);
+
+// Make the surface visible or hidden based on |visible|.
+// 0: hidden, Non-zero: visible.
+void dvrGraphicsSurfaceSetVisible(DvrGraphicsContext* graphics_context,
+                                  int visible);
+
+// Returns surface visibility last requested by the client.
+int dvrGraphicsSurfaceGetVisible(DvrGraphicsContext* graphics_context);
+
+// Returns surface z order last requested by the client.
+int dvrGraphicsSurfaceGetZOrder(DvrGraphicsContext* graphics_context);
+
+// Sets the compositor z-order of the surface. Higher values display on
+// top of lower values.
+void dvrGraphicsSurfaceSetZOrder(DvrGraphicsContext* graphics_context,
+                                 int z_order);
+
+typedef struct DvrVideoMeshSurface DvrVideoMeshSurface;
+
+DvrVideoMeshSurface* dvrGraphicsVideoMeshSurfaceCreate(
+    DvrGraphicsContext* graphics_context);
+void dvrGraphicsVideoMeshSurfaceDestroy(DvrVideoMeshSurface* surface);
+
+// Present a VideoMeshSurface with the current video mesh transformation matrix.
+void dvrGraphicsVideoMeshSurfacePresent(DvrGraphicsContext* graphics_context,
+                                        DvrVideoMeshSurface* surface,
+                                        const int eye,
+                                        const float* transform);
+
+__END_DECLS
+
+#endif  // DVR_GRAPHICS_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/display_client.h b/libs/vr/libdisplay/include/private/dvr/display_client.h
new file mode 100644
index 0000000..034b7b4
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/display_client.h
@@ -0,0 +1,126 @@
+#ifndef ANDROID_DVR_DISPLAY_CLIENT_H_
+#define ANDROID_DVR_DISPLAY_CLIENT_H_
+
+#include <hardware/hwcomposer.h>
+#include <pdx/client.h>
+#include <pdx/file_handle.h>
+#include <private/dvr/buffer_hub_client.h>
+#include <private/dvr/display_rpc.h>
+
+namespace android {
+namespace dvr {
+
+struct LateLatchOutput;
+
+// Abstract base class for all surface types maintained in DVR's display
+// service.
+// TODO(jwcai) Explain more, surface is a channel...
+class SurfaceClient : public pdx::Client {
+ public:
+  using LocalChannelHandle = pdx::LocalChannelHandle;
+  SurfaceType type() const { return type_; }
+
+  // Get the shared memory metadata buffer fd for this display surface. If it is
+  // not yet allocated, this will allocate it.
+  int GetMetadataBufferFd(pdx::LocalHandle* out_fd);
+
+  // Allocate the single metadata buffer for providing metadata associated with
+  // posted buffers for this surface. This can be used to provide rendered poses
+  // for EDS, for example. The buffer format is defined by the struct
+  // DisplaySurfaceMetadata.
+  // The first call to this method will allocate the buffer via IPC to the
+  // display surface.
+  std::shared_ptr<BufferProducer> GetMetadataBuffer();
+
+ protected:
+  SurfaceClient(LocalChannelHandle channel_handle, SurfaceType type);
+  SurfaceClient(const std::string& endpoint_path, SurfaceType type);
+
+ private:
+  SurfaceType type_;
+  std::shared_ptr<BufferProducer> metadata_buffer_;
+};
+
+// DisplaySurfaceClient represents the client interface to a displayd display
+// surface.
+class DisplaySurfaceClient
+    : public pdx::ClientBase<DisplaySurfaceClient, SurfaceClient> {
+ public:
+  using LocalHandle = pdx::LocalHandle;
+
+  int width() const { return width_; }
+  int height() const { return height_; }
+  int format() const { return format_; }
+  int usage() const { return usage_; }
+  int flags() const { return flags_; }
+  int z_order() const { return z_order_; }
+  bool visible() const { return visible_; }
+
+  void SetVisible(bool visible);
+  void SetZOrder(int z_order);
+  void SetExcludeFromBlur(bool exclude_from_blur);
+  void SetBlurBehind(bool blur_behind);
+  void SetAttributes(const DisplaySurfaceAttributes& attributes);
+
+  // |out_buffer_index| will receive a unique index for this buffer within the
+  // surface. The first buffer gets 0, second gets 1, and so on. This index
+  // can be used to deliver metadata for buffers that are queued for display.
+  std::shared_ptr<BufferProducer> AllocateBuffer(uint32_t* out_buffer_index);
+  std::shared_ptr<BufferProducer> AllocateBuffer() {
+    return AllocateBuffer(nullptr);
+  }
+
+  // Get the shared memory metadata buffer for this display surface. If it is
+  // not yet allocated, this will allocate it.
+  volatile DisplaySurfaceMetadata* GetMetadataBufferPtr();
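+
+  // As a sketch of how the buffer index returned by AllocateBuffer relates to
+  // the metadata buffer above (locals here are hypothetical):
+  //   uint32_t buffer_index = 0;
+  //   auto buffer = surface->AllocateBuffer(&buffer_index);
+  //   volatile DisplaySurfaceMetadata* metadata =
+  //       surface->GetMetadataBufferPtr();
+  //   metadata->orientation[buffer_index] = render_pose_orientation;
+  //   metadata->translation[buffer_index] = render_pose_translation;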
+
+  // Create a VideoMeshSurface that is attached to the display surface.
+  LocalChannelHandle CreateVideoMeshSurface();
+
+ private:
+  friend BASE;
+
+  DisplaySurfaceClient(int width, int height, int format, int usage, int flags);
+
+  int width_;
+  int height_;
+  int format_;
+  int usage_;
+  int flags_;
+  int z_order_;
+  bool visible_;
+  bool exclude_from_blur_;
+  bool blur_behind_;
+  DisplaySurfaceMetadata* mapped_metadata_buffer_;
+
+  DisplaySurfaceClient(const DisplaySurfaceClient&) = delete;
+  void operator=(const DisplaySurfaceClient&) = delete;
+};
+
+class DisplayClient : public pdx::ClientBase<DisplayClient> {
+ public:
+  int GetDisplayMetrics(SystemDisplayMetrics* metrics);
+  pdx::Status<void> SetViewerParams(const ViewerParams& viewer_params);
+
+  // Pull the latest EDS pose data from the display service renderer.
+  int GetLastFrameEdsTransform(LateLatchOutput* ll_out);
+
+  int EnterVrMode();
+  int ExitVrMode();
+
+  std::unique_ptr<DisplaySurfaceClient> CreateDisplaySurface(
+      int width, int height, int format, int usage, int flags);
+
+ private:
+  friend BASE;
+
+  explicit DisplayClient(int* error = nullptr);
+
+  DisplayClient(const DisplayClient&) = delete;
+  void operator=(const DisplayClient&) = delete;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_DISPLAY_CLIENT_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/display_manager_client.h b/libs/vr/libdisplay/include/private/dvr/display_manager_client.h
new file mode 100644
index 0000000..f28c1e4
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/display_manager_client.h
@@ -0,0 +1,73 @@
+#ifndef DVR_DISPLAY_MANAGER_CLIENT_H_
+#define DVR_DISPLAY_MANAGER_CLIENT_H_
+
+#include <stdbool.h>
+#include <stddef.h>
+#include <stdint.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct DvrDisplayManagerClient DvrDisplayManagerClient;
+typedef struct DvrDisplayManagerClientSurfaceList
+    DvrDisplayManagerClientSurfaceList;
+typedef struct DvrDisplayManagerClientSurfaceBuffers
+    DvrDisplayManagerClientSurfaceBuffers;
+
+DvrDisplayManagerClient* dvrDisplayManagerClientCreate(void);
+
+void dvrDisplayManagerClientDestroy(DvrDisplayManagerClient* client);
+
+// If successful, populates |surface_list| with a list of application
+// surfaces the display is currently using.
+//
+// @return 0 on success. Otherwise it returns a negative error value.
+int dvrDisplayManagerClientGetSurfaceList(
+    DvrDisplayManagerClient* client,
+    DvrDisplayManagerClientSurfaceList** surface_list);
+
+void dvrDisplayManagerClientSurfaceListDestroy(
+    DvrDisplayManagerClientSurfaceList* surface_list);
+
+// @return Returns the number of surfaces in the list.
+size_t dvrDisplayManagerClientSurfaceListGetSize(
+    DvrDisplayManagerClientSurfaceList* surface_list);
+
+// @return Returns a unique identifier for a client surface. The identifier can
+// be used to query for other surface properties.
+int dvrDisplayManagerClientSurfaceListGetSurfaceId(
+    DvrDisplayManagerClientSurfaceList* surface_list, size_t index);
+
+// @return Returns the stacking order of the client surface at |index|.
+int dvrDisplayManagerClientSurfaceListGetClientZOrder(
+    DvrDisplayManagerClientSurfaceList* surface_list, size_t index);
+
+// @return Returns true if the client surface is visible, false otherwise.
+bool dvrDisplayManagerClientSurfaceListGetClientIsVisible(
+    DvrDisplayManagerClientSurfaceList* surface_list, size_t index);
+
+// Populates |surface_buffers| with the list of buffers for |surface_id|.
+// |surface_id| should be a valid ID from the list of surfaces.
+//
+// @return Returns 0 on success. Otherwise it returns a negative error value.
+int dvrDisplayManagerClientGetSurfaceBuffers(
+    DvrDisplayManagerClient* client, int surface_id,
+    DvrDisplayManagerClientSurfaceBuffers** surface_buffers);
+
+void dvrDisplayManagerClientSurfaceBuffersDestroy(
+    DvrDisplayManagerClientSurfaceBuffers* surface_buffers);
+
+// @return Returns the number of buffers.
+size_t dvrDisplayManagerClientSurfaceBuffersGetSize(
+    DvrDisplayManagerClientSurfaceBuffers* surface_buffers);
+
+// @return Returns the file descriptor for the buffer consumer at |index|.
+int dvrDisplayManagerClientSurfaceBuffersGetFd(
+    DvrDisplayManagerClientSurfaceBuffers* surface_buffers, size_t index);
+
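+// Illustrative sketch of how these entry points fit together (error handling
+// is elided and local names are hypothetical):
+//   DvrDisplayManagerClient* client = dvrDisplayManagerClientCreate();
+//   DvrDisplayManagerClientSurfaceList* surfaces = NULL;
+//   if (dvrDisplayManagerClientGetSurfaceList(client, &surfaces) == 0) {
+//     size_t count = dvrDisplayManagerClientSurfaceListGetSize(surfaces);
+//     for (size_t i = 0; i < count; ++i) {
+//       int surface_id =
+//           dvrDisplayManagerClientSurfaceListGetSurfaceId(surfaces, i);
+//       // Query buffers or other per-surface state with |surface_id| here.
+//     }
+//     dvrDisplayManagerClientSurfaceListDestroy(surfaces);
+//   }
+//   dvrDisplayManagerClientDestroy(client);
+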
+#ifdef __cplusplus
+}  // extern "C"
+#endif
+
+#endif  // DVR_DISPLAY_MANAGER_CLIENT_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/display_manager_client_impl.h b/libs/vr/libdisplay/include/private/dvr/display_manager_client_impl.h
new file mode 100644
index 0000000..645ccce
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/display_manager_client_impl.h
@@ -0,0 +1,35 @@
+#ifndef ANDROID_DVR_DISPLAY_MANAGER_CLIENT_IMPL_H_
+#define ANDROID_DVR_DISPLAY_MANAGER_CLIENT_IMPL_H_
+
+#include <vector>
+
+#include <pdx/client.h>
+#include <private/dvr/display_rpc.h>
+
+namespace android {
+namespace dvr {
+
+class BufferConsumer;
+
+class DisplayManagerClient : public pdx::ClientBase<DisplayManagerClient> {
+ public:
+  ~DisplayManagerClient() override;
+
+  int GetSurfaceList(std::vector<DisplaySurfaceInfo>* surface_list);
+
+  int GetSurfaceBuffers(
+      int surface_id, std::vector<std::unique_ptr<BufferConsumer>>* consumers);
+
+ private:
+  friend BASE;
+
+  DisplayManagerClient();
+
+  DisplayManagerClient(const DisplayManagerClient&) = delete;
+  void operator=(const DisplayManagerClient&) = delete;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_DISPLAY_MANAGER_CLIENT_IMPL_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/display_rpc.h b/libs/vr/libdisplay/include/private/dvr/display_rpc.h
new file mode 100644
index 0000000..6150b35
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/display_rpc.h
@@ -0,0 +1,342 @@
+#ifndef ANDROID_DVR_DISPLAY_RPC_H_
+#define ANDROID_DVR_DISPLAY_RPC_H_
+
+#include <sys/types.h>
+
+#include <array>
+#include <map>
+
+#include <pdx/rpc/remote_method.h>
+#include <pdx/rpc/serializable.h>
+#include <pdx/rpc/variant.h>
+#include <private/dvr/display_types.h>
+
+namespace android {
+namespace dvr {
+
+struct SystemDisplayMetrics {
+  uint32_t display_native_width;
+  uint32_t display_native_height;
+  uint32_t display_x_dpi;
+  uint32_t display_y_dpi;
+  uint32_t distorted_width;
+  uint32_t distorted_height;
+  uint32_t vsync_period_ns;
+  uint32_t hmd_ipd_mm;
+  float inter_lens_distance_m;
+  std::array<float, 4> left_fov_lrbt;
+  std::array<float, 4> right_fov_lrbt;
+
+ private:
+  PDX_SERIALIZABLE_MEMBERS(SystemDisplayMetrics, display_native_width,
+                           display_native_height, display_x_dpi, display_y_dpi,
+                           distorted_width, distorted_height, vsync_period_ns,
+                           hmd_ipd_mm, inter_lens_distance_m, left_fov_lrbt,
+                           right_fov_lrbt);
+};
+
+using SurfaceType = uint32_t;
+struct SurfaceTypeEnum {
+  enum : SurfaceType {
+    Normal = DVR_SURFACE_TYPE_NORMAL,
+    VideoMesh = DVR_SURFACE_TYPE_VIDEO_MESH,
+    Overlay = DVR_SURFACE_TYPE_OVERLAY,
+  };
+};
+
+using DisplaySurfaceFlags = uint32_t;
+enum class DisplaySurfaceFlagsEnum : DisplaySurfaceFlags {
+  DisableSystemEds = DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_EDS,
+  DisableSystemDistortion = DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_DISTORTION,
+  VerticalFlip = DVR_DISPLAY_SURFACE_FLAGS_VERTICAL_FLIP,
+  SeparateGeometry = DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2,
+  DisableSystemCac = DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_CAC,
+};
+
+using DisplaySurfaceInfoFlags = uint32_t;
+enum class DisplaySurfaceInfoFlagsEnum : DisplaySurfaceInfoFlags {
+  BuffersChanged = DVR_DISPLAY_SURFACE_ITEM_FLAGS_BUFFERS_CHANGED,
+};
+
+using DisplaySurfaceAttributeValue =
+    pdx::rpc::Variant<int32_t, int64_t, bool, float, std::array<float, 2>,
+                      std::array<float, 3>, std::array<float, 4>,
+                      std::array<float, 16>>;
+using DisplaySurfaceAttribute = uint32_t;
+struct DisplaySurfaceAttributeEnum {
+  enum : DisplaySurfaceAttribute {
+    ZOrder = DVR_DISPLAY_SURFACE_ATTRIBUTE_Z_ORDER,
+    Visible = DVR_DISPLAY_SURFACE_ATTRIBUTE_VISIBLE,
+    // Manager only.
+    Blur = DVR_DISPLAY_SURFACE_ATTRIBUTE_BLUR,
+    // Client only.
+    ExcludeFromBlur = DVR_DISPLAY_SURFACE_ATTRIBUTE_EXCLUDE_FROM_BLUR,
+    BlurBehind = DVR_DISPLAY_SURFACE_ATTRIBUTE_BLUR_BEHIND,
+  };
+
+  static std::string ToString(DisplaySurfaceAttribute attribute) {
+    switch (attribute) {
+      case ZOrder:
+        return "z-order";
+      case Visible:
+        return "visible";
+      case Blur:
+        return "blur";
+      case ExcludeFromBlur:
+        return "exclude-from-blur";
+      case BlurBehind:
+        return "blur-behind";
+      default:
+        return "unknown";
+    }
+  }
+};
+
+using DisplaySurfaceAttributes =
+    std::map<DisplaySurfaceAttribute, DisplaySurfaceAttributeValue>;
+
+struct DisplaySurfaceInfo {
+  int surface_id;
+  int process_id;
+  SurfaceType type;
+  DisplaySurfaceFlags flags;
+  DisplaySurfaceInfoFlags info_flags;
+  DisplaySurfaceAttributes client_attributes;
+  DisplaySurfaceAttributes manager_attributes;
+
+  // Convenience accessors.
+  bool IsClientVisible() const {
+    const auto* variant =
+        FindClientAttribute(DisplaySurfaceAttributeEnum::Visible);
+    bool bool_value;
+    if (variant && pdx::rpc::IfAnyOf<int32_t, int64_t, bool, float>::Get(
+                       variant, &bool_value))
+      return bool_value;
+    else
+      return false;
+  }
+
+  int ClientZOrder() const {
+    const auto* variant =
+        FindClientAttribute(DisplaySurfaceAttributeEnum::ZOrder);
+    int int_value;
+    if (variant &&
+        pdx::rpc::IfAnyOf<int32_t, int64_t, float>::Get(variant, &int_value))
+      return int_value;
+    else
+      return 0;
+  }
+
+ private:
+  const DisplaySurfaceAttributeValue* FindClientAttribute(
+      DisplaySurfaceAttribute key) const {
+    auto search = client_attributes.find(key);
+    return (search != client_attributes.end()) ? &search->second : nullptr;
+  }
+
+  PDX_SERIALIZABLE_MEMBERS(DisplaySurfaceInfo, surface_id, process_id, type,
+                           flags, info_flags, client_attributes,
+                           manager_attributes);
+};
+
+struct VideoMeshSurfaceBufferMetadata {
+  int64_t timestamp_ns;
+};
+
+struct AlignmentMarker {
+ public:
+  float horizontal;
+  float vertical;
+
+  PDX_SERIALIZABLE_MEMBERS(AlignmentMarker, horizontal, vertical);
+};
+
+struct DaydreamInternalParams {
+ public:
+  int32_t version;
+  std::vector<AlignmentMarker> alignment_markers;
+
+  PDX_SERIALIZABLE_MEMBERS(DaydreamInternalParams, version, alignment_markers);
+};
+
+struct ViewerParams {
+ public:
+  // TODO(hendrikw): Do we need viewer_vendor_name and viewer_model_name?
+  float screen_to_lens_distance;
+  float inter_lens_distance;
+  float screen_center_to_lens_distance;
+  std::vector<float> left_eye_field_of_view_angles;
+
+  enum VerticalAlignmentType : int32_t {
+    BOTTOM = 0,  // phone rests against a fixed bottom tray
+    CENTER = 1,  // phone screen assumed to be centered w.r.t. lenses
+    TOP = 2      // phone rests against a fixed top tray
+  };
+
+  enum EyeOrientation : int32_t {
+    kCCW0Degrees = 0,
+    kCCW90Degrees = 1,
+    kCCW180Degrees = 2,
+    kCCW270Degrees = 3,
+    kCCW0DegreesMirrored = 4,
+    kCCW90DegreesMirrored = 5,
+    kCCW180DegreesMirrored = 6,
+    kCCW270DegreesMirrored = 7
+  };
+
+  VerticalAlignmentType vertical_alignment;
+  std::vector<EyeOrientation> eye_orientations;
+
+  float tray_to_lens_distance;
+
+  std::vector<float> distortion_coefficients_r;
+  std::vector<float> distortion_coefficients_g;
+  std::vector<float> distortion_coefficients_b;
+
+  DaydreamInternalParams daydream_internal;
+
+  PDX_SERIALIZABLE_MEMBERS(ViewerParams, screen_to_lens_distance,
+                           inter_lens_distance, screen_center_to_lens_distance,
+                           left_eye_field_of_view_angles, vertical_alignment,
+                           eye_orientations, tray_to_lens_distance,
+                           distortion_coefficients_r, distortion_coefficients_g,
+                           distortion_coefficients_b, daydream_internal);
+};
+
+struct DisplayRPC {
+  // Service path.
+  static constexpr char kClientPath[] = "system/display/client";
+
+  // Op codes.
+  enum {
+    kOpGetMetrics = 0,
+    kOpGetEdsCapture,
+    kOpCreateSurface,
+    kOpAllocateBuffer,
+    kOpSetAttributes,
+    kOpGetMetadataBuffer,
+    kOpCreateVideoMeshSurface,
+    kOpVideoMeshSurfaceCreateProducerQueue,
+    kOpEnterVrMode,
+    kOpExitVrMode,
+    kOpSetViewerParams
+  };
+
+  // Aliases.
+  using ByteBuffer = pdx::rpc::BufferWrapper<std::vector<uint8_t>>;
+  using LocalChannelHandle = pdx::LocalChannelHandle;
+  using Void = pdx::rpc::Void;
+
+  // Methods.
+  PDX_REMOTE_METHOD(GetMetrics, kOpGetMetrics, SystemDisplayMetrics(Void));
+  PDX_REMOTE_METHOD(GetEdsCapture, kOpGetEdsCapture, ByteBuffer(Void));
+  PDX_REMOTE_METHOD(CreateSurface, kOpCreateSurface,
+                    int(int width, int height, int format, int usage,
+                        DisplaySurfaceFlags flags));
+  PDX_REMOTE_METHOD(AllocateBuffer, kOpAllocateBuffer,
+                    std::pair<std::uint32_t, LocalChannelHandle>(Void));
+  PDX_REMOTE_METHOD(SetAttributes, kOpSetAttributes,
+                    int(const DisplaySurfaceAttributes& attributes));
+  PDX_REMOTE_METHOD(GetMetadataBuffer, kOpGetMetadataBuffer,
+                    LocalChannelHandle(Void));
+  // VideoMeshSurface methods
+  PDX_REMOTE_METHOD(CreateVideoMeshSurface, kOpCreateVideoMeshSurface,
+                    LocalChannelHandle(Void));
+  PDX_REMOTE_METHOD(VideoMeshSurfaceCreateProducerQueue,
+                    kOpVideoMeshSurfaceCreateProducerQueue,
+                    LocalChannelHandle(Void));
+  PDX_REMOTE_METHOD(EnterVrMode, kOpEnterVrMode, int(Void));
+  PDX_REMOTE_METHOD(ExitVrMode, kOpExitVrMode, int(Void));
+  PDX_REMOTE_METHOD(SetViewerParams, kOpSetViewerParams,
+                    void(const ViewerParams& viewer_params));
+};
+
+struct DisplayManagerRPC {
+  // Service path.
+  static constexpr char kClientPath[] = "system/display/manager";
+
+  // Op codes.
+  enum {
+    kOpGetSurfaceList = 0,
+    kOpGetSurfaceBuffers,
+    kOpUpdateSurfaces,
+  };
+
+  // Aliases.
+  using LocalChannelHandle = pdx::LocalChannelHandle;
+  using Void = pdx::rpc::Void;
+
+  // Methods.
+  PDX_REMOTE_METHOD(GetSurfaceList, kOpGetSurfaceList,
+                    std::vector<DisplaySurfaceInfo>(Void));
+  PDX_REMOTE_METHOD(GetSurfaceBuffers, kOpGetSurfaceBuffers,
+                    std::vector<LocalChannelHandle>(int surface_id));
+  PDX_REMOTE_METHOD(
+      UpdateSurfaces, kOpUpdateSurfaces,
+      int(const std::map<int, DisplaySurfaceAttributes>& updates));
+};
+
+struct ScreenshotData {
+  int width;
+  int height;
+  std::vector<uint8_t> buffer;
+
+ private:
+  PDX_SERIALIZABLE_MEMBERS(ScreenshotData, width, height, buffer);
+};
+
+struct DisplayScreenshotRPC {
+  // Service path.
+  static constexpr char kClientPath[] = "system/display/screenshot";
+
+  // Op codes.
+  enum {
+    kOpGetFormat = 0,
+    kOpTakeScreenshot,
+  };
+
+  using Void = pdx::rpc::Void;
+
+  PDX_REMOTE_METHOD(GetFormat, kOpGetFormat, int(Void));
+  PDX_REMOTE_METHOD(TakeScreenshot, kOpTakeScreenshot,
+                    ScreenshotData(int layer_index));
+};
+
+struct VSyncSchedInfo {
+  int64_t vsync_period_ns;
+  int64_t timestamp_ns;
+  uint32_t next_vsync_count;
+
+ private:
+  PDX_SERIALIZABLE_MEMBERS(VSyncSchedInfo, vsync_period_ns, timestamp_ns,
+                           next_vsync_count);
+};
+
+struct DisplayVSyncRPC {
+  // Service path.
+  static constexpr char kClientPath[] = "system/display/vsync";
+
+  // Op codes.
+  enum {
+    kOpWait = 0,
+    kOpAck,
+    kOpGetLastTimestamp,
+    kOpGetSchedInfo,
+    kOpAcknowledge,
+  };
+
+  // Aliases.
+  using Void = pdx::rpc::Void;
+  using Timestamp = int64_t;
+
+  // Methods.
+  PDX_REMOTE_METHOD(Wait, kOpWait, Timestamp(Void));
+  PDX_REMOTE_METHOD(GetLastTimestamp, kOpGetLastTimestamp, Timestamp(Void));
+  PDX_REMOTE_METHOD(GetSchedInfo, kOpGetSchedInfo, VSyncSchedInfo(Void));
+  PDX_REMOTE_METHOD(Acknowledge, kOpAcknowledge, int(Void));
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_DISPLAY_RPC_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/display_types.h b/libs/vr/libdisplay/include/private/dvr/display_types.h
new file mode 100644
index 0000000..2bd02bd
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/display_types.h
@@ -0,0 +1,83 @@
+#ifndef ANDROID_DVR_DISPLAY_TYPES_H_
+#define ANDROID_DVR_DISPLAY_TYPES_H_
+
+#ifdef __ARM_NEON
+#include <arm_neon.h>
+#else
+#ifndef __FLOAT32X4T_86
+#define __FLOAT32X4T_86
+typedef float float32x4_t __attribute__ ((__vector_size__ (16)));
+typedef struct float32x4x4_t { float32x4_t val[4]; } float32x4x4_t;
+#endif
+#endif
+
+#include <cutils/native_handle.h>
+
+// DVR display-related data types.
+
+enum dvr_display_surface_type {
+  // Normal display surface meant to be used by applications' GL context to
+  // render into.
+  DVR_SURFACE_TYPE_NORMAL = 0,
+
+  // VideoMeshSurface is used to composite video frames into the 3D world.
+  DVR_SURFACE_TYPE_VIDEO_MESH,
+
+  // System overlay surface type. This is not currently in use.
+  DVR_SURFACE_TYPE_OVERLAY,
+};
+
+enum dvr_display_surface_flags {
+  DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_EDS = (1 << 0),
+  DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_DISTORTION = (1 << 1),
+  DVR_DISPLAY_SURFACE_FLAGS_VERTICAL_FLIP = (1 << 2),
+  DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2 = (1 << 3),
+  DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_CAC = (1 << 4),
+};
+
+enum dvr_display_surface_item_flags {
+  DVR_DISPLAY_SURFACE_ITEM_FLAGS_BUFFERS_CHANGED = (1 << 0),
+};
+
+enum dvr_display_surface_attribute {
+  DVR_DISPLAY_SURFACE_ATTRIBUTE_Z_ORDER = (1 << 0),
+  DVR_DISPLAY_SURFACE_ATTRIBUTE_VISIBLE = (1 << 1),
+  DVR_DISPLAY_SURFACE_ATTRIBUTE_BLUR = (1 << 2),
+  DVR_DISPLAY_SURFACE_ATTRIBUTE_EXCLUDE_FROM_BLUR = (1 << 3),
+  DVR_DISPLAY_SURFACE_ATTRIBUTE_BLUR_BEHIND = (1 << 4),
+};
+
+// Maximum number of buffers for a surface. Each buffer represents a single
+// frame and may actually be a buffer array if multiview rendering is in use.
+// Defined as a macro so that it can be used in shader code.
+#define kSurfaceBufferMaxCount 4
+
+// Maximum number of views per surface. Each eye is a view, for example.
+#define kSurfaceViewMaxCount 4
+
+namespace android {
+namespace dvr {
+
+struct __attribute__((packed, aligned(16))) DisplaySurfaceMetadata {
+  // Array of orientations and translations corresponding with surface buffers.
+  // The index is associated with each allocated buffer by DisplaySurface and
+  // communicated to clients.
+  // The maximum number of buffers is hard coded here as 4 so that we can bind
+  // this data structure in GPU shaders.
+  float32x4_t orientation[kSurfaceBufferMaxCount];
+  float32x4_t translation[kSurfaceBufferMaxCount];
+};
+
+struct __attribute__((packed, aligned(16))) VideoMeshSurfaceMetadata {
+  // Array of transform matrices corresponding with surface buffers.
+  // Note that the index is associated with each allocated buffer by
+  // DisplaySurface rather than VideoMeshSurface, because the metadata here is
+  // interpreted as the video mesh's transformation in each application's
+  // rendering frame.
+  float32x4x4_t transform[4][2];
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_DISPLAY_TYPES_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/dummy_native_window.h b/libs/vr/libdisplay/include/private/dvr/dummy_native_window.h
new file mode 100644
index 0000000..b03eeaa
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/dummy_native_window.h
@@ -0,0 +1,30 @@
+#ifndef ANDROID_DVR_DUMMY_NATIVE_WINDOW_H_
+#define ANDROID_DVR_DUMMY_NATIVE_WINDOW_H_
+
+#include <android/native_window.h>
+#include <ui/ANativeObjectBase.h>
+
+namespace android {
+namespace dvr {
+
+// DummyNativeWindow is an implementation of ANativeWindow that is
+// essentially empty and is used as a surface placeholder during context
+// creation for contexts that we don't intend to call eglSwapBuffers on.
+class DummyNativeWindow
+    : public ANativeObjectBase<ANativeWindow, DummyNativeWindow,
+                               LightRefBase<DummyNativeWindow> > {
+ public:
+  DummyNativeWindow();
+
+ private:
+  static int Query(const ANativeWindow* window, int what, int* value);
+  static int Perform(ANativeWindow* window, int operation, ...);
+
+  DummyNativeWindow(const DummyNativeWindow&) = delete;
+  void operator=(const DummyNativeWindow&) = delete;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_DUMMY_NATIVE_WINDOW_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/frame_history.h b/libs/vr/libdisplay/include/private/dvr/frame_history.h
new file mode 100644
index 0000000..53e0717
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/frame_history.h
@@ -0,0 +1,71 @@
+#ifndef ANDROID_DVR_FRAME_HISTORY_H_
+#define ANDROID_DVR_FRAME_HISTORY_H_
+
+#include <dvr/graphics.h>
+#include <pdx/file_handle.h>
+#include <private/dvr/ring_buffer.h>
+
+namespace android {
+namespace dvr {
+
+// FrameHistory tracks frame times from the start of rendering commands to when
+// the buffer is ready.
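+//
+// Typical per-frame usage, as a sketch (the schedule and fence locals are
+// assumed to come from the app's render loop and graphics API):
+//   frame_history.OnFrameStart(schedule.vsync_count,
+//                              schedule.scheduled_frame_finish_ns);
+//   // ... issue rendering commands ...
+//   frame_history.OnFrameSubmit(std::move(fence));
+//   frame_history.CheckForFinishedFrames();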
+class FrameHistory {
+ public:
+  FrameHistory();
+  explicit FrameHistory(int pending_frame_buffer_size);
+
+  void Reset(int pending_frame_buffer_size);
+
+  // Call when starting rendering commands (i.e. dvrBeginRenderFrame).
+  void OnFrameStart(uint32_t scheduled_vsync, int64_t scheduled_finish_ns);
+
+  // Call when rendering commands are finished (i.e. dvrPresent).
+  void OnFrameSubmit(android::pdx::LocalHandle&& fence);
+
+  // Call once per frame to see if any pending frames have finished.
+  void CheckForFinishedFrames();
+
+  // Uses the recently completed frame render times to predict how long the next
+  // frame will take, in vsync intervals. For example if the predicted frame
+  // time is 10ms and the vsync interval is 11ms, this will return 1. If the
+  // predicted frame time is 12ms and the vsync interval is 11ms, this will
+  // return 2.
+  int PredictNextFrameVsyncInterval(int64_t vsync_period_ns) const;
+
+  // Returns results for recently completed frames. Each frame's result is
+  // returned only once.
+  int GetPreviousFrameResults(DvrFrameScheduleResult* results,
+                              int result_count);
+
+  // Gets the vsync count for the most recently started frame. If there are no
+  // started frames this will return UINT32_MAX.
+  uint32_t GetCurrentFrameVsync() const;
+
+ private:
+  struct PendingFrame {
+    int64_t start_ns;
+    uint32_t scheduled_vsync;
+    int64_t scheduled_finish_ns;
+    android::pdx::LocalHandle fence;
+
+    PendingFrame();
+    PendingFrame(int64_t start_ns, uint32_t scheduled_vsync,
+                 int64_t scheduled_finish_ns,
+                 android::pdx::LocalHandle&& fence);
+
+    PendingFrame(PendingFrame&&) = default;
+    PendingFrame& operator=(PendingFrame&&) = default;
+    PendingFrame(const PendingFrame&) = delete;
+    PendingFrame& operator=(const PendingFrame&) = delete;
+  };
+
+  RingBuffer<PendingFrame> pending_frames_;
+  RingBuffer<DvrFrameScheduleResult> finished_frames_;
+  RingBuffer<int64_t> frame_duration_history_;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_FRAME_HISTORY_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/gl_fenced_flush.h b/libs/vr/libdisplay/include/private/dvr/gl_fenced_flush.h
new file mode 100644
index 0000000..1d75335
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/gl_fenced_flush.h
@@ -0,0 +1,17 @@
+#ifndef ANDROID_DVR_GL_FENCED_FLUSH_H_
+#define ANDROID_DVR_GL_FENCED_FLUSH_H_
+
+#include <EGL/egl.h>
+#include <pdx/file_handle.h>
+
+namespace android {
+namespace dvr {
+
+// Creates an EGL_SYNC_NATIVE_FENCE_ANDROID and flushes. Returns the fence as a
+// file descriptor.
+pdx::LocalHandle CreateGLSyncAndFlush(EGLDisplay display);
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_GL_FENCED_FLUSH_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/graphics_private.h b/libs/vr/libdisplay/include/private/dvr/graphics_private.h
new file mode 100644
index 0000000..57c99da
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/graphics_private.h
@@ -0,0 +1,39 @@
+#ifndef ANDROID_DVR_GRAPHICS_PRIVATE_H_
+#define ANDROID_DVR_GRAPHICS_PRIVATE_H_
+
+#ifdef __ARM_NEON
+#include <arm_neon.h>
+#else
+#ifndef __FLOAT32X4T_86
+#define __FLOAT32X4T_86
+typedef float float32x4_t __attribute__ ((__vector_size__ (16)));
+typedef struct float32x4x4_t { float32x4_t val[4]; } float32x4x4_t;
+#endif
+#endif
+
+#include <sys/cdefs.h>
+
+#include <dvr/graphics.h>
+
+__BEGIN_DECLS
+
+// Sets the pose used by the system for EDS. If dvrBeginRenderFrameEds() or
+// dvrBeginRenderFrameLateLatch() is called instead of dvrBeginRenderFrame(),
+// it is not necessary to call this function. If this function is used, the call
+// must be made after dvrBeginRenderFrame() and before dvrPresent().
+//
+// @param[in] graphics_context The DvrGraphicsContext.
+// @param[in] render_pose_orientation Head pose orientation that rendering for
+//            this frame will be based off of. This must be an unmodified value
+//            from DvrPoseAsync, returned by dvrPoseGet.
+// @param[in] render_pose_translation Head pose translation that rendering for
+//            this frame will be based off of. This must be an unmodified value
+//            from DvrPoseAsync, returned by dvrPoseGet.
+// @return 0 on success or a negative error code on failure.
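+//
+// Sketch of the required ordering (the pose fields below assume the layout of
+// the DvrPoseAsync data returned by dvrPoseGet):
+//   dvrBeginRenderFrame(graphics_context);
+//   // ... render the frame using the same pose ...
+//   dvrSetEdsPose(graphics_context, pose.orientation, pose.translation);
+//   dvrPresent(graphics_context);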
+int dvrSetEdsPose(DvrGraphicsContext* graphics_context,
+                  float32x4_t render_pose_orientation,
+                  float32x4_t render_pose_translation);
+
+__END_DECLS
+
+#endif  // ANDROID_DVR_GRAPHICS_PRIVATE_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/late_latch.h b/libs/vr/libdisplay/include/private/dvr/late_latch.h
new file mode 100644
index 0000000..d0eff51
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/late_latch.h
@@ -0,0 +1,186 @@
+#ifndef ANDROID_DVR_LATE_LATCH_H_
+#define ANDROID_DVR_LATE_LATCH_H_
+
+#include <atomic>
+#include <thread>
+#include <vector>
+
+#include <dvr/pose_client.h>
+#include <pdx/file_handle.h>
+#include <private/dvr/display_types.h>
+#include <private/dvr/graphics/shader_program.h>
+#include <private/dvr/graphics/vr_gl_extensions.h>
+#include <private/dvr/types.h>
+
+struct DvrPose;
+
+namespace android {
+namespace dvr {
+
+// Input data for late latch compute shader.
+struct LateLatchInput {
+  // For app late latch:
+  mat4 eye_from_head_mat[kSurfaceViewMaxCount];
+  mat4 proj_mat[kSurfaceViewMaxCount];
+  mat4 pose_offset[kSurfaceViewMaxCount];
+  // For EDS late latch only:
+  mat4 eds_mat1[kSurfaceViewMaxCount];
+  mat4 eds_mat2[kSurfaceViewMaxCount];
+  // For both app and EDS late latch:
+  uint32_t pose_index;
+  uint32_t render_pose_index;
+};
+
+// Output data for late latch shader. The application can use all or part of
+// this data by calling LateLatch::BindUniformBuffer.
+// This struct matches the layout of DvrGraphicsLateLatchData.
+struct LateLatchOutput {
+  mat4 view_proj_matrix[kSurfaceViewMaxCount];
+  mat4 view_matrix[kSurfaceViewMaxCount];
+  vec4 pose_quaternion;
+  vec4 pose_translation;
+};
+
+// LateLatch provides a facility for GL workloads to acquire a late-adjusted
+// model-view projection matrix, adjusted based on the position/quaternion pose
+// read from a buffer that is being written to asynchronously. The adjusted
+// MVP matrix is written to a GL buffer object via GL transform feedback.
+class LateLatch {
+ public:
+  enum BufferType {
+    kViewProjMatrix,
+    kViewMatrix,
+    kPoseQuaternion,
+    kPoseTranslation,
+    // Max transform feedback count is 4, so no more buffers can go here.
+    kNumBuffers,
+  };
+
+  static size_t GetBufferSize(BufferType type) {
+    switch (type) {
+      default:
+      case kViewProjMatrix:
+      case kViewMatrix:
+        return 4 * 4 * sizeof(float);
+      case kPoseQuaternion:
+      case kPoseTranslation:
+        return 4 * sizeof(float);
+    }
+  }
+
+  static size_t GetBufferOffset(BufferType type, int view) {
+    switch (type) {
+      default:
+      case kViewProjMatrix:
+        return offsetof(LateLatchOutput, view_proj_matrix) +
+               GetBufferSize(type) * view;
+      case kViewMatrix:
+        return offsetof(LateLatchOutput, view_matrix) +
+               GetBufferSize(type) * view;
+      case kPoseQuaternion:
+        return offsetof(LateLatchOutput, pose_quaternion);
+      case kPoseTranslation:
+        return offsetof(LateLatchOutput, pose_translation);
+    }
+  }
+
+  explicit LateLatch(bool is_app_late_latch);
+  LateLatch(bool is_app_late_latch, pdx::LocalHandle&& surface_metadata_fd);
+  ~LateLatch();
+
+  // Bind the late-latch output data as a GL_UNIFORM_BUFFER. For example,
+  // to bind just the view_matrix from the output:
+  // BindUniformBuffer(BINDING, offsetof(LateLatchOutput, view_matrix),
+  //                   sizeof(mat4));
+  // buffer_index is the index of one of the output buffers if more than 1 were
+  // requested in the constructor.
+  void BindUniformBuffer(GLuint ubo_binding, size_t offset, size_t size) const {
+    glBindBufferRange(GL_UNIFORM_BUFFER, ubo_binding, output_buffer_id_, offset,
+                      size);
+  }
+
+  void BindUniformBuffer(GLuint ubo_binding, BufferType type, int view) const {
+    glBindBufferRange(GL_UNIFORM_BUFFER, ubo_binding, output_buffer_id_,
+                      GetBufferOffset(type, view), GetBufferSize(type));
+  }
+
+  GLuint output_buffer_id() const { return output_buffer_id_; }
+
+  void UnbindUniformBuffer(GLuint ubo_binding) const {
+    glBindBufferBase(GL_UNIFORM_BUFFER, ubo_binding, 0);
+  }
+
+  void CaptureOutputData(LateLatchOutput* data) const;
+
+  // Add the late latch GL commands for this frame. This should be done just
+  // before the first application draw calls that are dependent on the latest
+  // head pose.
+  //
+  // For efficiency, the application projection and eye_from_head matrices are
+  // passed through the late latch shader and output in various combinations to
+  // allow for both simple application vertex shaders that can take the view-
+  // projection matrix as-is and shaders that need to access the view matrix
+  // separately.
+  //
+  // GL state must be reset to default for this call.
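+  //
+  // A sketch (matrix values are application-specific; |late_latch| and
+  // |render_pose_index| are hypothetical locals):
+  //   LateLatchInput data = {};
+  //   // Fill eye_from_head_mat, proj_mat and pose_offset for each view.
+  //   data.render_pose_index = render_pose_index;
+  //   late_latch.AddLateLatch(data);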
+  void AddLateLatch(const LateLatchInput& data) const;
+
+  // After calling AddEdsLateLatch one or more times, this method must be called
+  // to add the necessary GL memory barrier to ensure late latch outputs are
+  // written before the EDS and warp shaders read them.
+  void PostEdsLateLatchBarrier() const {
+    // The transform feedback buffer is going to be read as a uniform by EDS,
+    // so we need a uniform memory barrier.
+    glMemoryBarrier(GL_UNIFORM_BARRIER_BIT);
+  }
+
+  // Typically not for use by application code. This method adds the EDS late
+  // latch that will adjust the application framebuffer with the latest head
+  // pose.
+  void AddEdsLateLatch(const LateLatchInput& data,
+                       GLuint render_pose_buffer_object) const;
+
+  // For debugging purposes, capture the output during the next call to
+  // AddLateLatch. Set to NULL to reset.
+  void SetLateLatchDataCapture(LateLatchOutput* app_late_latch) {
+    app_late_latch_output_ = app_late_latch;
+  }
+
+  // For debugging purposes, capture the output during the next call to
+  // AddEdsLateLatch. Set to NULL to reset.
+  void SetEdsLateLatchDataCapture(LateLatchOutput* eds_late_latch) {
+    eds_late_latch_output_ = eds_late_latch;
+  }
+
+ private:
+  LateLatch(const LateLatch&) = delete;
+  LateLatch& operator=(const LateLatch&) = delete;
+
+  void LoadLateLatchShader();
+
+  // Late latch shader.
+  ShaderProgram late_latch_program_;
+
+  // Async pose ring buffer object.
+  GLuint pose_buffer_object_;
+
+  GLuint metadata_buffer_id_;
+
+  // Pose matrix buffers
+  GLuint input_buffer_id_;
+  GLuint output_buffer_id_;
+
+  bool is_app_late_latch_;
+  // During development, these can be used to capture the pose output data.
+  LateLatchOutput* app_late_latch_output_;
+  LateLatchOutput* eds_late_latch_output_;
+
+  DvrPose* pose_client_;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_LATE_LATCH_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/native_buffer_queue.h b/libs/vr/libdisplay/include/private/dvr/native_buffer_queue.h
new file mode 100644
index 0000000..87e9c9f
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/native_buffer_queue.h
@@ -0,0 +1,73 @@
+#ifndef ANDROID_DVR_NATIVE_BUFFER_QUEUE_H_
+#define ANDROID_DVR_NATIVE_BUFFER_QUEUE_H_
+
+#include <semaphore.h>
+
+#include <mutex>
+#include <vector>
+
+#include <private/dvr/native_buffer.h>
+#include <private/dvr/ring_buffer.h>
+
+#include "display_client.h"
+
+namespace android {
+namespace dvr {
+
+// NativeBufferQueue manages a queue of NativeBufferProducers allocated from a
+// DisplaySurfaceClient. Buffers are automatically re-enqueued when released by
+// the consumer side.
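+//
+// Example usage (illustrative sketch; |surface| is an already-connected
+// DisplaySurfaceClient and the rendering/posting step is only hinted at):
+//
+//   NativeBufferQueue queue(surface, /*capacity=*/3);
+//   for (;;) {
+//     NativeBufferProducer* buffer = queue.Dequeue();  // blocks until free
+//     // ... render into |buffer| and post it to the consumer; once the
+//     // consumer releases it, the queue re-enqueues it automatically ...
+//   }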
+class NativeBufferQueue {
+ public:
+  // Create a queue with the given number of free buffers.
+  NativeBufferQueue(const std::shared_ptr<DisplaySurfaceClient>& surface,
+                    size_t capacity);
+  NativeBufferQueue(EGLDisplay display,
+                    const std::shared_ptr<DisplaySurfaceClient>& surface,
+                    size_t capacity);
+  ~NativeBufferQueue();
+
+  std::shared_ptr<DisplaySurfaceClient> surface() const { return surface_; }
+
+  // Dequeue a buffer from the free queue, blocking until one is available.
+  NativeBufferProducer* Dequeue();
+
+  // Enqueue a buffer at the end of the free queue.
+  void Enqueue(NativeBufferProducer* buf);
+
+  // Get the number of free buffers in the queue.
+  size_t GetFreeBufferCount() const;
+
+  // Get the total number of buffers managed by this queue.
+  size_t GetQueueCapacity() const;
+
+  // Accessors for display surface buffer attributes.
+  int width() const { return surface_->width(); }
+  int height() const { return surface_->height(); }
+  int format() const { return surface_->format(); }
+  int usage() const { return surface_->usage(); }
+
+ private:
+  // Wait for buffers to be released and enqueue them.
+  bool WaitForBuffers();
+
+  std::shared_ptr<DisplaySurfaceClient> surface_;
+
+  // A list of strong pointers to the buffers, used for managing buffer
+  // lifetime.
+  std::vector<android::sp<NativeBufferProducer>> buffers_;
+
+  // Used to implement queue semantics.
+  RingBuffer<NativeBufferProducer*> buffer_queue_;
+
+  // Epoll fd used to wait for BufferHub events.
+  int epoll_fd_;
+
+  NativeBufferQueue(const NativeBufferQueue&) = delete;
+  void operator=(const NativeBufferQueue&) = delete;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_NATIVE_BUFFER_QUEUE_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/screenshot_client.h b/libs/vr/libdisplay/include/private/dvr/screenshot_client.h
new file mode 100644
index 0000000..b6fc859
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/screenshot_client.h
@@ -0,0 +1,42 @@
+#ifndef ANDROID_DVR_SCREENSHOT_CLIENT_H_
+#define ANDROID_DVR_SCREENSHOT_CLIENT_H_
+
+#include <memory>
+#include <vector>
+
+#include <pdx/client.h>
+#include <private/dvr/display_rpc.h>
+#include <system/graphics.h>
+
+namespace android {
+namespace dvr {
+
+// Represents a connection to the screenshot service, which allows capturing an
+// upcoming frame as it is being rendered to the display.
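+//
+// Example usage (illustrative sketch; assumes creation through the
+// pdx::ClientBase Create() factory and ignores error handling):
+//
+//   auto screenshot_client = ScreenshotClient::Create();
+//   std::vector<uint8_t> data;
+//   int width = 0;
+//   int height = 0;
+//   if (screenshot_client->Take(&data, /*index=*/0, &width, &height) == 0) {
+//     // |data| holds a width x height image in format().
+//   }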
+class ScreenshotClient : public pdx::ClientBase<ScreenshotClient> {
+ public:
+  int format() const { return format_; }
+
+  // Attempts to take a screenshot. If successful, sets *data to the contents
+  // of the screenshot and returns zero. Otherwise, returns a negative error
+  // code.
+  // |index| is used to match the requested buffer with various buffer layers.
+  int Take(std::vector<uint8_t>* data, int index, int* return_width,
+           int* return_height);
+
+ private:
+  friend BASE;
+
+  ScreenshotClient();
+
+  // Layout information for screenshots.
+  int format_;
+
+  ScreenshotClient(const ScreenshotClient&) = delete;
+  void operator=(const ScreenshotClient&) = delete;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_SCREENSHOT_CLIENT_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/video_mesh_surface_client.h b/libs/vr/libdisplay/include/private/dvr/video_mesh_surface_client.h
new file mode 100644
index 0000000..a2659a6
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/video_mesh_surface_client.h
@@ -0,0 +1,42 @@
+#ifndef ANDROID_DVR_VIDEO_MESH_SURFACE_CLIENT_H_
+#define ANDROID_DVR_VIDEO_MESH_SURFACE_CLIENT_H_
+
+#include <base/macros.h>
+#include <private/dvr/buffer_hub_queue_client.h>
+#include <private/dvr/display_client.h>
+
+namespace android {
+namespace dvr {
+
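+// Client for a video mesh surface channel. Provides access to the surface's
+// producer buffer queue and shared metadata buffer.
+//
+// Example usage (illustrative sketch; |channel_handle| is a LocalChannelHandle
+// obtained elsewhere, e.g. from the display service):
+//
+//   auto video_surface =
+//       VideoMeshSurfaceClient::Import(std::move(channel_handle));
+//   auto producer_queue = video_surface->GetProducerQueue();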
+class VideoMeshSurfaceClient
+    : public pdx::ClientBase<VideoMeshSurfaceClient, SurfaceClient> {
+ public:
+  using LocalChannelHandle = pdx::LocalChannelHandle;
+
+  // This call assumes ownership of |handle|.
+  static std::unique_ptr<VideoMeshSurfaceClient> Import(
+      LocalChannelHandle handle);
+
+  std::shared_ptr<ProducerQueue> GetProducerQueue();
+
+  // Get the shared memory metadata buffer for this video mesh surface. If it
+  // is not yet allocated, this will allocate it.
+  volatile VideoMeshSurfaceMetadata* GetMetadataBufferPtr();
+
+ private:
+  friend BASE;
+
+  std::shared_ptr<android::dvr::ProducerQueue> producer_queue_;
+  VideoMeshSurfaceMetadata* mapped_metadata_buffer_;
+
+  explicit VideoMeshSurfaceClient(LocalChannelHandle handle);
+};
+
+}  // namespace dvr
+}  // namespace android
+
+struct DvrVideoMeshSurface {
+  std::shared_ptr<android::dvr::VideoMeshSurfaceClient> client;
+};
+
+#endif  // ANDROID_DVR_VIDEO_MESH_SURFACE_CLIENT_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/vsync_client.h b/libs/vr/libdisplay/include/private/dvr/vsync_client.h
new file mode 100644
index 0000000..32fa40f
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/vsync_client.h
@@ -0,0 +1,70 @@
+#ifndef ANDROID_DVR_VSYNC_CLIENT_H_
+#define ANDROID_DVR_VSYNC_CLIENT_H_
+
+#include <stdint.h>
+
+#include <pdx/client.h>
+#include <private/dvr/vsync_client_api.h>
+
+struct dvr_vsync_client {};
+
+namespace android {
+namespace dvr {
+
+/*
+ * VSyncClient is a remote interface to the vsync service in displayd.
+ * This class is used to wait for and retrieve information about the
+ * display vsync.
+ */
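+/*
+ * Example usage (illustrative sketch; assumes creation through the
+ * pdx::ClientBase Create() factory and ignores error handling; |running| is
+ * an application flag):
+ *
+ *   auto vsync_client = VSyncClient::Create();
+ *   int64_t timestamp_ns = 0;
+ *   while (running) {
+ *     vsync_client->Wait(&timestamp_ns);  // blocks until the next vsync
+ *     // ... begin rendering the frame targeted at the following vsync ...
+ *   }
+ */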
+class VSyncClient : public pdx::ClientBase<VSyncClient>,
+                    public dvr_vsync_client {
+ public:
+  /*
+   * Wait for the next vsync signal.
+   * The timestamp (in ns) is written into *timestamp_ns when timestamp_ns
+   * is non-NULL.
+   */
+  int Wait(int64_t* timestamp_ns);
+
+  /*
+   * Returns the file descriptor used to communicate with the vsync system
+   * service or -1 on error.
+   */
+  int GetFd();
+
+  /*
+   * Clears the select/poll/epoll event so that subsequent calls to
+   * these will not signal until the next vsync.
+   */
+  int Acknowledge();
+
+  /*
+   * Get the timestamp of the last vsync event in ns. This call has
+   * the same side effect on events as Acknowledge(), which saves
+   * an IPC message.
+   */
+  int GetLastTimestamp(int64_t* timestamp_ns);
+
+  /*
+   * Get vsync scheduling info: the display vsync period, the estimated
+   * timestamp of the next GPU lens warp preemption event (both in ns), and
+   * the vsync count that the next lens warp operation will target. This call
+   * has the same side effect on events as Acknowledge(), which saves an IPC
+   * message.
+   */
+  int GetSchedInfo(int64_t* vsync_period_ns, int64_t* next_timestamp_ns,
+                   uint32_t* next_vsync_count);
+
+ private:
+  friend BASE;
+
+  VSyncClient();
+  explicit VSyncClient(long timeout_ms);
+
+  VSyncClient(const VSyncClient&) = delete;
+  void operator=(const VSyncClient&) = delete;
+};
+
+}  // namespace dvr
+}  // namespace android
+
+#endif  // ANDROID_DVR_VSYNC_CLIENT_H_
diff --git a/libs/vr/libdisplay/include/private/dvr/vsync_client_api.h b/libs/vr/libdisplay/include/private/dvr/vsync_client_api.h
new file mode 100644
index 0000000..4cdbc71
--- /dev/null
+++ b/libs/vr/libdisplay/include/private/dvr/vsync_client_api.h
@@ -0,0 +1,44 @@
+#ifndef ANDROID_DVR_VSYNC_CLIENT_API_H_
+#define ANDROID_DVR_VSYNC_CLIENT_API_H_
+
+#include <stdint.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// A client of the vsync service.
+//
+// The "dvr_vsync_client" structure wraps a client connection to the
+// system vsync service. It is used to synchronize application drawing
+// with the scanout of the display.
+typedef struct dvr_vsync_client dvr_vsync_client;
+
+// Creates a new client to the system vsync service.
+dvr_vsync_client* dvr_vsync_client_create();
+
+// Destroys the vsync client.
+void dvr_vsync_client_destroy(dvr_vsync_client* client);
+
+// Blocks until the next vsync signal.
+// The timestamp (in ns) is written into |*timestamp_ns| when it is non-NULL.
+// Returns 0 upon success, or -errno.
+int dvr_vsync_client_wait(dvr_vsync_client* client, int64_t* timestamp_ns);
+
+// Returns the file descriptor used to communicate with the vsync service.
+int dvr_vsync_client_get_fd(dvr_vsync_client* client);
+
+// Clears the select/poll/epoll event so that subsequent calls to these
+// will not signal until the next vsync.
+int dvr_vsync_client_acknowledge(dvr_vsync_client* client);
+
+// Gets the timestamp of the last vsync signal in ns. This call has the
+// same side effects on events as acknowledge.
+int dvr_vsync_client_get_last_timestamp(dvr_vsync_client* client,
+                                        int64_t* timestamp_ns);
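+
+// Example usage (illustrative sketch; error handling omitted and
+// |keep_running| is an application-side flag):
+//
+//   dvr_vsync_client* client = dvr_vsync_client_create();
+//   int64_t timestamp_ns = 0;
+//   while (keep_running) {
+//     dvr_vsync_client_wait(client, &timestamp_ns);
+//     // ... kick off rendering for the frame that follows |timestamp_ns| ...
+//   }
+//   dvr_vsync_client_destroy(client);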
+
+#ifdef __cplusplus
+}  // extern "C"
+#endif
+
+#endif  // ANDROID_DVR_VSYNC_CLIENT_API_H_