blob: c2fbb8b814774991c3f658f26742e65ce1e03ea3 [file] [log] [blame]
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001#include <dvr/graphics.h>
2
Alex Vakulenko4fe60582017-02-02 11:35:59 -08003#include <inttypes.h>
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08004#include <sys/timerfd.h>
5#include <array>
6#include <vector>
7
Alex Vakulenko4fe60582017-02-02 11:35:59 -08008#include <log/log.h>
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08009#include <utils/Trace.h>
10
11#ifndef VK_USE_PLATFORM_ANDROID_KHR
12#define VK_USE_PLATFORM_ANDROID_KHR 1
13#endif
14#include <vulkan/vulkan.h>
15
16#include <pdx/file_handle.h>
17#include <private/dvr/clock_ns.h>
18#include <private/dvr/debug.h>
19#include <private/dvr/display_types.h>
Hendrik Wagenaar10e68eb2017-03-15 13:29:02 -070020#include <private/dvr/dvr_buffer.h>
Alex Vakulenkoe4eec202017-01-27 14:41:04 -080021#include <private/dvr/frame_history.h>
22#include <private/dvr/gl_fenced_flush.h>
23#include <private/dvr/graphics/vr_gl_extensions.h>
24#include <private/dvr/graphics_private.h>
25#include <private/dvr/late_latch.h>
26#include <private/dvr/native_buffer_queue.h>
27#include <private/dvr/sensor_constants.h>
28#include <private/dvr/video_mesh_surface_client.h>
29#include <private/dvr/vsync_client.h>
30
31#include <android/native_window.h>
32
33#ifndef EGL_CONTEXT_MAJOR_VERSION
34#define EGL_CONTEXT_MAJOR_VERSION 0x3098
35#define EGL_CONTEXT_MINOR_VERSION 0x30FB
36#endif
37
38using android::pdx::LocalHandle;
39using android::pdx::LocalChannelHandle;
40
41using android::dvr::DisplaySurfaceAttributeEnum;
42using android::dvr::DisplaySurfaceAttributeValue;
43
44namespace {
45
// Default gralloc usage bits for display surfaces: GPU-renderable and
// GPU-sampleable.
constexpr int kDefaultDisplaySurfaceUsage =
    GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;
// Default pixel format when the app does not pass
// DVR_SURFACE_PARAMETER_FORMAT_IN.
constexpr int kDefaultDisplaySurfaceFormat = HAL_PIXEL_FORMAT_RGBA_8888;
// TODO(alexst): revisit this count when HW encode is available for casting.
constexpr int kDefaultBufferCount = 4;

// Use with dvrBeginRenderFrame to disable EDS for the current frame.
constexpr float32x4_t DVR_POSE_NO_EDS = {10.0f, 0.0f, 0.0f, 0.0f};

// Use with dvrBeginRenderFrame to indicate that GPU late-latching is being used
// for determining the render pose.
constexpr float32x4_t DVR_POSE_LATE_LATCH = {20.0f, 0.0f, 0.0f, 0.0f};
58
59#ifndef NDEBUG
60
61static const char* GetGlCallbackType(GLenum type) {
62 switch (type) {
63 case GL_DEBUG_TYPE_ERROR_KHR:
64 return "ERROR";
65 case GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR_KHR:
66 return "DEPRECATED_BEHAVIOR";
67 case GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR_KHR:
68 return "UNDEFINED_BEHAVIOR";
69 case GL_DEBUG_TYPE_PORTABILITY_KHR:
70 return "PORTABILITY";
71 case GL_DEBUG_TYPE_PERFORMANCE_KHR:
72 return "PERFORMANCE";
73 case GL_DEBUG_TYPE_OTHER_KHR:
74 return "OTHER";
75 default:
76 return "UNKNOWN";
77 }
78}
79
// KHR_debug callback: formats the GL debug message and routes it to logcat
// (mapped by severity) and, unconditionally, to stderr.
// NOTE(review): __LINE__ here expands to the line of the snprintf call itself,
// not the GL call site, so the "[file:line]" prefix is effectively static —
// confirm this is intentional.
static void on_gl_error(GLenum /*source*/, GLenum type, GLuint /*id*/,
                        GLenum severity, GLsizei /*length*/,
                        const char* message, const void* /*user_param*/) {
  char msg[400];
  snprintf(msg, sizeof(msg), "[" __FILE__ ":%u] GL %s: %s", __LINE__,
           GetGlCallbackType(type), message);
  switch (severity) {
    case GL_DEBUG_SEVERITY_LOW_KHR:
      ALOGI("%s", msg);
      break;
    case GL_DEBUG_SEVERITY_MEDIUM_KHR:
      ALOGW("%s", msg);
      break;
    case GL_DEBUG_SEVERITY_HIGH_KHR:
      ALOGE("%s", msg);
      break;
  }
  // Severities not handled above (e.g. notifications) skip logcat but still
  // reach stderr here.
  fprintf(stderr, "%s\n", msg);
}
99
100#endif
101
102int DvrToHalSurfaceFormat(int dvr_surface_format) {
103 switch (dvr_surface_format) {
104 case DVR_SURFACE_FORMAT_RGBA_8888:
105 return HAL_PIXEL_FORMAT_RGBA_8888;
106 case DVR_SURFACE_FORMAT_RGB_565:
107 return HAL_PIXEL_FORMAT_RGB_565;
108 default:
109 return HAL_PIXEL_FORMAT_RGBA_8888;
110 }
111}
112
113int SelectEGLConfig(EGLDisplay dpy, EGLint* attr, unsigned format,
114 EGLConfig* config) {
115 std::array<EGLint, 4> desired_rgba;
116 switch (format) {
117 case HAL_PIXEL_FORMAT_RGBA_8888:
118 case HAL_PIXEL_FORMAT_BGRA_8888:
119 desired_rgba = {{8, 8, 8, 8}};
120 break;
121 case HAL_PIXEL_FORMAT_RGB_565:
122 desired_rgba = {{5, 6, 5, 0}};
123 break;
124 default:
125 ALOGE("Unsupported framebuffer pixel format %d", format);
126 return -1;
127 }
128
129 EGLint max_configs = 0;
130 if (eglGetConfigs(dpy, NULL, 0, &max_configs) == EGL_FALSE) {
131 ALOGE("No EGL configurations available?!");
132 return -1;
133 }
134
135 std::vector<EGLConfig> configs(max_configs);
136
137 EGLint num_configs;
138 if (eglChooseConfig(dpy, attr, &configs[0], max_configs, &num_configs) ==
139 EGL_FALSE) {
140 ALOGE("eglChooseConfig failed");
141 return -1;
142 }
143
144 std::array<EGLint, 4> config_rgba;
145 for (int i = 0; i < num_configs; i++) {
146 eglGetConfigAttrib(dpy, configs[i], EGL_RED_SIZE, &config_rgba[0]);
147 eglGetConfigAttrib(dpy, configs[i], EGL_GREEN_SIZE, &config_rgba[1]);
148 eglGetConfigAttrib(dpy, configs[i], EGL_BLUE_SIZE, &config_rgba[2]);
149 eglGetConfigAttrib(dpy, configs[i], EGL_ALPHA_SIZE, &config_rgba[3]);
150 if (config_rgba == desired_rgba) {
151 *config = configs[i];
152 return 0;
153 }
154 }
155
156 ALOGE("Cannot find a matching EGL config");
157 return -1;
158}
159
160void DestroyEglContext(EGLDisplay egl_display, EGLContext* egl_context) {
161 if (*egl_context != EGL_NO_CONTEXT) {
162 eglDestroyContext(egl_display, *egl_context);
163 *egl_context = EGL_NO_CONTEXT;
164 }
165}
166
167// Perform internal initialization. A GL context must be bound to the current
168// thread.
169// @param internally_created_context True if we created and own the GL context,
170// false if it was supplied by the application.
171// @return 0 if init was successful, or a negative error code on failure.
172int InitGl(bool internally_created_context) {
173 EGLDisplay egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
174 if (egl_display == EGL_NO_DISPLAY) {
175 ALOGE("eglGetDisplay failed");
176 return -EINVAL;
177 }
178
179 EGLContext egl_context = eglGetCurrentContext();
180 if (egl_context == EGL_NO_CONTEXT) {
181 ALOGE("No GL context bound");
182 return -EINVAL;
183 }
184
185 glGetError(); // Clear the error state
186 GLint major_version, minor_version;
187 glGetIntegerv(GL_MAJOR_VERSION, &major_version);
188 glGetIntegerv(GL_MINOR_VERSION, &minor_version);
189 if (glGetError() != GL_NO_ERROR) {
190 // GL_MAJOR_VERSION and GL_MINOR_VERSION were added in GLES 3. If we get an
191 // error querying them it's almost certainly because it's GLES 1 or 2.
192 ALOGE("Error getting GL version. Must be GLES 3.2 or greater.");
193 return -EINVAL;
194 }
195
196 if (major_version < 3 || (major_version == 3 && minor_version < 2)) {
197 ALOGE("Invalid GL version: %d.%d. Must be GLES 3.2 or greater.",
198 major_version, minor_version);
199 return -EINVAL;
200 }
201
202#ifndef NDEBUG
203 if (internally_created_context) {
204 // Enable verbose GL debug output.
205 glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS_KHR);
206 glDebugMessageCallbackKHR(on_gl_error, NULL);
207 GLuint unused_ids = 0;
208 glDebugMessageControlKHR(GL_DONT_CARE, GL_DONT_CARE, GL_DONT_CARE, 0,
209 &unused_ids, GL_TRUE);
210 }
211#else
212 (void)internally_created_context;
213#endif
214
215 load_gl_extensions();
216 return 0;
217}
218
219int CreateEglContext(EGLDisplay egl_display, DvrSurfaceParameter* parameters,
220 EGLContext* egl_context) {
221 *egl_context = EGL_NO_CONTEXT;
222
223 EGLint major, minor;
224 if (!eglInitialize(egl_display, &major, &minor)) {
225 ALOGE("Failed to initialize EGL");
226 return -ENXIO;
227 }
228
229 ALOGI("EGL version: %d.%d\n", major, minor);
230
231 int buffer_format = kDefaultDisplaySurfaceFormat;
232
233 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
234 switch (p->key) {
235 case DVR_SURFACE_PARAMETER_FORMAT_IN:
236 buffer_format = DvrToHalSurfaceFormat(p->value);
237 break;
238 }
239 }
240
241 EGLint config_attrs[] = {EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
242 EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_NONE};
243 EGLConfig config = {0};
244
245 int ret = SelectEGLConfig(egl_display, config_attrs, buffer_format, &config);
246 if (ret < 0)
247 return ret;
248
249 ALOGI("EGL SelectEGLConfig ok.\n");
250
251 EGLint context_attrs[] = {EGL_CONTEXT_MAJOR_VERSION,
252 3,
253 EGL_CONTEXT_MINOR_VERSION,
254 2,
255#ifndef NDEBUG
256 EGL_CONTEXT_FLAGS_KHR,
257 EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR,
258#endif
259 EGL_NONE};
260
261 *egl_context =
262 eglCreateContext(egl_display, config, EGL_NO_CONTEXT, context_attrs);
263 if (*egl_context == EGL_NO_CONTEXT) {
264 ALOGE("eglCreateContext failed");
265 return -ENXIO;
266 }
267
268 ALOGI("eglCreateContext ok.\n");
269
270 if (!eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE,
271 *egl_context)) {
272 ALOGE("eglMakeCurrent failed");
273 DestroyEglContext(egl_display, egl_context);
274 return -EINVAL;
275 }
276
277 return 0;
278}
279
280} // anonymous namespace
281
282// TODO(hendrikw): When we remove the calls to this in native_window.cpp, move
283// this back into the anonymous namespace
284std::shared_ptr<android::dvr::DisplaySurfaceClient> CreateDisplaySurfaceClient(
285 struct DvrSurfaceParameter* parameters,
286 /*out*/ android::dvr::SystemDisplayMetrics* metrics) {
287 auto client = android::dvr::DisplayClient::Create();
288 if (!client) {
289 ALOGE("Failed to create display client!");
290 return nullptr;
291 }
292
293 const int ret = client->GetDisplayMetrics(metrics);
294 if (ret < 0) {
295 ALOGE("Failed to get display metrics: %s", strerror(-ret));
296 return nullptr;
297 }
298
299 // Parameters that may be modified by the parameters array. Some of these are
300 // here for future expansion.
301 int request_width = -1;
302 int request_height = -1;
303 int request_flags = 0;
304 bool disable_distortion = false;
305 bool disable_stabilization = false;
306 bool disable_cac = false;
307 bool request_visible = true;
308 bool vertical_flip = false;
309 int request_z_order = 0;
310 bool request_exclude_from_blur = false;
311 bool request_blur_behind = true;
312 int request_format = kDefaultDisplaySurfaceFormat;
313 int request_usage = kDefaultDisplaySurfaceUsage;
314 int geometry_type = DVR_SURFACE_GEOMETRY_SINGLE;
315
316 // Handle parameter inputs.
317 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
318 switch (p->key) {
319 case DVR_SURFACE_PARAMETER_WIDTH_IN:
320 request_width = p->value;
321 break;
322 case DVR_SURFACE_PARAMETER_HEIGHT_IN:
323 request_height = p->value;
324 break;
325 case DVR_SURFACE_PARAMETER_DISABLE_DISTORTION_IN:
326 disable_distortion = !!p->value;
327 break;
328 case DVR_SURFACE_PARAMETER_DISABLE_STABILIZATION_IN:
329 disable_stabilization = !!p->value;
330 break;
331 case DVR_SURFACE_PARAMETER_DISABLE_CAC_IN:
332 disable_cac = !!p->value;
333 break;
334 case DVR_SURFACE_PARAMETER_VISIBLE_IN:
335 request_visible = !!p->value;
336 break;
337 case DVR_SURFACE_PARAMETER_Z_ORDER_IN:
338 request_z_order = p->value;
339 break;
340 case DVR_SURFACE_PARAMETER_EXCLUDE_FROM_BLUR_IN:
341 request_exclude_from_blur = !!p->value;
342 break;
343 case DVR_SURFACE_PARAMETER_BLUR_BEHIND_IN:
344 request_blur_behind = !!p->value;
345 break;
346 case DVR_SURFACE_PARAMETER_VERTICAL_FLIP_IN:
347 vertical_flip = !!p->value;
348 break;
349 case DVR_SURFACE_PARAMETER_GEOMETRY_IN:
350 geometry_type = p->value;
351 break;
352 case DVR_SURFACE_PARAMETER_FORMAT_IN:
353 request_format = DvrToHalSurfaceFormat(p->value);
354 break;
355 case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
356 case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
357 case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
358 case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
359 case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
360 case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
361 case DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT:
362 case DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT:
363 case DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT:
364 case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
365 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
366 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
367 case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
368 case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
369 case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
370 case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
371 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
372 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
373 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
374 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
375 break;
376 default:
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800377 ALOGE("Invalid display surface parameter: key=%d value=%" PRId64,
378 p->key, p->value);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800379 return nullptr;
380 }
381 }
382
383 request_flags |= disable_distortion
384 ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_DISTORTION
385 : 0;
386 request_flags |=
387 disable_stabilization ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_EDS : 0;
388 request_flags |=
389 disable_cac ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_CAC : 0;
390 request_flags |= vertical_flip ? DVR_DISPLAY_SURFACE_FLAGS_VERTICAL_FLIP : 0;
391 request_flags |= (geometry_type == DVR_SURFACE_GEOMETRY_SEPARATE_2)
392 ? DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2
393 : 0;
394
395 if (request_width == -1) {
396 request_width = disable_distortion ? metrics->display_native_width
397 : metrics->distorted_width;
398 if (!disable_distortion &&
399 geometry_type == DVR_SURFACE_GEOMETRY_SEPARATE_2) {
400 // The metrics always return the single wide buffer resolution.
401 // When split between eyes, we need to halve the width of the surface.
402 request_width /= 2;
403 }
404 }
405 if (request_height == -1) {
406 request_height = disable_distortion ? metrics->display_native_height
407 : metrics->distorted_height;
408 }
409
410 std::shared_ptr<android::dvr::DisplaySurfaceClient> surface =
411 client->CreateDisplaySurface(request_width, request_height,
412 request_format, request_usage,
413 request_flags);
414 surface->SetAttributes(
415 {{DisplaySurfaceAttributeEnum::Visible,
416 DisplaySurfaceAttributeValue{request_visible}},
417 {DisplaySurfaceAttributeEnum::ZOrder,
418 DisplaySurfaceAttributeValue{request_z_order}},
419 {DisplaySurfaceAttributeEnum::ExcludeFromBlur,
420 DisplaySurfaceAttributeValue{request_exclude_from_blur}},
421 {DisplaySurfaceAttributeEnum::BlurBehind,
422 DisplaySurfaceAttributeValue{request_blur_behind}}});
423
424 // Handle parameter output requests down here so we can return surface info.
425 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
426 switch (p->key) {
427 case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
428 *static_cast<int32_t*>(p->value_out) = metrics->display_native_width;
429 break;
430 case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
431 *static_cast<int32_t*>(p->value_out) = metrics->display_native_height;
432 break;
433 case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
434 *static_cast<int32_t*>(p->value_out) = surface->width();
435 break;
436 case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
437 *static_cast<int32_t*>(p->value_out) = surface->height();
438 break;
439 case DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT:
440 *static_cast<float*>(p->value_out) = metrics->inter_lens_distance_m;
441 break;
442 case DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT:
443 for (int i = 0; i < 4; ++i) {
444 float* float_values_out = static_cast<float*>(p->value_out);
445 float_values_out[i] = metrics->left_fov_lrbt[i];
446 }
447 break;
448 case DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT:
449 for (int i = 0; i < 4; ++i) {
450 float* float_values_out = static_cast<float*>(p->value_out);
451 float_values_out[i] = metrics->right_fov_lrbt[i];
452 }
453 break;
454 case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
455 *static_cast<uint64_t*>(p->value_out) = metrics->vsync_period_ns;
456 break;
457 default:
458 break;
459 }
460 }
461
462 return surface;
463}
464
465extern "C" int dvrGetNativeDisplayDimensions(int* native_width,
466 int* native_height) {
467 int error = 0;
468 auto client = android::dvr::DisplayClient::Create(&error);
469 if (!client) {
470 ALOGE("Failed to create display client!");
471 return error;
472 }
473
474 android::dvr::SystemDisplayMetrics metrics;
475 const int ret = client->GetDisplayMetrics(&metrics);
476
477 if (ret != 0) {
478 ALOGE("Failed to get display metrics!");
479 return ret;
480 }
481
482 *native_width = static_cast<int>(metrics.display_native_width);
483 *native_height = static_cast<int>(metrics.display_native_height);
484 return 0;
485}
486
// Client-side graphics context for a DVR display surface. Inherits from
// ANativeObjectBase so the context itself can be passed anywhere an
// ANativeWindow is expected (e.g. vkCreateAndroidSurfaceKHR below).
struct DvrGraphicsContext : public android::ANativeObjectBase<
                                ANativeWindow, DvrGraphicsContext,
                                android::LightRefBase<DvrGraphicsContext>> {
 public:
  DvrGraphicsContext();
  ~DvrGraphicsContext();

  int graphics_api;  // DVR_SURFACE_GRAPHICS_API_*

  // GL specific members.
  struct {
    EGLDisplay egl_display;
    EGLContext egl_context;
    // False when the application supplied its own context; we then must not
    // destroy it in the destructor.
    bool owns_egl_context;
    GLuint texture_id[kSurfaceViewMaxCount];
    int texture_count;
    GLenum texture_target_type;
  } gl;

  // VK specific members
  struct {
    // These objects are passed in by the application, and are NOT owned
    // by the context.
    VkInstance instance;
    VkPhysicalDevice physical_device;
    VkDevice device;
    VkQueue present_queue;
    uint32_t present_queue_family;
    const VkAllocationCallbacks* allocation_callbacks;
    // These objects are owned by the context.
    ANativeWindow* window;
    VkSurfaceKHR surface;
    VkSwapchainKHR swapchain;
    std::vector<VkImage> swapchain_images;
    std::vector<VkImageView> swapchain_image_views;
  } vk;

  // Display surface, metrics, and buffer management members.
  std::shared_ptr<android::dvr::DisplaySurfaceClient> display_surface;
  android::dvr::SystemDisplayMetrics display_metrics;
  std::unique_ptr<android::dvr::NativeBufferQueue> buffer_queue;
  android::dvr::NativeBufferProducer* current_buffer;
  bool buffer_already_posted;

  // Synchronization members.
  std::unique_ptr<android::dvr::VSyncClient> vsync_client;
  LocalHandle timerfd;

  android::dvr::FrameHistory frame_history;

  // Mapped surface metadata (ie: for pose delivery with presented frames).
  volatile android::dvr::DisplaySurfaceMetadata* surface_metadata;

  // LateLatch support.
  std::unique_ptr<android::dvr::LateLatch> late_latch;

  // Video mesh support.
  std::vector<std::shared_ptr<android::dvr::VideoMeshSurfaceClient>>
      video_mesh_surfaces;

 private:
  // ANativeWindow function implementations
  std::mutex lock_;
  int Post(android::dvr::NativeBufferProducer* buffer, int fence_fd);
  static int SetSwapInterval(ANativeWindow* window, int interval);
  static int DequeueBuffer(ANativeWindow* window, ANativeWindowBuffer** buffer,
                           int* fence_fd);
  static int QueueBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                         int fence_fd);
  static int CancelBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                          int fence_fd);
  static int Query(const ANativeWindow* window, int what, int* value);
  static int Perform(ANativeWindow* window, int operation, ...);
  static int DequeueBuffer_DEPRECATED(ANativeWindow* window,
                                      ANativeWindowBuffer** buffer);
  static int CancelBuffer_DEPRECATED(ANativeWindow* window,
                                     ANativeWindowBuffer* buffer);
  static int QueueBuffer_DEPRECATED(ANativeWindow* window,
                                    ANativeWindowBuffer* buffer);
  static int LockBuffer_DEPRECATED(ANativeWindow* window,
                                   ANativeWindowBuffer* buffer);

  // Non-copyable: owns GL/Vulkan handles and a mutex.
  DvrGraphicsContext(const DvrGraphicsContext&) = delete;
  void operator=(const DvrGraphicsContext&) = delete;
};
572
// Initializes GLES-mode defaults and installs the ANativeWindow entry points
// so this object can be handed out as a native window.
DvrGraphicsContext::DvrGraphicsContext()
    : graphics_api(DVR_GRAPHICS_API_GLES),
      gl{},
      vk{},
      current_buffer(nullptr),
      buffer_already_posted(false),
      surface_metadata(nullptr) {
  // gl{} zero-initializes the struct; set the members whose "empty" values
  // are not all-zero.
  gl.egl_display = EGL_NO_DISPLAY;
  gl.egl_context = EGL_NO_CONTEXT;
  gl.owns_egl_context = true;
  gl.texture_target_type = GL_TEXTURE_2D;

  // Wire the ANativeWindow function table to our static implementations.
  ANativeWindow::setSwapInterval = SetSwapInterval;
  ANativeWindow::dequeueBuffer = DequeueBuffer;
  ANativeWindow::cancelBuffer = CancelBuffer;
  ANativeWindow::queueBuffer = QueueBuffer;
  ANativeWindow::query = Query;
  ANativeWindow::perform = Perform;

  ANativeWindow::dequeueBuffer_DEPRECATED = DequeueBuffer_DEPRECATED;
  ANativeWindow::cancelBuffer_DEPRECATED = CancelBuffer_DEPRECATED;
  ANativeWindow::lockBuffer_DEPRECATED = LockBuffer_DEPRECATED;
  ANativeWindow::queueBuffer_DEPRECATED = QueueBuffer_DEPRECATED;
}
597
// Releases the GL or Vulkan resources owned by this context. Vulkan handles
// supplied by the application (instance/device/queue) are not destroyed.
DvrGraphicsContext::~DvrGraphicsContext() {
  if (graphics_api == DVR_GRAPHICS_API_GLES) {
    glDeleteTextures(gl.texture_count, gl.texture_id);
    // Only tear down the EGL context if we created it ourselves.
    if (gl.owns_egl_context)
      DestroyEglContext(gl.egl_display, &gl.egl_context);
  } else if (graphics_api == DVR_GRAPHICS_API_VULKAN) {
    // NOTE(review): if swapchain creation failed after the VkSurfaceKHR was
    // created, the surface (and vk.window) are not released here — confirm
    // that partial-init teardown is handled elsewhere.
    if (vk.swapchain != VK_NULL_HANDLE) {
      for (auto view : vk.swapchain_image_views) {
        vkDestroyImageView(vk.device, view, vk.allocation_callbacks);
      }
      vkDestroySwapchainKHR(vk.device, vk.swapchain, vk.allocation_callbacks);
      vkDestroySurfaceKHR(vk.instance, vk.surface, vk.allocation_callbacks);
      // NOTE(review): vk.window's assignment is not visible in this file —
      // confirm it is heap-allocated and deletable through ANativeWindow*.
      delete vk.window;
    }
  }
}
614
615int dvrGraphicsContextCreate(struct DvrSurfaceParameter* parameters,
616 DvrGraphicsContext** return_graphics_context) {
617 std::unique_ptr<DvrGraphicsContext> context(new DvrGraphicsContext);
618
619 // See whether we're using GL or Vulkan
620 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
621 switch (p->key) {
622 case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
623 context->graphics_api = p->value;
624 break;
625 }
626 }
627
628 if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
629 context->gl.egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
630 if (context->gl.egl_display == EGL_NO_DISPLAY) {
631 ALOGE("eglGetDisplay failed");
632 return -ENXIO;
633 }
634
635 // See if we should create a GL context
636 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
637 switch (p->key) {
638 case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
639 context->gl.owns_egl_context = p->value != 0;
640 break;
641 }
642 }
643
644 if (context->gl.owns_egl_context) {
645 int ret = CreateEglContext(context->gl.egl_display, parameters,
646 &context->gl.egl_context);
647 if (ret < 0)
648 return ret;
649 } else {
650 context->gl.egl_context = eglGetCurrentContext();
651 }
652
653 int ret = InitGl(context->gl.owns_egl_context);
654 if (ret < 0)
655 return ret;
656 } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
657 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
658 switch (p->key) {
659 case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
660 context->vk.instance = reinterpret_cast<VkInstance>(p->value);
661 break;
662 case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
663 context->vk.physical_device =
664 reinterpret_cast<VkPhysicalDevice>(p->value);
665 break;
666 case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
667 context->vk.device = reinterpret_cast<VkDevice>(p->value);
668 break;
669 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
670 context->vk.present_queue = reinterpret_cast<VkQueue>(p->value);
671 break;
672 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
673 context->vk.present_queue_family = static_cast<uint32_t>(p->value);
674 break;
675 }
676 }
677 } else {
678 ALOGE("Error: invalid graphics API type");
679 return -EINVAL;
680 }
681
682 context->display_surface =
683 CreateDisplaySurfaceClient(parameters, &context->display_metrics);
684 if (!context->display_surface) {
685 ALOGE("Error: failed to create display surface client");
686 return -ECOMM;
687 }
688
689 context->buffer_queue.reset(new android::dvr::NativeBufferQueue(
690 context->gl.egl_display, context->display_surface, kDefaultBufferCount));
691
692 // The way the call sequence works we need 1 more than the buffer queue
693 // capacity to store data for all pending frames
694 context->frame_history.Reset(context->buffer_queue->GetQueueCapacity() + 1);
695
696 context->vsync_client = android::dvr::VSyncClient::Create();
697 if (!context->vsync_client) {
698 ALOGE("Error: failed to create vsync client");
699 return -ECOMM;
700 }
701
702 context->timerfd.Reset(timerfd_create(CLOCK_MONOTONIC, 0));
703 if (!context->timerfd) {
704 ALOGE("Error: timerfd_create failed because: %s", strerror(errno));
705 return -EPERM;
706 }
707
708 context->surface_metadata = context->display_surface->GetMetadataBufferPtr();
709 if (!context->surface_metadata) {
710 ALOGE("Error: surface metadata allocation failed");
711 return -ENOMEM;
712 }
713
714 ALOGI("buffer: %d x %d\n", context->display_surface->width(),
715 context->display_surface->height());
716
717 if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
718 context->gl.texture_count = (context->display_surface->flags() &
719 DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2)
720 ? 2
721 : 1;
722
723 // Create the GL textures.
724 glGenTextures(context->gl.texture_count, context->gl.texture_id);
725
726 // We must make sure that we have at least one buffer allocated at this time
727 // so that anyone who tries to bind an FBO to context->texture_id
728 // will not get an incomplete buffer.
729 context->current_buffer = context->buffer_queue->Dequeue();
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800730 LOG_ALWAYS_FATAL_IF(context->gl.texture_count !=
731 context->current_buffer->buffer()->slice_count());
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800732 for (int i = 0; i < context->gl.texture_count; ++i) {
733 glBindTexture(context->gl.texture_target_type, context->gl.texture_id[i]);
734 glEGLImageTargetTexture2DOES(context->gl.texture_target_type,
735 context->current_buffer->image_khr(i));
736 }
737 glBindTexture(context->gl.texture_target_type, 0);
738 CHECK_GL();
739
740 bool is_late_latch = false;
741
742 // Pass back the texture target type and id.
743 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
744 switch (p->key) {
745 case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
746 is_late_latch = !!p->value;
747 break;
748 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
749 *static_cast<GLenum*>(p->value_out) = context->gl.texture_target_type;
750 break;
751 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
752 for (int i = 0; i < context->gl.texture_count; ++i) {
753 *(static_cast<GLuint*>(p->value_out) + i) =
754 context->gl.texture_id[i];
755 }
756 break;
757 }
758 }
759
760 // Initialize late latch.
761 if (is_late_latch) {
762 LocalHandle fd;
763 int ret = context->display_surface->GetMetadataBufferFd(&fd);
764 if (ret == 0) {
765 context->late_latch.reset(
766 new android::dvr::LateLatch(true, std::move(fd)));
767 } else {
768 ALOGE("Error: failed to get surface metadata buffer fd for late latch");
769 }
770 }
771 } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
772 VkResult result = VK_SUCCESS;
773 // Create a VkSurfaceKHR from the ANativeWindow.
774 VkAndroidSurfaceCreateInfoKHR android_surface_ci = {};
775 android_surface_ci.sType =
776 VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
777 android_surface_ci.window = context.get();
778 result = vkCreateAndroidSurfaceKHR(
779 context->vk.instance, &android_surface_ci,
780 context->vk.allocation_callbacks, &context->vk.surface);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800781 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800782 VkBool32 surface_supports_present = VK_FALSE;
783 result = vkGetPhysicalDeviceSurfaceSupportKHR(
784 context->vk.physical_device, context->vk.present_queue_family,
785 context->vk.surface, &surface_supports_present);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800786 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800787 if (!surface_supports_present) {
788 ALOGE("Error: provided queue family (%u) does not support presentation",
789 context->vk.present_queue_family);
790 return -EPERM;
791 }
792 VkSurfaceCapabilitiesKHR surface_capabilities = {};
793 result = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
794 context->vk.physical_device, context->vk.surface,
795 &surface_capabilities);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800796 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800797 // Determine the swapchain image format.
798 uint32_t device_surface_format_count = 0;
799 result = vkGetPhysicalDeviceSurfaceFormatsKHR(
800 context->vk.physical_device, context->vk.surface,
801 &device_surface_format_count, nullptr);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800802 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800803 std::vector<VkSurfaceFormatKHR> device_surface_formats(
804 device_surface_format_count);
805 result = vkGetPhysicalDeviceSurfaceFormatsKHR(
806 context->vk.physical_device, context->vk.surface,
807 &device_surface_format_count, device_surface_formats.data());
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800808 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
809 LOG_ALWAYS_FATAL_IF(device_surface_format_count == 0U);
810 LOG_ALWAYS_FATAL_IF(device_surface_formats[0].format ==
811 VK_FORMAT_UNDEFINED);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800812 VkSurfaceFormatKHR present_surface_format = device_surface_formats[0];
813 // Determine the swapchain present mode.
814 // TODO(cort): query device_present_modes to make sure MAILBOX is supported.
815 // But according to libvulkan, it is.
816 uint32_t device_present_mode_count = 0;
817 result = vkGetPhysicalDeviceSurfacePresentModesKHR(
818 context->vk.physical_device, context->vk.surface,
819 &device_present_mode_count, nullptr);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800820 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800821 std::vector<VkPresentModeKHR> device_present_modes(
822 device_present_mode_count);
823 result = vkGetPhysicalDeviceSurfacePresentModesKHR(
824 context->vk.physical_device, context->vk.surface,
825 &device_present_mode_count, device_present_modes.data());
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800826 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800827 VkPresentModeKHR present_mode = VK_PRESENT_MODE_MAILBOX_KHR;
828 // Extract presentation surface extents, image count, transform, usages,
829 // etc.
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800830 LOG_ALWAYS_FATAL_IF(
831 static_cast<int>(surface_capabilities.currentExtent.width) == -1 ||
832 static_cast<int>(surface_capabilities.currentExtent.height) == -1);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800833 VkExtent2D swapchain_extent = surface_capabilities.currentExtent;
834
835 uint32_t desired_image_count = surface_capabilities.minImageCount;
836 if (surface_capabilities.maxImageCount > 0 &&
837 desired_image_count > surface_capabilities.maxImageCount) {
838 desired_image_count = surface_capabilities.maxImageCount;
839 }
840 VkSurfaceTransformFlagBitsKHR surface_transform =
841 surface_capabilities.currentTransform;
842 VkImageUsageFlags image_usage_flags =
843 surface_capabilities.supportedUsageFlags;
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800844 LOG_ALWAYS_FATAL_IF(surface_capabilities.supportedCompositeAlpha ==
845 static_cast<VkFlags>(0));
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800846 VkCompositeAlphaFlagBitsKHR composite_alpha =
847 VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
848 if (!(surface_capabilities.supportedCompositeAlpha &
849 VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR)) {
850 composite_alpha = VkCompositeAlphaFlagBitsKHR(
851 static_cast<int>(surface_capabilities.supportedCompositeAlpha) &
852 -static_cast<int>(surface_capabilities.supportedCompositeAlpha));
853 }
854 // Create VkSwapchainKHR
855 VkSwapchainCreateInfoKHR swapchain_ci = {};
856 swapchain_ci.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
857 swapchain_ci.pNext = nullptr;
858 swapchain_ci.surface = context->vk.surface;
859 swapchain_ci.minImageCount = desired_image_count;
860 swapchain_ci.imageFormat = present_surface_format.format;
861 swapchain_ci.imageColorSpace = present_surface_format.colorSpace;
862 swapchain_ci.imageExtent.width = swapchain_extent.width;
863 swapchain_ci.imageExtent.height = swapchain_extent.height;
864 swapchain_ci.imageUsage = image_usage_flags;
865 swapchain_ci.preTransform = surface_transform;
866 swapchain_ci.compositeAlpha = composite_alpha;
867 swapchain_ci.imageArrayLayers = 1;
868 swapchain_ci.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
869 swapchain_ci.queueFamilyIndexCount = 0;
870 swapchain_ci.pQueueFamilyIndices = nullptr;
871 swapchain_ci.presentMode = present_mode;
872 swapchain_ci.clipped = VK_TRUE;
873 swapchain_ci.oldSwapchain = VK_NULL_HANDLE;
874 result = vkCreateSwapchainKHR(context->vk.device, &swapchain_ci,
875 context->vk.allocation_callbacks,
876 &context->vk.swapchain);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800877 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800878 // Create swapchain image views
879 uint32_t image_count = 0;
880 result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
881 &image_count, nullptr);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800882 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
883 LOG_ALWAYS_FATAL_IF(image_count == 0U);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800884 context->vk.swapchain_images.resize(image_count);
885 result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
886 &image_count,
887 context->vk.swapchain_images.data());
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800888 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800889 context->vk.swapchain_image_views.resize(image_count);
890 VkImageViewCreateInfo image_view_ci = {};
891 image_view_ci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
892 image_view_ci.pNext = nullptr;
893 image_view_ci.flags = 0;
894 image_view_ci.format = swapchain_ci.imageFormat;
895 image_view_ci.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
896 image_view_ci.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
897 image_view_ci.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
898 image_view_ci.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
899 image_view_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
900 image_view_ci.subresourceRange.baseMipLevel = 0;
901 image_view_ci.subresourceRange.levelCount = 1;
902 image_view_ci.subresourceRange.baseArrayLayer = 0;
903 image_view_ci.subresourceRange.layerCount = 1;
904 image_view_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
905 image_view_ci.image = VK_NULL_HANDLE; // filled in below
906 for (uint32_t i = 0; i < image_count; ++i) {
907 image_view_ci.image = context->vk.swapchain_images[i];
908 result = vkCreateImageView(context->vk.device, &image_view_ci,
909 context->vk.allocation_callbacks,
910 &context->vk.swapchain_image_views[i]);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800911 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800912 }
913 // Fill in any requested output parameters.
914 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
915 switch (p->key) {
916 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
917 *static_cast<uint32_t*>(p->value_out) = image_count;
918 break;
919 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
920 *static_cast<VkFormat*>(p->value_out) = swapchain_ci.imageFormat;
921 break;
922 }
923 }
924 }
925
926 *return_graphics_context = context.release();
927 return 0;
928}
929
// Destroys a graphics context created by the context-creation entry point.
// Member resources (buffer queue, clients, fds) are released by the context's
// destructors; passing nullptr is a safe no-op (delete on null).
void dvrGraphicsContextDestroy(DvrGraphicsContext* graphics_context) {
  delete graphics_context;
}
933
934// ANativeWindow function implementations. These should only be used
935// by the Vulkan path.
// Posts |buffer| to the display service with |fence_fd| as its acquire fence.
// Vulkan-path only (fatal otherwise). Ownership of |fence_fd| transfers to
// NativeBufferProducer::Post(), which closes it; a duplicate is taken first so
// frame_history can track frame completion independently.
// Returns the result of NativeBufferProducer::Post().
int DvrGraphicsContext::Post(android::dvr::NativeBufferProducer* buffer,
                             int fence_fd) {
  LOG_ALWAYS_FATAL_IF(graphics_api != DVR_GRAPHICS_API_VULKAN);
  ATRACE_NAME(__PRETTY_FUNCTION__);
  ALOGI_IF(TRACE, "DvrGraphicsContext::Post: buffer_id=%d, fence_fd=%d",
           buffer->buffer()->id(), fence_fd);
  ALOGW_IF(!display_surface->visible(),
           "DvrGraphicsContext::Post: Posting buffer on invisible surface!!!");
  // The NativeBufferProducer closes the fence fd, so dup it for tracking in the
  // frame history.
  frame_history.OnFrameSubmit(LocalHandle::AsDuplicate(fence_fd));
  int result = buffer->Post(fence_fd, 0);
  return result;
}
950
// ANativeWindow hook (Vulkan path): the swap interval is accepted but ignored;
// this only validates that the context is in Vulkan mode.
int DvrGraphicsContext::SetSwapInterval(ANativeWindow* window, int interval) {
  ALOGI_IF(TRACE, "SetSwapInterval: window=%p interval=%d", window, interval);
  DvrGraphicsContext* self = getSelf(window);
  // Presumably keeps |self| "used" if the fatal check below is compiled out in
  // some build configuration — confirm before removing.
  (void)self;
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  return android::NO_ERROR;
}
958
// ANativeWindow hook (Vulkan path): hands out the next producer buffer.
// Dequeues a new buffer only if none is current (a buffer may already be
// current if dvrGraphicsPostEarly ran first). The buffer's release fence is
// claimed and its fd returned through |*fence_fd|; ownership of that fd
// transfers to the caller. Runs under lock_.
int DvrGraphicsContext::DequeueBuffer(ANativeWindow* window,
                                      ANativeWindowBuffer** buffer,
                                      int* fence_fd) {
  ATRACE_NAME(__PRETTY_FUNCTION__);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  if (!self->current_buffer) {
    self->current_buffer = self->buffer_queue.get()->Dequeue();
  }
  ATRACE_ASYNC_BEGIN("BufferDraw", self->current_buffer->buffer()->id());
  // Release() relinquishes ownership of the fd to the caller.
  *fence_fd = self->current_buffer->ClaimReleaseFence().Release();
  *buffer = self->current_buffer;

  ALOGI_IF(TRACE, "DvrGraphicsContext::DequeueBuffer: fence_fd=%d", *fence_fd);
  return android::NO_ERROR;
}
978
// ANativeWindow hook (Vulkan path): submits |buffer| for display.
// Normally posts the buffer (passing |fence_fd| ownership to Post()); if the
// buffer was already posted early via dvrGraphicsPostEarly, the duplicate post
// is skipped and the fence fd is closed here instead. Always clears the
// early-post flag and the current-buffer pointer. Runs under lock_.
int DvrGraphicsContext::QueueBuffer(ANativeWindow* window,
                                    ANativeWindowBuffer* buffer, int fence_fd) {
  ATRACE_NAME("NativeWindow::QueueBuffer");
  ALOGI_IF(TRACE, "NativeWindow::QueueBuffer: fence_fd=%d", fence_fd);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  android::dvr::NativeBufferProducer* native_buffer =
      static_cast<android::dvr::NativeBufferProducer*>(buffer);
  ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id());
  bool do_post = true;
  if (self->buffer_already_posted) {
    // Check that the buffer is the one we expect, but handle it if this happens
    // in production by allowing this buffer to post on top of the previous one.
    LOG_FATAL_IF(native_buffer != self->current_buffer);
    if (native_buffer == self->current_buffer) {
      do_post = false;
      if (fence_fd >= 0)
        close(fence_fd);
    }
  }
  if (do_post) {
    ATRACE_ASYNC_BEGIN("BufferPost", native_buffer->buffer()->id());
    self->Post(native_buffer, fence_fd);
  }
  self->buffer_already_posted = false;
  self->current_buffer = nullptr;

  return android::NO_ERROR;
}
1011
// ANativeWindow hook (Vulkan path): returns a dequeued buffer without
// displaying it. The buffer is re-enqueued into the free queue unless it was
// already posted early (in which case it must not be recycled while the
// display may still read it). The fence fd, if any, is closed here.
// Runs under lock_.
int DvrGraphicsContext::CancelBuffer(ANativeWindow* window,
                                     ANativeWindowBuffer* buffer,
                                     int fence_fd) {
  ATRACE_NAME("DvrGraphicsContext::CancelBuffer");
  ALOGI_IF(TRACE, "DvrGraphicsContext::CancelBuffer: fence_fd: %d", fence_fd);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  android::dvr::NativeBufferProducer* native_buffer =
      static_cast<android::dvr::NativeBufferProducer*>(buffer);
  ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id());
  ATRACE_INT("CancelBuffer", native_buffer->buffer()->id());
  bool do_enqueue = true;
  if (self->buffer_already_posted) {
    // Check that the buffer is the one we expect, but handle it if this happens
    // in production by returning this buffer to the buffer queue.
    LOG_FATAL_IF(native_buffer != self->current_buffer);
    if (native_buffer == self->current_buffer) {
      do_enqueue = false;
    }
  }
  if (do_enqueue) {
    self->buffer_queue.get()->Enqueue(native_buffer);
  }
  if (fence_fd >= 0)
    close(fence_fd);
  self->buffer_already_posted = false;
  self->current_buffer = nullptr;

  return android::NO_ERROR;
}
1045
1046int DvrGraphicsContext::Query(const ANativeWindow* window, int what,
1047 int* value) {
1048 DvrGraphicsContext* self = getSelf(const_cast<ANativeWindow*>(window));
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001049 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001050 std::lock_guard<std::mutex> autolock(self->lock_);
1051
1052 switch (what) {
1053 case NATIVE_WINDOW_WIDTH:
1054 *value = self->display_surface->width();
1055 return android::NO_ERROR;
1056 case NATIVE_WINDOW_HEIGHT:
1057 *value = self->display_surface->height();
1058 return android::NO_ERROR;
1059 case NATIVE_WINDOW_FORMAT:
1060 *value = self->display_surface->format();
1061 return android::NO_ERROR;
1062 case NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS:
1063 *value = 1;
1064 return android::NO_ERROR;
1065 case NATIVE_WINDOW_CONCRETE_TYPE:
1066 *value = NATIVE_WINDOW_SURFACE;
1067 return android::NO_ERROR;
1068 case NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER:
1069 *value = 1;
1070 return android::NO_ERROR;
1071 case NATIVE_WINDOW_DEFAULT_WIDTH:
1072 *value = self->display_surface->width();
1073 return android::NO_ERROR;
1074 case NATIVE_WINDOW_DEFAULT_HEIGHT:
1075 *value = self->display_surface->height();
1076 return android::NO_ERROR;
1077 case NATIVE_WINDOW_TRANSFORM_HINT:
1078 *value = 0;
1079 return android::NO_ERROR;
1080 }
1081
1082 *value = 0;
1083 return android::BAD_VALUE;
1084}
1085
1086int DvrGraphicsContext::Perform(ANativeWindow* window, int operation, ...) {
1087 DvrGraphicsContext* self = getSelf(window);
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001088 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001089 std::lock_guard<std::mutex> autolock(self->lock_);
1090
1091 va_list args;
1092 va_start(args, operation);
1093
1094 // TODO(eieio): The following operations are not used at this time. They are
1095 // included here to help document which operations may be useful and what
1096 // parameters they take.
1097 switch (operation) {
1098 case NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: {
1099 int w = va_arg(args, int);
1100 int h = va_arg(args, int);
1101 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: w=%d h=%d", w, h);
1102 return android::NO_ERROR;
1103 }
1104
1105 case NATIVE_WINDOW_SET_BUFFERS_FORMAT: {
1106 int format = va_arg(args, int);
1107 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_FORMAT: format=%d", format);
1108 return android::NO_ERROR;
1109 }
1110
1111 case NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: {
1112 int transform = va_arg(args, int);
1113 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: transform=%d",
1114 transform);
1115 return android::NO_ERROR;
1116 }
1117
1118 case NATIVE_WINDOW_SET_USAGE: {
1119 int usage = va_arg(args, int);
1120 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_USAGE: usage=%d", usage);
1121 return android::NO_ERROR;
1122 }
1123
1124 case NATIVE_WINDOW_CONNECT:
1125 case NATIVE_WINDOW_DISCONNECT:
1126 case NATIVE_WINDOW_SET_BUFFERS_GEOMETRY:
1127 case NATIVE_WINDOW_API_CONNECT:
1128 case NATIVE_WINDOW_API_DISCONNECT:
1129 // TODO(eieio): we should implement these
1130 return android::NO_ERROR;
1131
1132 case NATIVE_WINDOW_SET_BUFFER_COUNT: {
1133 int buffer_count = va_arg(args, int);
1134 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFER_COUNT: bufferCount=%d",
1135 buffer_count);
1136 return android::NO_ERROR;
1137 }
1138 case NATIVE_WINDOW_SET_BUFFERS_DATASPACE: {
1139 android_dataspace_t data_space =
1140 static_cast<android_dataspace_t>(va_arg(args, int));
1141 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DATASPACE: dataSpace=%d",
1142 data_space);
1143 return android::NO_ERROR;
1144 }
1145 case NATIVE_WINDOW_SET_SCALING_MODE: {
1146 int mode = va_arg(args, int);
1147 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_SCALING_MODE: mode=%d", mode);
1148 return android::NO_ERROR;
1149 }
1150
1151 case NATIVE_WINDOW_LOCK:
1152 case NATIVE_WINDOW_UNLOCK_AND_POST:
1153 case NATIVE_WINDOW_SET_CROP:
1154 case NATIVE_WINDOW_SET_BUFFERS_TIMESTAMP:
1155 return android::INVALID_OPERATION;
1156 }
1157
1158 return android::NAME_NOT_FOUND;
1159}
1160
// Legacy fence-less ANativeWindow dequeue hook: delegates to DequeueBuffer and
// discards the returned fence fd.
int DvrGraphicsContext::DequeueBuffer_DEPRECATED(ANativeWindow* window,
                                                 ANativeWindowBuffer** buffer) {
  int fence_fd = -1;
  int ret = DequeueBuffer(window, buffer, &fence_fd);

  // wait for fence
  // NOTE(review): the fd is closed without an actual wait — presumably callers
  // of this deprecated path tolerate an unsignaled fence; confirm.
  if (ret == android::NO_ERROR && fence_fd != -1)
    close(fence_fd);

  return ret;
}
1172
// Legacy fence-less cancel hook: delegates with no fence (-1).
int DvrGraphicsContext::CancelBuffer_DEPRECATED(ANativeWindow* window,
                                                ANativeWindowBuffer* buffer) {
  return CancelBuffer(window, buffer, -1);
}
1177
// Legacy fence-less queue hook: delegates with no fence (-1).
int DvrGraphicsContext::QueueBuffer_DEPRECATED(ANativeWindow* window,
                                               ANativeWindowBuffer* buffer) {
  return QueueBuffer(window, buffer, -1);
}
1182
// Legacy lock hook: intentionally a no-op; always succeeds.
int DvrGraphicsContext::LockBuffer_DEPRECATED(ANativeWindow* /*window*/,
                                              ANativeWindowBuffer* /*buffer*/) {
  return android::NO_ERROR;
}
1187// End ANativeWindow implementation
1188
// Records the render pose for EDS into the shared display-surface metadata
// buffer, in the slot for the currently-dequeued buffer. Requires that a
// frame has been begun (current_buffer set); returns -EPERM otherwise.
// If the orientation carries the DVR_POSE_LATE_LATCH sentinel (detected by
// comparing lane 0), the CPU write is skipped entirely.
int dvrSetEdsPose(DvrGraphicsContext* graphics_context,
                  float32x4_t render_pose_orientation,
                  float32x4_t render_pose_translation) {
  ATRACE_NAME("dvrSetEdsPose");
  if (!graphics_context->current_buffer) {
    ALOGE("dvrBeginRenderFrame must be called before dvrSetEdsPose");
    return -EPERM;
  }

  // When late-latching is enabled, the pose buffer is written by the GPU, so
  // we don't touch it here.
  float32x4_t is_late_latch = DVR_POSE_LATE_LATCH;
  if (render_pose_orientation[0] != is_late_latch[0]) {
    // The metadata buffer is shared with other processes, hence volatile.
    volatile android::dvr::DisplaySurfaceMetadata* data =
        graphics_context->surface_metadata;
    uint32_t buffer_index =
        graphics_context->current_buffer->surface_buffer_index();
    ALOGE_IF(TRACE, "write pose index %d %f %f", buffer_index,
             render_pose_orientation[0], render_pose_orientation[1]);
    data->orientation[buffer_index] = render_pose_orientation;
    data->translation[buffer_index] = render_pose_translation;
  }

  return 0;
}
1214
// Begins a GLES frame with an explicit EDS render pose: dequeues the next
// buffer (unless one is already current, e.g. after dvrGraphicsPostEarly),
// stores the pose via dvrSetEdsPose, and rebinds the context's GL textures to
// the new buffer's EGLImages. GLES-path only (fatal otherwise).
// Returns 0 on success or a negative errno from dvrSetEdsPose.
int dvrBeginRenderFrameEds(DvrGraphicsContext* graphics_context,
                           float32x4_t render_pose_orientation,
                           float32x4_t render_pose_translation) {
  ATRACE_NAME("dvrBeginRenderFrameEds");
  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);
  CHECK_GL();
  // Grab a buffer from the queue and set its pose.
  if (!graphics_context->current_buffer) {
    graphics_context->current_buffer =
        graphics_context->buffer_queue->Dequeue();
  }

  int ret = dvrSetEdsPose(graphics_context, render_pose_orientation,
                          render_pose_translation);
  if (ret < 0)
    return ret;

  ATRACE_ASYNC_BEGIN("BufferDraw",
                     graphics_context->current_buffer->buffer()->id());

  {
    ATRACE_NAME("glEGLImageTargetTexture2DOES");
    // Bind the texture to the latest buffer in the queue.
    for (int i = 0; i < graphics_context->gl.texture_count; ++i) {
      glBindTexture(graphics_context->gl.texture_target_type,
                    graphics_context->gl.texture_id[i]);
      glEGLImageTargetTexture2DOES(
          graphics_context->gl.texture_target_type,
          graphics_context->current_buffer->image_khr(i));
    }
    // Leave no texture bound on exit.
    glBindTexture(graphics_context->gl.texture_target_type, 0);
  }
  CHECK_GL();
  return 0;
}
1250int dvrBeginRenderFrameEdsVk(DvrGraphicsContext* graphics_context,
1251 float32x4_t render_pose_orientation,
1252 float32x4_t render_pose_translation,
1253 VkSemaphore acquire_semaphore,
1254 VkFence acquire_fence,
1255 uint32_t* swapchain_image_index,
1256 VkImageView* swapchain_image_view) {
1257 ATRACE_NAME("dvrBeginRenderFrameEds");
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001258 LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api !=
1259 DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001260
1261 // Acquire a swapchain image. This calls Dequeue() internally.
1262 VkResult result = vkAcquireNextImageKHR(
1263 graphics_context->vk.device, graphics_context->vk.swapchain, UINT64_MAX,
1264 acquire_semaphore, acquire_fence, swapchain_image_index);
1265 if (result != VK_SUCCESS)
1266 return -EINVAL;
1267
1268 // Set the pose pose.
1269 int ret = dvrSetEdsPose(graphics_context, render_pose_orientation,
1270 render_pose_translation);
1271 if (ret < 0)
1272 return ret;
1273 *swapchain_image_view =
1274 graphics_context->vk.swapchain_image_views[*swapchain_image_index];
1275 return 0;
1276}
1277
// Convenience wrapper: begins a GLES frame with EDS disabled via the
// DVR_POSE_NO_EDS sentinel pose.
int dvrBeginRenderFrame(DvrGraphicsContext* graphics_context) {
  return dvrBeginRenderFrameEds(graphics_context, DVR_POSE_NO_EDS,
                                DVR_POSE_NO_EDS);
}
// Convenience wrapper: begins a Vulkan frame with EDS disabled via the
// DVR_POSE_NO_EDS sentinel pose.
int dvrBeginRenderFrameVk(DvrGraphicsContext* graphics_context,
                          VkSemaphore acquire_semaphore, VkFence acquire_fence,
                          uint32_t* swapchain_image_index,
                          VkImageView* swapchain_image_view) {
  return dvrBeginRenderFrameEdsVk(
      graphics_context, DVR_POSE_NO_EDS, DVR_POSE_NO_EDS, acquire_semaphore,
      acquire_fence, swapchain_image_index, swapchain_image_view);
}
1290
1291int dvrBeginRenderFrameLateLatch(DvrGraphicsContext* graphics_context,
1292 uint32_t /*flags*/,
1293 uint32_t target_vsync_count, int num_views,
1294 const float** projection_matrices,
1295 const float** eye_from_head_matrices,
1296 const float** pose_offset_matrices,
1297 uint32_t* out_late_latch_buffer_id) {
1298 if (!graphics_context->late_latch) {
1299 return -EPERM;
1300 }
1301 if (num_views > DVR_GRAPHICS_SURFACE_MAX_VIEWS) {
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001302 ALOGE("dvrBeginRenderFrameLateLatch called with too many views.");
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001303 return -EINVAL;
1304 }
1305 dvrBeginRenderFrameEds(graphics_context, DVR_POSE_LATE_LATCH,
1306 DVR_POSE_LATE_LATCH);
1307 auto& ll = graphics_context->late_latch;
1308 // TODO(jbates) Need to change this shader so that it dumps the single
1309 // captured pose for both eyes into the display surface metadata buffer at
1310 // the right index.
1311 android::dvr::LateLatchInput input;
1312 memset(&input, 0, sizeof(input));
1313 for (int i = 0; i < num_views; ++i) {
1314 memcpy(input.proj_mat + i, *(projection_matrices + i), 16 * sizeof(float));
1315 memcpy(input.eye_from_head_mat + i, *(eye_from_head_matrices + i),
1316 16 * sizeof(float));
1317 memcpy(input.pose_offset + i, *(pose_offset_matrices + i),
1318 16 * sizeof(float));
1319 }
1320 input.pose_index =
1321 target_vsync_count & android::dvr::kPoseAsyncBufferIndexMask;
1322 input.render_pose_index =
1323 graphics_context->current_buffer->surface_buffer_index();
1324 ll->AddLateLatch(input);
1325 *out_late_latch_buffer_id = ll->output_buffer_id();
1326 return 0;
1327}
1328
// Sleeps the calling thread until the next frame should begin: the scheduled
// finish time of the current frame plus |start_delay_ns| (clamped to >= 0),
// implemented with a one-shot absolute timerfd. On success, fills
// |out_next_frame_schedule| (if non-null) with the predicted vsync count and
// scheduled finish time of the upcoming frame and records the frame start in
// the frame history. Returns 0 on success, -1 if the schedule fetch or the
// sleep failed.
extern "C" int dvrGraphicsWaitNextFrame(
    DvrGraphicsContext* graphics_context, int64_t start_delay_ns,
    DvrFrameSchedule* out_next_frame_schedule) {
  start_delay_ns = std::max(start_delay_ns, static_cast<int64_t>(0));

  // We only do one-shot timers:
  int64_t wake_time_ns = 0;

  uint32_t current_frame_vsync;
  int64_t current_frame_scheduled_finish_ns;
  int64_t vsync_period_ns;

  int fetch_schedule_result = graphics_context->vsync_client->GetSchedInfo(
      &vsync_period_ns, &current_frame_scheduled_finish_ns,
      &current_frame_vsync);
  if (fetch_schedule_result == 0) {
    wake_time_ns = current_frame_scheduled_finish_ns + start_delay_ns;
    // If the last wakeup time is still in the future, use it instead to avoid
    // major schedule jumps when applications call WaitNextFrame with
    // aggressive offsets.
    int64_t now = android::dvr::GetSystemClockNs();
    if (android::dvr::TimestampGT(wake_time_ns - vsync_period_ns, now)) {
      wake_time_ns -= vsync_period_ns;
      --current_frame_vsync;
    }
    // If the next wakeup time is in the past, add a vsync period to keep the
    // application on schedule.
    if (android::dvr::TimestampLT(wake_time_ns, now)) {
      wake_time_ns += vsync_period_ns;
      ++current_frame_vsync;
    }
  } else {
    ALOGE("Error getting frame schedule because: %s",
          strerror(-fetch_schedule_result));
    // Sleep for a vsync period to avoid cascading failure.
    wake_time_ns = android::dvr::GetSystemClockNs() +
                   graphics_context->display_metrics.vsync_period_ns;
  }

  // Adjust nsec to [0..999,999,999].
  struct itimerspec wake_time;
  wake_time.it_interval.tv_sec = 0;
  wake_time.it_interval.tv_nsec = 0;
  wake_time.it_value = android::dvr::NsToTimespec(wake_time_ns);
  // Arm the timer at the absolute wake time, then block on the timerfd read
  // until it expires.
  bool sleep_result =
      timerfd_settime(graphics_context->timerfd.Get(), TFD_TIMER_ABSTIME,
                      &wake_time, nullptr) == 0;
  if (sleep_result) {
    ATRACE_NAME("sleep");
    uint64_t expirations = 0;
    sleep_result = read(graphics_context->timerfd.Get(), &expirations,
                        sizeof(uint64_t)) == sizeof(uint64_t);
    if (!sleep_result) {
      ALOGE("Error: timerfd read failed");
    }
  } else {
    ALOGE("Error: timerfd_settime failed because: %s", strerror(errno));
  }

  // Update frame history and compute the schedule for the frame now starting.
  auto& frame_history = graphics_context->frame_history;
  frame_history.CheckForFinishedFrames();
  if (fetch_schedule_result == 0) {
    uint32_t next_frame_vsync =
        current_frame_vsync +
        frame_history.PredictNextFrameVsyncInterval(vsync_period_ns);
    int64_t next_frame_scheduled_finish =
        (wake_time_ns - start_delay_ns) + vsync_period_ns;
    frame_history.OnFrameStart(next_frame_vsync, next_frame_scheduled_finish);
    if (out_next_frame_schedule) {
      out_next_frame_schedule->vsync_count = next_frame_vsync;
      out_next_frame_schedule->scheduled_frame_finish_ns =
          next_frame_scheduled_finish;
    }
  } else {
    // Schedule unknown: record a sentinel frame start.
    frame_history.OnFrameStart(UINT32_MAX, -1);
  }

  return (fetch_schedule_result == 0 && sleep_result) ? 0 : -1;
}
1408
// Posts the current frame's buffer to the display service before rendering
// completes, with an empty fence. GLES-path only (fatal otherwise). Dequeues
// a buffer if none is current, and sets buffer_already_posted so the later
// dvrPresent skips the duplicate post. Idempotent within a frame: does
// nothing if the buffer was already posted early.
extern "C" void dvrGraphicsPostEarly(DvrGraphicsContext* graphics_context) {
  ATRACE_NAME("dvrGraphicsPostEarly");
  ALOGI_IF(TRACE, "dvrGraphicsPostEarly");

  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);

  // Note that this function can be called before or after
  // dvrBeginRenderFrame.
  if (!graphics_context->buffer_already_posted) {
    graphics_context->buffer_already_posted = true;

    if (!graphics_context->current_buffer) {
      graphics_context->current_buffer =
          graphics_context->buffer_queue->Dequeue();
    }

    auto buffer = graphics_context->current_buffer->buffer().get();
    ATRACE_ASYNC_BEGIN("BufferPost", buffer->id());
    // Post with an empty (invalid) fence handle.
    int result = buffer->Post<uint64_t>(LocalHandle(), 0);
    if (result < 0)
      ALOGE("Buffer post failed: %d (%s)", result, strerror(-result));
  }
}
1432
// Submits the current frame for the GLES path: creates a GL fence and
// flushes, posts the buffer with that fence (unless already posted early via
// dvrGraphicsPostEarly), and records the submission in the frame history.
// Clears the current-buffer and early-post state for the next frame.
// Returns 0 on success, -EPERM if no frame was begun.
int dvrPresent(DvrGraphicsContext* graphics_context) {
  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);

  std::array<char, 128> buf;
  snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|",
           graphics_context->frame_history.GetCurrentFrameVsync());
  ATRACE_NAME(buf.data());

  if (!graphics_context->current_buffer) {
    ALOGE("Error: dvrPresent called without dvrBeginRenderFrame");
    return -EPERM;
  }

  // Insert a GL sync and flush so the display can wait for rendering.
  LocalHandle fence_fd =
      android::dvr::CreateGLSyncAndFlush(graphics_context->gl.egl_display);

  ALOGI_IF(TRACE, "PostBuffer: buffer_id=%d, fence_fd=%d",
           graphics_context->current_buffer->buffer()->id(), fence_fd.Get());
  ALOGW_IF(!graphics_context->display_surface->visible(),
           "PostBuffer: Posting buffer on invisible surface!!!");

  auto buffer = graphics_context->current_buffer->buffer().get();
  ATRACE_ASYNC_END("BufferDraw", buffer->id());
  if (!graphics_context->buffer_already_posted) {
    ATRACE_ASYNC_BEGIN("BufferPost", buffer->id());
    int result = buffer->Post<uint64_t>(fence_fd, 0);
    if (result < 0)
      ALOGE("Buffer post failed: %d (%s)", result, strerror(-result));
  }

  graphics_context->frame_history.OnFrameSubmit(std::move(fence_fd));
  graphics_context->buffer_already_posted = false;
  graphics_context->current_buffer = nullptr;
  return 0;
}
1468
// Vulkan counterpart of dvrPresent: presents |swapchain_image_index| via
// vkQueuePresentKHR, waiting on |submit_semaphore| when it is a valid handle.
// Requires a frame begun with dvrBeginRenderFrameVk (-EPERM otherwise).
// Returns 0 on success, -EINVAL if presentation fails.
int dvrPresentVk(DvrGraphicsContext* graphics_context,
                 VkSemaphore submit_semaphore, uint32_t swapchain_image_index) {
  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api !=
                      DVR_GRAPHICS_API_VULKAN);

  std::array<char, 128> buf;
  snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|",
           graphics_context->frame_history.GetCurrentFrameVsync());
  ATRACE_NAME(buf.data());

  if (!graphics_context->current_buffer) {
    ALOGE("Error: dvrPresentVk called without dvrBeginRenderFrameVk");
    return -EPERM;
  }

  // Present the specified image. Internally, this gets a fence from the
  // Vulkan driver and passes it to DvrGraphicsContext::Post(),
  // which in turn passes it to buffer->Post() and adds it to frame_history.
  VkPresentInfoKHR present_info = {};
  present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
  present_info.swapchainCount = 1;
  present_info.pSwapchains = &graphics_context->vk.swapchain;
  present_info.pImageIndices = &swapchain_image_index;
  // Wait on the submit semaphore only when the caller supplied one; the
  // pointer is ignored by the driver when the count is 0.
  present_info.waitSemaphoreCount =
      (submit_semaphore != VK_NULL_HANDLE) ? 1 : 0;
  present_info.pWaitSemaphores = &submit_semaphore;
  VkResult result =
      vkQueuePresentKHR(graphics_context->vk.present_queue, &present_info);
  if (result != VK_SUCCESS) {
    return -EINVAL;
  }

  return 0;
}
1503
1504extern "C" int dvrGetFrameScheduleResults(DvrGraphicsContext* context,
1505 DvrFrameScheduleResult* results,
1506 int in_result_count) {
1507 if (!context || !results)
1508 return -EINVAL;
1509
1510 return context->frame_history.GetPreviousFrameResults(results,
1511 in_result_count);
1512}
1513
// Shows or hides the context's display surface (non-zero |visible| = shown).
extern "C" void dvrGraphicsSurfaceSetVisible(
    DvrGraphicsContext* graphics_context, int visible) {
  graphics_context->display_surface->SetVisible(visible);
}
1518
1519extern "C" int dvrGraphicsSurfaceGetVisible(
1520 DvrGraphicsContext* graphics_context) {
1521 return graphics_context->display_surface->visible() ? 1 : 0;
1522}
1523
// Sets the stacking order of the context's display surface.
extern "C" void dvrGraphicsSurfaceSetZOrder(
    DvrGraphicsContext* graphics_context, int z_order) {
  graphics_context->display_surface->SetZOrder(z_order);
}
1528
// Returns the current stacking order of the context's display surface.
extern "C" int dvrGraphicsSurfaceGetZOrder(
    DvrGraphicsContext* graphics_context) {
  return graphics_context->display_surface->z_order();
}
1533
// Creates a video mesh surface attached to the context's display surface.
// Returns a heap-allocated DvrVideoMeshSurface the caller must release with
// dvrGraphicsVideoMeshSurfaceDestroy, or nullptr if the service channel could
// not be created.
extern "C" DvrVideoMeshSurface* dvrGraphicsVideoMeshSurfaceCreate(
    DvrGraphicsContext* graphics_context) {
  auto display_surface = graphics_context->display_surface;
  // A DisplaySurface must be created prior to the creation of a
  // VideoMeshSurface.
  LOG_ALWAYS_FATAL_IF(display_surface == nullptr);

  LocalChannelHandle surface_handle = display_surface->CreateVideoMeshSurface();
  if (!surface_handle.valid()) {
    return nullptr;
  }

  std::unique_ptr<DvrVideoMeshSurface> surface(new DvrVideoMeshSurface);
  surface->client =
      android::dvr::VideoMeshSurfaceClient::Import(std::move(surface_handle));

  // TODO(jwcai) The next line is not needed...
  // NOTE(review): the return value is unused; kept in case GetProducerQueue()
  // has required side effects — confirm before removing.
  auto producer_queue = surface->client->GetProducerQueue();
  return surface.release();
}
1554
// Destroys a surface returned by dvrGraphicsVideoMeshSurfaceCreate.
// Passing nullptr is a safe no-op (delete on null).
extern "C" void dvrGraphicsVideoMeshSurfaceDestroy(
    DvrVideoMeshSurface* surface) {
  delete surface;
}
1559
// Publishes the 16-float |transform| for |eye| into the video mesh surface's
// shared metadata buffer, in the slot matching the graphics context's current
// buffer. Requires an active frame (current_buffer set).
// NOTE(review): row i of the stored matrix is built from transform[i], [i+4],
// [i+8], [i+12] — i.e. a column-major input is transposed into row vectors;
// confirm the expected layout with the metadata consumer.
extern "C" void dvrGraphicsVideoMeshSurfacePresent(
    DvrGraphicsContext* graphics_context, DvrVideoMeshSurface* surface,
    const int eye, const float* transform) {
  // Shared with other processes, hence volatile.
  volatile android::dvr::VideoMeshSurfaceMetadata* metadata =
      surface->client->GetMetadataBufferPtr();

  const uint32_t graphics_buffer_index =
      graphics_context->current_buffer->surface_buffer_index();

  for (int i = 0; i < 4; ++i) {
    metadata->transform[graphics_buffer_index][eye].val[i] = {
        transform[i + 0], transform[i + 4], transform[i + 8], transform[i + 12],
    };
  }
}
Hendrik Wagenaar10e68eb2017-03-15 13:29:02 -07001575
// Connects to the display service and wraps its shared pose buffer in a
// DvrReadBuffer returned through |*pose_buffer|. Returns 0 on success or
// -ECOMM if the display client could not be created.
extern "C" int dvrGetPoseBuffer(DvrReadBuffer** pose_buffer) {
  auto client = android::dvr::DisplayClient::Create();
  if (!client) {
    ALOGE("Failed to create display client!");
    return -ECOMM;
  }

  *pose_buffer = CreateDvrReadBufferFromBufferConsumer(client->GetPoseBuffer());
  return 0;
}