blob: 37133892a2bc952a70095f1ab2240ecb1994be5e [file] [log] [blame]
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001#include <dvr/graphics.h>
2
Alex Vakulenko4fe60582017-02-02 11:35:59 -08003#include <inttypes.h>
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08004#include <sys/timerfd.h>
5#include <array>
6#include <vector>
7
Alex Vakulenko4fe60582017-02-02 11:35:59 -08008#include <log/log.h>
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08009#include <utils/Trace.h>
10
11#ifndef VK_USE_PLATFORM_ANDROID_KHR
12#define VK_USE_PLATFORM_ANDROID_KHR 1
13#endif
14#include <vulkan/vulkan.h>
15
16#include <pdx/file_handle.h>
17#include <private/dvr/clock_ns.h>
18#include <private/dvr/debug.h>
19#include <private/dvr/display_types.h>
Hendrik Wagenaar10e68eb2017-03-15 13:29:02 -070020#include <private/dvr/dvr_buffer.h>
Alex Vakulenkoe4eec202017-01-27 14:41:04 -080021#include <private/dvr/frame_history.h>
22#include <private/dvr/gl_fenced_flush.h>
23#include <private/dvr/graphics/vr_gl_extensions.h>
24#include <private/dvr/graphics_private.h>
25#include <private/dvr/late_latch.h>
26#include <private/dvr/native_buffer_queue.h>
27#include <private/dvr/sensor_constants.h>
28#include <private/dvr/video_mesh_surface_client.h>
29#include <private/dvr/vsync_client.h>
Mark Urbanusa6c1f922017-03-22 13:11:51 -070030#include <private/dvr/platform_defines.h>
Alex Vakulenkoe4eec202017-01-27 14:41:04 -080031
32#include <android/native_window.h>
33
34#ifndef EGL_CONTEXT_MAJOR_VERSION
35#define EGL_CONTEXT_MAJOR_VERSION 0x3098
36#define EGL_CONTEXT_MINOR_VERSION 0x30FB
37#endif
38
39using android::pdx::LocalHandle;
40using android::pdx::LocalChannelHandle;
41
42using android::dvr::DisplaySurfaceAttributeEnum;
43using android::dvr::DisplaySurfaceAttributeValue;
44
45namespace {
46
// TODO(urbanus): revisit once we have per-platform usage config in place.
// Default gralloc usage for display surface buffers: GPU render target +
// texture source, plus QCOM framebuffer compression (see platform_defines.h).
constexpr int kDefaultDisplaySurfaceUsage =
    GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE |
    GRALLOC_USAGE_QCOM_FRAMEBUFFER_COMPRESSION;
// Default buffer pixel format when no DVR_SURFACE_PARAMETER_FORMAT_IN is set.
constexpr int kDefaultDisplaySurfaceFormat = HAL_PIXEL_FORMAT_RGBA_8888;
// TODO(alexst): revisit this count when HW encode is available for casting.
// Number of buffers allocated in the display surface buffer queue.
constexpr int kDefaultBufferCount = 4;

// Use with dvrBeginRenderFrame to disable EDS for the current frame.
// The first component is a sentinel value (quaternions are unit length, so a
// leading 10.0f cannot be a real pose).
constexpr float32x4_t DVR_POSE_NO_EDS = {10.0f, 0.0f, 0.0f, 0.0f};

// Use with dvrBeginRenderFrame to indicate that GPU late-latching is being used
// for determining the render pose. As above, 20.0f in the first component is a
// sentinel, not a pose value.
constexpr float32x4_t DVR_POSE_LATE_LATCH = {20.0f, 0.0f, 0.0f, 0.0f};
61
62#ifndef NDEBUG
63
64static const char* GetGlCallbackType(GLenum type) {
65 switch (type) {
66 case GL_DEBUG_TYPE_ERROR_KHR:
67 return "ERROR";
68 case GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR_KHR:
69 return "DEPRECATED_BEHAVIOR";
70 case GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR_KHR:
71 return "UNDEFINED_BEHAVIOR";
72 case GL_DEBUG_TYPE_PORTABILITY_KHR:
73 return "PORTABILITY";
74 case GL_DEBUG_TYPE_PERFORMANCE_KHR:
75 return "PERFORMANCE";
76 case GL_DEBUG_TYPE_OTHER_KHR:
77 return "OTHER";
78 default:
79 return "UNKNOWN";
80 }
81}
82
// KHR_debug callback installed by InitGl() in debug builds. Formats the
// driver-supplied message and mirrors it to logcat (at a priority matching the
// GL severity) and to stderr.
static void on_gl_error(GLenum /*source*/, GLenum type, GLuint /*id*/,
                        GLenum severity, GLsizei /*length*/,
                        const char* message, const void* /*user_param*/) {
  char msg[400];
  // NOTE(review): __FILE__/__LINE__ here identify this callback's own source
  // location, not the GL call that triggered the report; the actual error
  // details come from |message|.
  snprintf(msg, sizeof(msg), "[" __FILE__ ":%u] GL %s: %s", __LINE__,
           GetGlCallbackType(type), message);
  switch (severity) {
    case GL_DEBUG_SEVERITY_LOW_KHR:
      ALOGI("%s", msg);
      break;
    case GL_DEBUG_SEVERITY_MEDIUM_KHR:
      ALOGW("%s", msg);
      break;
    case GL_DEBUG_SEVERITY_HIGH_KHR:
      ALOGE("%s", msg);
      break;
      // Other severities (e.g. NOTIFICATION) skip logcat but still reach
      // stderr below.
  }
  fprintf(stderr, "%s\n", msg);
}
102
103#endif
104
105int DvrToHalSurfaceFormat(int dvr_surface_format) {
106 switch (dvr_surface_format) {
107 case DVR_SURFACE_FORMAT_RGBA_8888:
108 return HAL_PIXEL_FORMAT_RGBA_8888;
109 case DVR_SURFACE_FORMAT_RGB_565:
110 return HAL_PIXEL_FORMAT_RGB_565;
111 default:
112 return HAL_PIXEL_FORMAT_RGBA_8888;
113 }
114}
115
116int SelectEGLConfig(EGLDisplay dpy, EGLint* attr, unsigned format,
117 EGLConfig* config) {
118 std::array<EGLint, 4> desired_rgba;
119 switch (format) {
120 case HAL_PIXEL_FORMAT_RGBA_8888:
121 case HAL_PIXEL_FORMAT_BGRA_8888:
122 desired_rgba = {{8, 8, 8, 8}};
123 break;
124 case HAL_PIXEL_FORMAT_RGB_565:
125 desired_rgba = {{5, 6, 5, 0}};
126 break;
127 default:
128 ALOGE("Unsupported framebuffer pixel format %d", format);
129 return -1;
130 }
131
132 EGLint max_configs = 0;
133 if (eglGetConfigs(dpy, NULL, 0, &max_configs) == EGL_FALSE) {
134 ALOGE("No EGL configurations available?!");
135 return -1;
136 }
137
138 std::vector<EGLConfig> configs(max_configs);
139
140 EGLint num_configs;
141 if (eglChooseConfig(dpy, attr, &configs[0], max_configs, &num_configs) ==
142 EGL_FALSE) {
143 ALOGE("eglChooseConfig failed");
144 return -1;
145 }
146
147 std::array<EGLint, 4> config_rgba;
148 for (int i = 0; i < num_configs; i++) {
149 eglGetConfigAttrib(dpy, configs[i], EGL_RED_SIZE, &config_rgba[0]);
150 eglGetConfigAttrib(dpy, configs[i], EGL_GREEN_SIZE, &config_rgba[1]);
151 eglGetConfigAttrib(dpy, configs[i], EGL_BLUE_SIZE, &config_rgba[2]);
152 eglGetConfigAttrib(dpy, configs[i], EGL_ALPHA_SIZE, &config_rgba[3]);
153 if (config_rgba == desired_rgba) {
154 *config = configs[i];
155 return 0;
156 }
157 }
158
159 ALOGE("Cannot find a matching EGL config");
160 return -1;
161}
162
163void DestroyEglContext(EGLDisplay egl_display, EGLContext* egl_context) {
164 if (*egl_context != EGL_NO_CONTEXT) {
165 eglDestroyContext(egl_display, *egl_context);
166 *egl_context = EGL_NO_CONTEXT;
167 }
168}
169
170// Perform internal initialization. A GL context must be bound to the current
171// thread.
172// @param internally_created_context True if we created and own the GL context,
173// false if it was supplied by the application.
174// @return 0 if init was successful, or a negative error code on failure.
175int InitGl(bool internally_created_context) {
176 EGLDisplay egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
177 if (egl_display == EGL_NO_DISPLAY) {
178 ALOGE("eglGetDisplay failed");
179 return -EINVAL;
180 }
181
182 EGLContext egl_context = eglGetCurrentContext();
183 if (egl_context == EGL_NO_CONTEXT) {
184 ALOGE("No GL context bound");
185 return -EINVAL;
186 }
187
188 glGetError(); // Clear the error state
189 GLint major_version, minor_version;
190 glGetIntegerv(GL_MAJOR_VERSION, &major_version);
191 glGetIntegerv(GL_MINOR_VERSION, &minor_version);
192 if (glGetError() != GL_NO_ERROR) {
193 // GL_MAJOR_VERSION and GL_MINOR_VERSION were added in GLES 3. If we get an
194 // error querying them it's almost certainly because it's GLES 1 or 2.
195 ALOGE("Error getting GL version. Must be GLES 3.2 or greater.");
196 return -EINVAL;
197 }
198
199 if (major_version < 3 || (major_version == 3 && minor_version < 2)) {
200 ALOGE("Invalid GL version: %d.%d. Must be GLES 3.2 or greater.",
201 major_version, minor_version);
202 return -EINVAL;
203 }
204
205#ifndef NDEBUG
206 if (internally_created_context) {
207 // Enable verbose GL debug output.
208 glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS_KHR);
209 glDebugMessageCallbackKHR(on_gl_error, NULL);
210 GLuint unused_ids = 0;
211 glDebugMessageControlKHR(GL_DONT_CARE, GL_DONT_CARE, GL_DONT_CARE, 0,
212 &unused_ids, GL_TRUE);
213 }
214#else
215 (void)internally_created_context;
216#endif
217
218 load_gl_extensions();
219 return 0;
220}
221
// Initializes EGL on |egl_display| and creates a GLES 3.2 context whose config
// matches the buffer format requested in |parameters| (default: RGBA_8888).
// On success the new context is made current without draw/read surfaces and
// returned through |egl_context|.
// @return 0 on success, or a negative errno-style code on failure.
int CreateEglContext(EGLDisplay egl_display, DvrSurfaceParameter* parameters,
                     EGLContext* egl_context) {
  *egl_context = EGL_NO_CONTEXT;

  EGLint major, minor;
  if (!eglInitialize(egl_display, &major, &minor)) {
    ALOGE("Failed to initialize EGL");
    return -ENXIO;
  }

  ALOGI("EGL version: %d.%d\n", major, minor);

  int buffer_format = kDefaultDisplaySurfaceFormat;

  // Scan the parameter list for an explicit surface format request.
  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_FORMAT_IN:
        buffer_format = DvrToHalSurfaceFormat(p->value);
        break;
    }
  }

  EGLint config_attrs[] = {EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
                           EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_NONE};
  EGLConfig config = {0};

  int ret = SelectEGLConfig(egl_display, config_attrs, buffer_format, &config);
  if (ret < 0)
    return ret;

  ALOGI("EGL SelectEGLConfig ok.\n");

  // Request a GLES 3.2 context. Debug builds additionally request a debug
  // context so the KHR_debug callback installed in InitGl() can fire.
  EGLint context_attrs[] = {EGL_CONTEXT_MAJOR_VERSION,
                            3,
                            EGL_CONTEXT_MINOR_VERSION,
                            2,
#ifndef NDEBUG
                            EGL_CONTEXT_FLAGS_KHR,
                            EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR,
#endif
                            EGL_NONE};

  *egl_context =
      eglCreateContext(egl_display, config, EGL_NO_CONTEXT, context_attrs);
  if (*egl_context == EGL_NO_CONTEXT) {
    ALOGE("eglCreateContext failed");
    return -ENXIO;
  }

  ALOGI("eglCreateContext ok.\n");

  // Bind the context surfaceless; render targets come from the display
  // surface buffer queue rather than an EGL window surface.
  if (!eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE,
                      *egl_context)) {
    ALOGE("eglMakeCurrent failed");
    DestroyEglContext(egl_display, egl_context);
    return -EINVAL;
  }

  return 0;
}
282
283} // anonymous namespace
284
// TODO(hendrikw): When we remove the calls to this in native_window.cpp, move
// this back into the anonymous namespace
//
// Creates a display surface client configured from the |parameters| array:
// reads the *_IN keys to build the surface request, creates the surface, then
// fills the *_OUT keys with display/surface metrics.
// @param parameters NONE-terminated key/value array; may be null.
// @param metrics Out: receives the system display metrics.
// @return The surface client, or nullptr on failure.
std::shared_ptr<android::dvr::DisplaySurfaceClient> CreateDisplaySurfaceClient(
    struct DvrSurfaceParameter* parameters,
    /*out*/ android::dvr::SystemDisplayMetrics* metrics) {
  auto client = android::dvr::DisplayClient::Create();
  if (!client) {
    ALOGE("Failed to create display client!");
    return nullptr;
  }

  const int ret = client->GetDisplayMetrics(metrics);
  if (ret < 0) {
    ALOGE("Failed to get display metrics: %s", strerror(-ret));
    return nullptr;
  }

  // Parameters that may be modified by the parameters array. Some of these are
  // here for future expansion.
  int request_width = -1;   // -1 means "derive from display metrics" below.
  int request_height = -1;  // Likewise.
  int request_flags = 0;
  bool disable_distortion = false;
  bool disable_stabilization = false;
  bool disable_cac = false;
  bool request_visible = true;
  bool vertical_flip = false;
  int request_z_order = 0;
  bool request_exclude_from_blur = false;
  bool request_blur_behind = true;
  int request_format = kDefaultDisplaySurfaceFormat;
  int request_usage = kDefaultDisplaySurfaceUsage;
  int geometry_type = DVR_SURFACE_GEOMETRY_SINGLE;

  // Handle parameter inputs.
  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_WIDTH_IN:
        request_width = p->value;
        break;
      case DVR_SURFACE_PARAMETER_HEIGHT_IN:
        request_height = p->value;
        break;
      case DVR_SURFACE_PARAMETER_DISABLE_DISTORTION_IN:
        disable_distortion = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_DISABLE_STABILIZATION_IN:
        disable_stabilization = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_DISABLE_CAC_IN:
        disable_cac = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_VISIBLE_IN:
        request_visible = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_Z_ORDER_IN:
        request_z_order = p->value;
        break;
      case DVR_SURFACE_PARAMETER_EXCLUDE_FROM_BLUR_IN:
        request_exclude_from_blur = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_BLUR_BEHIND_IN:
        request_blur_behind = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_VERTICAL_FLIP_IN:
        vertical_flip = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_GEOMETRY_IN:
        geometry_type = p->value;
        break;
      case DVR_SURFACE_PARAMETER_FORMAT_IN:
        request_format = DvrToHalSurfaceFormat(p->value);
        break;
      // Keys handled elsewhere (output keys are filled in the second pass
      // below; graphics-API/Vulkan keys are consumed by the context setup).
      case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
      case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
      case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
      case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
      case DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT:
      case DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT:
      case DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT:
      case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
      case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
      case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
      case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
      case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
      case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
      case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
      case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
      case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
        break;
      default:
        ALOGE("Invalid display surface parameter: key=%d value=%" PRId64,
              p->key, p->value);
        return nullptr;
    }
  }

  // Fold the individual booleans into the surface-creation flag word.
  request_flags |= disable_distortion
                       ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_DISTORTION
                       : 0;
  request_flags |=
      disable_stabilization ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_EDS : 0;
  request_flags |=
      disable_cac ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_CAC : 0;
  request_flags |= vertical_flip ? DVR_DISPLAY_SURFACE_FLAGS_VERTICAL_FLIP : 0;
  request_flags |= (geometry_type == DVR_SURFACE_GEOMETRY_SEPARATE_2)
                       ? DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2
                       : 0;

  // Derive default dimensions from the display metrics when the caller did
  // not request an explicit size.
  if (request_width == -1) {
    request_width = disable_distortion ? metrics->display_native_width
                                       : metrics->distorted_width;
    if (!disable_distortion &&
        geometry_type == DVR_SURFACE_GEOMETRY_SEPARATE_2) {
      // The metrics always return the single wide buffer resolution.
      // When split between eyes, we need to halve the width of the surface.
      request_width /= 2;
    }
  }
  if (request_height == -1) {
    request_height = disable_distortion ? metrics->display_native_height
                                        : metrics->distorted_height;
  }

  std::shared_ptr<android::dvr::DisplaySurfaceClient> surface =
      client->CreateDisplaySurface(request_width, request_height,
                                   request_format, request_usage,
                                   request_flags);
  // Attributes not covered by creation flags are set as a batch afterwards.
  surface->SetAttributes(
      {{DisplaySurfaceAttributeEnum::Visible,
        DisplaySurfaceAttributeValue{request_visible}},
       {DisplaySurfaceAttributeEnum::ZOrder,
        DisplaySurfaceAttributeValue{request_z_order}},
       {DisplaySurfaceAttributeEnum::ExcludeFromBlur,
        DisplaySurfaceAttributeValue{request_exclude_from_blur}},
       {DisplaySurfaceAttributeEnum::BlurBehind,
        DisplaySurfaceAttributeValue{request_blur_behind}}});

  // Handle parameter output requests down here so we can return surface info.
  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
        *static_cast<int32_t*>(p->value_out) = metrics->display_native_width;
        break;
      case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
        *static_cast<int32_t*>(p->value_out) = metrics->display_native_height;
        break;
      case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
        *static_cast<int32_t*>(p->value_out) = surface->width();
        break;
      case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
        *static_cast<int32_t*>(p->value_out) = surface->height();
        break;
      case DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT:
        *static_cast<float*>(p->value_out) = metrics->inter_lens_distance_m;
        break;
      case DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT:
        for (int i = 0; i < 4; ++i) {
          float* float_values_out = static_cast<float*>(p->value_out);
          float_values_out[i] = metrics->left_fov_lrbt[i];
        }
        break;
      case DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT:
        for (int i = 0; i < 4; ++i) {
          float* float_values_out = static_cast<float*>(p->value_out);
          float_values_out[i] = metrics->right_fov_lrbt[i];
        }
        break;
      case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
        *static_cast<uint64_t*>(p->value_out) = metrics->vsync_period_ns;
        break;
      default:
        break;
    }
  }

  return surface;
}
467
468extern "C" int dvrGetNativeDisplayDimensions(int* native_width,
469 int* native_height) {
470 int error = 0;
471 auto client = android::dvr::DisplayClient::Create(&error);
472 if (!client) {
473 ALOGE("Failed to create display client!");
474 return error;
475 }
476
477 android::dvr::SystemDisplayMetrics metrics;
478 const int ret = client->GetDisplayMetrics(&metrics);
479
480 if (ret != 0) {
481 ALOGE("Failed to get display metrics!");
482 return ret;
483 }
484
485 *native_width = static_cast<int>(metrics.display_native_width);
486 *native_height = static_cast<int>(metrics.display_native_height);
487 return 0;
488}
489
// Per-application graphics context. Derives from ANativeWindow (via
// ANativeObjectBase) so it can be handed to code that expects a window, e.g.
// the Vulkan WSI path; the static functions below implement the
// ANativeWindow buffer interface on top of the DVR buffer queue.
struct DvrGraphicsContext : public android::ANativeObjectBase<
                                ANativeWindow, DvrGraphicsContext,
                                android::LightRefBase<DvrGraphicsContext>> {
 public:
  DvrGraphicsContext();
  ~DvrGraphicsContext();

  int graphics_api;  // DVR_SURFACE_GRAPHICS_API_*

  // GL specific members.
  struct {
    EGLDisplay egl_display;
    EGLContext egl_context;
    // True when this context created egl_context (and must destroy it);
    // false when the application supplied its own context.
    bool owns_egl_context;
    GLuint texture_id[kSurfaceViewMaxCount];
    int texture_count;
    GLenum texture_target_type;
  } gl;

  // VK specific members
  struct {
    // These objects are passed in by the application, and are NOT owned
    // by the context.
    VkInstance instance;
    VkPhysicalDevice physical_device;
    VkDevice device;
    VkQueue present_queue;
    uint32_t present_queue_family;
    const VkAllocationCallbacks* allocation_callbacks;
    // These objects are owned by the context.
    ANativeWindow* window;
    VkSurfaceKHR surface;
    VkSwapchainKHR swapchain;
    std::vector<VkImage> swapchain_images;
    std::vector<VkImageView> swapchain_image_views;
  } vk;

  // Display surface, metrics, and buffer management members.
  std::shared_ptr<android::dvr::DisplaySurfaceClient> display_surface;
  android::dvr::SystemDisplayMetrics display_metrics;
  std::unique_ptr<android::dvr::NativeBufferQueue> buffer_queue;
  android::dvr::NativeBufferProducer* current_buffer;
  bool buffer_already_posted;

  // Synchronization members.
  std::unique_ptr<android::dvr::VSyncClient> vsync_client;
  LocalHandle timerfd;

  android::dvr::FrameHistory frame_history;

  // Mapped surface metadata (ie: for pose delivery with presented frames).
  volatile android::dvr::DisplaySurfaceMetadata* surface_metadata;

  // LateLatch support.
  std::unique_ptr<android::dvr::LateLatch> late_latch;

  // Video mesh support.
  std::vector<std::shared_ptr<android::dvr::VideoMeshSurfaceClient>>
      video_mesh_surfaces;

 private:
  // ANativeWindow function implementations
  std::mutex lock_;
  int Post(android::dvr::NativeBufferProducer* buffer, int fence_fd);
  static int SetSwapInterval(ANativeWindow* window, int interval);
  static int DequeueBuffer(ANativeWindow* window, ANativeWindowBuffer** buffer,
                           int* fence_fd);
  static int QueueBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                         int fence_fd);
  static int CancelBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                          int fence_fd);
  static int Query(const ANativeWindow* window, int what, int* value);
  static int Perform(ANativeWindow* window, int operation, ...);
  static int DequeueBuffer_DEPRECATED(ANativeWindow* window,
                                      ANativeWindowBuffer** buffer);
  static int CancelBuffer_DEPRECATED(ANativeWindow* window,
                                     ANativeWindowBuffer* buffer);
  static int QueueBuffer_DEPRECATED(ANativeWindow* window,
                                    ANativeWindowBuffer* buffer);
  static int LockBuffer_DEPRECATED(ANativeWindow* window,
                                   ANativeWindowBuffer* buffer);

  // Non-copyable: the context owns EGL/Vulkan handles and queue state.
  DvrGraphicsContext(const DvrGraphicsContext&) = delete;
  void operator=(const DvrGraphicsContext&) = delete;
};
575
// Default-constructs the context in GLES mode with no live EGL/VK handles.
// The gl/vk structs are zero-initialized, then GL members that have non-zero
// "empty" sentinels are set explicitly.
DvrGraphicsContext::DvrGraphicsContext()
    : graphics_api(DVR_GRAPHICS_API_GLES),
      gl{},
      vk{},
      current_buffer(nullptr),
      buffer_already_posted(false),
      surface_metadata(nullptr) {
  gl.egl_display = EGL_NO_DISPLAY;
  gl.egl_context = EGL_NO_CONTEXT;
  gl.owns_egl_context = true;
  gl.texture_target_type = GL_TEXTURE_2D;

  // Install the ANativeWindow function table so this object can be used
  // wherever a native window is expected.
  ANativeWindow::setSwapInterval = SetSwapInterval;
  ANativeWindow::dequeueBuffer = DequeueBuffer;
  ANativeWindow::cancelBuffer = CancelBuffer;
  ANativeWindow::queueBuffer = QueueBuffer;
  ANativeWindow::query = Query;
  ANativeWindow::perform = Perform;

  // Legacy entry points still invoked by some callers.
  ANativeWindow::dequeueBuffer_DEPRECATED = DequeueBuffer_DEPRECATED;
  ANativeWindow::cancelBuffer_DEPRECATED = CancelBuffer_DEPRECATED;
  ANativeWindow::lockBuffer_DEPRECATED = LockBuffer_DEPRECATED;
  ANativeWindow::queueBuffer_DEPRECATED = QueueBuffer_DEPRECATED;
}
600
// Tears down the API-specific resources this context owns. Note that the
// Vulkan instance/device/queue were supplied by the application and are
// deliberately NOT destroyed here; only the swapchain, its image views, the
// surface, and the context-owned window are released.
DvrGraphicsContext::~DvrGraphicsContext() {
  if (graphics_api == DVR_GRAPHICS_API_GLES) {
    glDeleteTextures(gl.texture_count, gl.texture_id);
    // Only destroy the EGL context if we created it (see owns_egl_context).
    if (gl.owns_egl_context)
      DestroyEglContext(gl.egl_display, &gl.egl_context);
  } else if (graphics_api == DVR_GRAPHICS_API_VULKAN) {
    // swapchain != VK_NULL_HANDLE implies the rest of the VK setup completed.
    if (vk.swapchain != VK_NULL_HANDLE) {
      for (auto view : vk.swapchain_image_views) {
        vkDestroyImageView(vk.device, view, vk.allocation_callbacks);
      }
      vkDestroySwapchainKHR(vk.device, vk.swapchain, vk.allocation_callbacks);
      vkDestroySurfaceKHR(vk.instance, vk.surface, vk.allocation_callbacks);
      delete vk.window;
    }
  }
}
617
618int dvrGraphicsContextCreate(struct DvrSurfaceParameter* parameters,
619 DvrGraphicsContext** return_graphics_context) {
620 std::unique_ptr<DvrGraphicsContext> context(new DvrGraphicsContext);
621
622 // See whether we're using GL or Vulkan
623 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
624 switch (p->key) {
625 case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
626 context->graphics_api = p->value;
627 break;
628 }
629 }
630
631 if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
632 context->gl.egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
633 if (context->gl.egl_display == EGL_NO_DISPLAY) {
634 ALOGE("eglGetDisplay failed");
635 return -ENXIO;
636 }
637
638 // See if we should create a GL context
639 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
640 switch (p->key) {
641 case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
642 context->gl.owns_egl_context = p->value != 0;
643 break;
644 }
645 }
646
647 if (context->gl.owns_egl_context) {
648 int ret = CreateEglContext(context->gl.egl_display, parameters,
649 &context->gl.egl_context);
650 if (ret < 0)
651 return ret;
652 } else {
653 context->gl.egl_context = eglGetCurrentContext();
654 }
655
656 int ret = InitGl(context->gl.owns_egl_context);
657 if (ret < 0)
658 return ret;
659 } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
660 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
661 switch (p->key) {
662 case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
663 context->vk.instance = reinterpret_cast<VkInstance>(p->value);
664 break;
665 case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
666 context->vk.physical_device =
667 reinterpret_cast<VkPhysicalDevice>(p->value);
668 break;
669 case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
670 context->vk.device = reinterpret_cast<VkDevice>(p->value);
671 break;
672 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
673 context->vk.present_queue = reinterpret_cast<VkQueue>(p->value);
674 break;
675 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
676 context->vk.present_queue_family = static_cast<uint32_t>(p->value);
677 break;
678 }
679 }
680 } else {
681 ALOGE("Error: invalid graphics API type");
682 return -EINVAL;
683 }
684
685 context->display_surface =
686 CreateDisplaySurfaceClient(parameters, &context->display_metrics);
687 if (!context->display_surface) {
688 ALOGE("Error: failed to create display surface client");
689 return -ECOMM;
690 }
691
692 context->buffer_queue.reset(new android::dvr::NativeBufferQueue(
693 context->gl.egl_display, context->display_surface, kDefaultBufferCount));
694
695 // The way the call sequence works we need 1 more than the buffer queue
696 // capacity to store data for all pending frames
697 context->frame_history.Reset(context->buffer_queue->GetQueueCapacity() + 1);
698
699 context->vsync_client = android::dvr::VSyncClient::Create();
700 if (!context->vsync_client) {
701 ALOGE("Error: failed to create vsync client");
702 return -ECOMM;
703 }
704
705 context->timerfd.Reset(timerfd_create(CLOCK_MONOTONIC, 0));
706 if (!context->timerfd) {
707 ALOGE("Error: timerfd_create failed because: %s", strerror(errno));
708 return -EPERM;
709 }
710
711 context->surface_metadata = context->display_surface->GetMetadataBufferPtr();
712 if (!context->surface_metadata) {
713 ALOGE("Error: surface metadata allocation failed");
714 return -ENOMEM;
715 }
716
717 ALOGI("buffer: %d x %d\n", context->display_surface->width(),
718 context->display_surface->height());
719
720 if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
721 context->gl.texture_count = (context->display_surface->flags() &
722 DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2)
723 ? 2
724 : 1;
725
726 // Create the GL textures.
727 glGenTextures(context->gl.texture_count, context->gl.texture_id);
728
729 // We must make sure that we have at least one buffer allocated at this time
730 // so that anyone who tries to bind an FBO to context->texture_id
731 // will not get an incomplete buffer.
732 context->current_buffer = context->buffer_queue->Dequeue();
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800733 LOG_ALWAYS_FATAL_IF(context->gl.texture_count !=
734 context->current_buffer->buffer()->slice_count());
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800735 for (int i = 0; i < context->gl.texture_count; ++i) {
736 glBindTexture(context->gl.texture_target_type, context->gl.texture_id[i]);
737 glEGLImageTargetTexture2DOES(context->gl.texture_target_type,
738 context->current_buffer->image_khr(i));
739 }
740 glBindTexture(context->gl.texture_target_type, 0);
741 CHECK_GL();
742
743 bool is_late_latch = false;
744
745 // Pass back the texture target type and id.
746 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
747 switch (p->key) {
748 case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
749 is_late_latch = !!p->value;
750 break;
751 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
752 *static_cast<GLenum*>(p->value_out) = context->gl.texture_target_type;
753 break;
754 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
755 for (int i = 0; i < context->gl.texture_count; ++i) {
756 *(static_cast<GLuint*>(p->value_out) + i) =
757 context->gl.texture_id[i];
758 }
759 break;
760 }
761 }
762
763 // Initialize late latch.
764 if (is_late_latch) {
765 LocalHandle fd;
766 int ret = context->display_surface->GetMetadataBufferFd(&fd);
767 if (ret == 0) {
768 context->late_latch.reset(
769 new android::dvr::LateLatch(true, std::move(fd)));
770 } else {
771 ALOGE("Error: failed to get surface metadata buffer fd for late latch");
772 }
773 }
774 } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
775 VkResult result = VK_SUCCESS;
776 // Create a VkSurfaceKHR from the ANativeWindow.
777 VkAndroidSurfaceCreateInfoKHR android_surface_ci = {};
778 android_surface_ci.sType =
779 VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
780 android_surface_ci.window = context.get();
781 result = vkCreateAndroidSurfaceKHR(
782 context->vk.instance, &android_surface_ci,
783 context->vk.allocation_callbacks, &context->vk.surface);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800784 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800785 VkBool32 surface_supports_present = VK_FALSE;
786 result = vkGetPhysicalDeviceSurfaceSupportKHR(
787 context->vk.physical_device, context->vk.present_queue_family,
788 context->vk.surface, &surface_supports_present);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800789 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800790 if (!surface_supports_present) {
791 ALOGE("Error: provided queue family (%u) does not support presentation",
792 context->vk.present_queue_family);
793 return -EPERM;
794 }
795 VkSurfaceCapabilitiesKHR surface_capabilities = {};
796 result = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
797 context->vk.physical_device, context->vk.surface,
798 &surface_capabilities);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800799 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800800 // Determine the swapchain image format.
801 uint32_t device_surface_format_count = 0;
802 result = vkGetPhysicalDeviceSurfaceFormatsKHR(
803 context->vk.physical_device, context->vk.surface,
804 &device_surface_format_count, nullptr);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800805 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800806 std::vector<VkSurfaceFormatKHR> device_surface_formats(
807 device_surface_format_count);
808 result = vkGetPhysicalDeviceSurfaceFormatsKHR(
809 context->vk.physical_device, context->vk.surface,
810 &device_surface_format_count, device_surface_formats.data());
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800811 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
812 LOG_ALWAYS_FATAL_IF(device_surface_format_count == 0U);
813 LOG_ALWAYS_FATAL_IF(device_surface_formats[0].format ==
814 VK_FORMAT_UNDEFINED);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800815 VkSurfaceFormatKHR present_surface_format = device_surface_formats[0];
816 // Determine the swapchain present mode.
817 // TODO(cort): query device_present_modes to make sure MAILBOX is supported.
818 // But according to libvulkan, it is.
819 uint32_t device_present_mode_count = 0;
820 result = vkGetPhysicalDeviceSurfacePresentModesKHR(
821 context->vk.physical_device, context->vk.surface,
822 &device_present_mode_count, nullptr);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800823 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800824 std::vector<VkPresentModeKHR> device_present_modes(
825 device_present_mode_count);
826 result = vkGetPhysicalDeviceSurfacePresentModesKHR(
827 context->vk.physical_device, context->vk.surface,
828 &device_present_mode_count, device_present_modes.data());
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800829 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800830 VkPresentModeKHR present_mode = VK_PRESENT_MODE_MAILBOX_KHR;
831 // Extract presentation surface extents, image count, transform, usages,
832 // etc.
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800833 LOG_ALWAYS_FATAL_IF(
834 static_cast<int>(surface_capabilities.currentExtent.width) == -1 ||
835 static_cast<int>(surface_capabilities.currentExtent.height) == -1);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800836 VkExtent2D swapchain_extent = surface_capabilities.currentExtent;
837
838 uint32_t desired_image_count = surface_capabilities.minImageCount;
839 if (surface_capabilities.maxImageCount > 0 &&
840 desired_image_count > surface_capabilities.maxImageCount) {
841 desired_image_count = surface_capabilities.maxImageCount;
842 }
843 VkSurfaceTransformFlagBitsKHR surface_transform =
844 surface_capabilities.currentTransform;
845 VkImageUsageFlags image_usage_flags =
846 surface_capabilities.supportedUsageFlags;
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800847 LOG_ALWAYS_FATAL_IF(surface_capabilities.supportedCompositeAlpha ==
848 static_cast<VkFlags>(0));
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800849 VkCompositeAlphaFlagBitsKHR composite_alpha =
850 VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
851 if (!(surface_capabilities.supportedCompositeAlpha &
852 VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR)) {
853 composite_alpha = VkCompositeAlphaFlagBitsKHR(
854 static_cast<int>(surface_capabilities.supportedCompositeAlpha) &
855 -static_cast<int>(surface_capabilities.supportedCompositeAlpha));
856 }
857 // Create VkSwapchainKHR
858 VkSwapchainCreateInfoKHR swapchain_ci = {};
859 swapchain_ci.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
860 swapchain_ci.pNext = nullptr;
861 swapchain_ci.surface = context->vk.surface;
862 swapchain_ci.minImageCount = desired_image_count;
863 swapchain_ci.imageFormat = present_surface_format.format;
864 swapchain_ci.imageColorSpace = present_surface_format.colorSpace;
865 swapchain_ci.imageExtent.width = swapchain_extent.width;
866 swapchain_ci.imageExtent.height = swapchain_extent.height;
867 swapchain_ci.imageUsage = image_usage_flags;
868 swapchain_ci.preTransform = surface_transform;
869 swapchain_ci.compositeAlpha = composite_alpha;
870 swapchain_ci.imageArrayLayers = 1;
871 swapchain_ci.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
872 swapchain_ci.queueFamilyIndexCount = 0;
873 swapchain_ci.pQueueFamilyIndices = nullptr;
874 swapchain_ci.presentMode = present_mode;
875 swapchain_ci.clipped = VK_TRUE;
876 swapchain_ci.oldSwapchain = VK_NULL_HANDLE;
877 result = vkCreateSwapchainKHR(context->vk.device, &swapchain_ci,
878 context->vk.allocation_callbacks,
879 &context->vk.swapchain);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800880 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800881 // Create swapchain image views
882 uint32_t image_count = 0;
883 result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
884 &image_count, nullptr);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800885 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
886 LOG_ALWAYS_FATAL_IF(image_count == 0U);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800887 context->vk.swapchain_images.resize(image_count);
888 result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
889 &image_count,
890 context->vk.swapchain_images.data());
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800891 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800892 context->vk.swapchain_image_views.resize(image_count);
893 VkImageViewCreateInfo image_view_ci = {};
894 image_view_ci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
895 image_view_ci.pNext = nullptr;
896 image_view_ci.flags = 0;
897 image_view_ci.format = swapchain_ci.imageFormat;
898 image_view_ci.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
899 image_view_ci.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
900 image_view_ci.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
901 image_view_ci.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
902 image_view_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
903 image_view_ci.subresourceRange.baseMipLevel = 0;
904 image_view_ci.subresourceRange.levelCount = 1;
905 image_view_ci.subresourceRange.baseArrayLayer = 0;
906 image_view_ci.subresourceRange.layerCount = 1;
907 image_view_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
908 image_view_ci.image = VK_NULL_HANDLE; // filled in below
909 for (uint32_t i = 0; i < image_count; ++i) {
910 image_view_ci.image = context->vk.swapchain_images[i];
911 result = vkCreateImageView(context->vk.device, &image_view_ci,
912 context->vk.allocation_callbacks,
913 &context->vk.swapchain_image_views[i]);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800914 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800915 }
916 // Fill in any requested output parameters.
917 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
918 switch (p->key) {
919 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
920 *static_cast<uint32_t*>(p->value_out) = image_count;
921 break;
922 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
923 *static_cast<VkFormat*>(p->value_out) = swapchain_ci.imageFormat;
924 break;
925 }
926 }
927 }
928
929 *return_graphics_context = context.release();
930 return 0;
931}
932
// Destroys a graphics context created by the dvrGraphicsContextCreate* entry
// points, releasing all resources owned by it. Passing nullptr is a no-op
// (deleting a null pointer is well-defined).
void dvrGraphicsContextDestroy(DvrGraphicsContext* graphics_context) {
  delete graphics_context;
}
936
937// ANativeWindow function implementations. These should only be used
938// by the Vulkan path.
939int DvrGraphicsContext::Post(android::dvr::NativeBufferProducer* buffer,
940 int fence_fd) {
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800941 LOG_ALWAYS_FATAL_IF(graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800942 ATRACE_NAME(__PRETTY_FUNCTION__);
943 ALOGI_IF(TRACE, "DvrGraphicsContext::Post: buffer_id=%d, fence_fd=%d",
944 buffer->buffer()->id(), fence_fd);
945 ALOGW_IF(!display_surface->visible(),
946 "DvrGraphicsContext::Post: Posting buffer on invisible surface!!!");
947 // The NativeBufferProducer closes the fence fd, so dup it for tracking in the
948 // frame history.
949 frame_history.OnFrameSubmit(LocalHandle::AsDuplicate(fence_fd));
950 int result = buffer->Post(fence_fd, 0);
951 return result;
952}
953
954int DvrGraphicsContext::SetSwapInterval(ANativeWindow* window, int interval) {
955 ALOGI_IF(TRACE, "SetSwapInterval: window=%p interval=%d", window, interval);
956 DvrGraphicsContext* self = getSelf(window);
957 (void)self;
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800958 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800959 return android::NO_ERROR;
960}
961
962int DvrGraphicsContext::DequeueBuffer(ANativeWindow* window,
963 ANativeWindowBuffer** buffer,
964 int* fence_fd) {
965 ATRACE_NAME(__PRETTY_FUNCTION__);
966
967 DvrGraphicsContext* self = getSelf(window);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800968 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800969 std::lock_guard<std::mutex> autolock(self->lock_);
970
971 if (!self->current_buffer) {
972 self->current_buffer = self->buffer_queue.get()->Dequeue();
973 }
974 ATRACE_ASYNC_BEGIN("BufferDraw", self->current_buffer->buffer()->id());
975 *fence_fd = self->current_buffer->ClaimReleaseFence().Release();
976 *buffer = self->current_buffer;
977
978 ALOGI_IF(TRACE, "DvrGraphicsContext::DequeueBuffer: fence_fd=%d", *fence_fd);
979 return android::NO_ERROR;
980}
981
982int DvrGraphicsContext::QueueBuffer(ANativeWindow* window,
983 ANativeWindowBuffer* buffer, int fence_fd) {
984 ATRACE_NAME("NativeWindow::QueueBuffer");
985 ALOGI_IF(TRACE, "NativeWindow::QueueBuffer: fence_fd=%d", fence_fd);
986
987 DvrGraphicsContext* self = getSelf(window);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800988 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800989 std::lock_guard<std::mutex> autolock(self->lock_);
990
991 android::dvr::NativeBufferProducer* native_buffer =
992 static_cast<android::dvr::NativeBufferProducer*>(buffer);
993 ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id());
994 bool do_post = true;
995 if (self->buffer_already_posted) {
996 // Check that the buffer is the one we expect, but handle it if this happens
997 // in production by allowing this buffer to post on top of the previous one.
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800998 LOG_FATAL_IF(native_buffer != self->current_buffer);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800999 if (native_buffer == self->current_buffer) {
1000 do_post = false;
1001 if (fence_fd >= 0)
1002 close(fence_fd);
1003 }
1004 }
1005 if (do_post) {
1006 ATRACE_ASYNC_BEGIN("BufferPost", native_buffer->buffer()->id());
1007 self->Post(native_buffer, fence_fd);
1008 }
1009 self->buffer_already_posted = false;
1010 self->current_buffer = nullptr;
1011
1012 return android::NO_ERROR;
1013}
1014
1015int DvrGraphicsContext::CancelBuffer(ANativeWindow* window,
1016 ANativeWindowBuffer* buffer,
1017 int fence_fd) {
1018 ATRACE_NAME("DvrGraphicsContext::CancelBuffer");
1019 ALOGI_IF(TRACE, "DvrGraphicsContext::CancelBuffer: fence_fd: %d", fence_fd);
1020
1021 DvrGraphicsContext* self = getSelf(window);
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001022 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001023 std::lock_guard<std::mutex> autolock(self->lock_);
1024
1025 android::dvr::NativeBufferProducer* native_buffer =
1026 static_cast<android::dvr::NativeBufferProducer*>(buffer);
1027 ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id());
1028 ATRACE_INT("CancelBuffer", native_buffer->buffer()->id());
1029 bool do_enqueue = true;
1030 if (self->buffer_already_posted) {
1031 // Check that the buffer is the one we expect, but handle it if this happens
1032 // in production by returning this buffer to the buffer queue.
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001033 LOG_FATAL_IF(native_buffer != self->current_buffer);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001034 if (native_buffer == self->current_buffer) {
1035 do_enqueue = false;
1036 }
1037 }
1038 if (do_enqueue) {
1039 self->buffer_queue.get()->Enqueue(native_buffer);
1040 }
1041 if (fence_fd >= 0)
1042 close(fence_fd);
1043 self->buffer_already_posted = false;
1044 self->current_buffer = nullptr;
1045
1046 return android::NO_ERROR;
1047}
1048
1049int DvrGraphicsContext::Query(const ANativeWindow* window, int what,
1050 int* value) {
1051 DvrGraphicsContext* self = getSelf(const_cast<ANativeWindow*>(window));
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001052 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001053 std::lock_guard<std::mutex> autolock(self->lock_);
1054
1055 switch (what) {
1056 case NATIVE_WINDOW_WIDTH:
1057 *value = self->display_surface->width();
1058 return android::NO_ERROR;
1059 case NATIVE_WINDOW_HEIGHT:
1060 *value = self->display_surface->height();
1061 return android::NO_ERROR;
1062 case NATIVE_WINDOW_FORMAT:
1063 *value = self->display_surface->format();
1064 return android::NO_ERROR;
1065 case NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS:
1066 *value = 1;
1067 return android::NO_ERROR;
1068 case NATIVE_WINDOW_CONCRETE_TYPE:
1069 *value = NATIVE_WINDOW_SURFACE;
1070 return android::NO_ERROR;
1071 case NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER:
1072 *value = 1;
1073 return android::NO_ERROR;
1074 case NATIVE_WINDOW_DEFAULT_WIDTH:
1075 *value = self->display_surface->width();
1076 return android::NO_ERROR;
1077 case NATIVE_WINDOW_DEFAULT_HEIGHT:
1078 *value = self->display_surface->height();
1079 return android::NO_ERROR;
1080 case NATIVE_WINDOW_TRANSFORM_HINT:
1081 *value = 0;
1082 return android::NO_ERROR;
1083 }
1084
1085 *value = 0;
1086 return android::BAD_VALUE;
1087}
1088
1089int DvrGraphicsContext::Perform(ANativeWindow* window, int operation, ...) {
1090 DvrGraphicsContext* self = getSelf(window);
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001091 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001092 std::lock_guard<std::mutex> autolock(self->lock_);
1093
1094 va_list args;
1095 va_start(args, operation);
1096
1097 // TODO(eieio): The following operations are not used at this time. They are
1098 // included here to help document which operations may be useful and what
1099 // parameters they take.
1100 switch (operation) {
1101 case NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: {
1102 int w = va_arg(args, int);
1103 int h = va_arg(args, int);
1104 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: w=%d h=%d", w, h);
1105 return android::NO_ERROR;
1106 }
1107
1108 case NATIVE_WINDOW_SET_BUFFERS_FORMAT: {
1109 int format = va_arg(args, int);
1110 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_FORMAT: format=%d", format);
1111 return android::NO_ERROR;
1112 }
1113
1114 case NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: {
1115 int transform = va_arg(args, int);
1116 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: transform=%d",
1117 transform);
1118 return android::NO_ERROR;
1119 }
1120
1121 case NATIVE_WINDOW_SET_USAGE: {
1122 int usage = va_arg(args, int);
1123 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_USAGE: usage=%d", usage);
1124 return android::NO_ERROR;
1125 }
1126
1127 case NATIVE_WINDOW_CONNECT:
1128 case NATIVE_WINDOW_DISCONNECT:
1129 case NATIVE_WINDOW_SET_BUFFERS_GEOMETRY:
1130 case NATIVE_WINDOW_API_CONNECT:
1131 case NATIVE_WINDOW_API_DISCONNECT:
1132 // TODO(eieio): we should implement these
1133 return android::NO_ERROR;
1134
1135 case NATIVE_WINDOW_SET_BUFFER_COUNT: {
1136 int buffer_count = va_arg(args, int);
1137 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFER_COUNT: bufferCount=%d",
1138 buffer_count);
1139 return android::NO_ERROR;
1140 }
1141 case NATIVE_WINDOW_SET_BUFFERS_DATASPACE: {
1142 android_dataspace_t data_space =
1143 static_cast<android_dataspace_t>(va_arg(args, int));
1144 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DATASPACE: dataSpace=%d",
1145 data_space);
1146 return android::NO_ERROR;
1147 }
1148 case NATIVE_WINDOW_SET_SCALING_MODE: {
1149 int mode = va_arg(args, int);
1150 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_SCALING_MODE: mode=%d", mode);
1151 return android::NO_ERROR;
1152 }
1153
1154 case NATIVE_WINDOW_LOCK:
1155 case NATIVE_WINDOW_UNLOCK_AND_POST:
1156 case NATIVE_WINDOW_SET_CROP:
1157 case NATIVE_WINDOW_SET_BUFFERS_TIMESTAMP:
1158 return android::INVALID_OPERATION;
1159 }
1160
1161 return android::NAME_NOT_FOUND;
1162}
1163
1164int DvrGraphicsContext::DequeueBuffer_DEPRECATED(ANativeWindow* window,
1165 ANativeWindowBuffer** buffer) {
1166 int fence_fd = -1;
1167 int ret = DequeueBuffer(window, buffer, &fence_fd);
1168
1169 // wait for fence
1170 if (ret == android::NO_ERROR && fence_fd != -1)
1171 close(fence_fd);
1172
1173 return ret;
1174}
1175
// Legacy fence-less cancel: forwards to CancelBuffer with no fence (-1).
int DvrGraphicsContext::CancelBuffer_DEPRECATED(ANativeWindow* window,
                                                ANativeWindowBuffer* buffer) {
  return CancelBuffer(window, buffer, -1);
}
1180
// Legacy fence-less queue: forwards to QueueBuffer with no fence (-1).
int DvrGraphicsContext::QueueBuffer_DEPRECATED(ANativeWindow* window,
                                               ANativeWindowBuffer* buffer) {
  return QueueBuffer(window, buffer, -1);
}
1185
// Legacy lock hook: nothing to do on this backend, always succeeds.
int DvrGraphicsContext::LockBuffer_DEPRECATED(ANativeWindow* /*window*/,
                                              ANativeWindowBuffer* /*buffer*/) {
  return android::NO_ERROR;
}
1190// End ANativeWindow implementation
1191
1192int dvrSetEdsPose(DvrGraphicsContext* graphics_context,
1193 float32x4_t render_pose_orientation,
1194 float32x4_t render_pose_translation) {
1195 ATRACE_NAME("dvrSetEdsPose");
1196 if (!graphics_context->current_buffer) {
1197 ALOGE("dvrBeginRenderFrame must be called before dvrSetEdsPose");
1198 return -EPERM;
1199 }
1200
1201 // When late-latching is enabled, the pose buffer is written by the GPU, so
1202 // we don't touch it here.
1203 float32x4_t is_late_latch = DVR_POSE_LATE_LATCH;
1204 if (render_pose_orientation[0] != is_late_latch[0]) {
1205 volatile android::dvr::DisplaySurfaceMetadata* data =
1206 graphics_context->surface_metadata;
1207 uint32_t buffer_index =
1208 graphics_context->current_buffer->surface_buffer_index();
1209 ALOGE_IF(TRACE, "write pose index %d %f %f", buffer_index,
1210 render_pose_orientation[0], render_pose_orientation[1]);
1211 data->orientation[buffer_index] = render_pose_orientation;
1212 data->translation[buffer_index] = render_pose_translation;
1213 }
1214
1215 return 0;
1216}
1217
1218int dvrBeginRenderFrameEds(DvrGraphicsContext* graphics_context,
1219 float32x4_t render_pose_orientation,
1220 float32x4_t render_pose_translation) {
1221 ATRACE_NAME("dvrBeginRenderFrameEds");
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001222 LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001223 CHECK_GL();
1224 // Grab a buffer from the queue and set its pose.
1225 if (!graphics_context->current_buffer) {
1226 graphics_context->current_buffer =
1227 graphics_context->buffer_queue->Dequeue();
1228 }
1229
1230 int ret = dvrSetEdsPose(graphics_context, render_pose_orientation,
1231 render_pose_translation);
1232 if (ret < 0)
1233 return ret;
1234
1235 ATRACE_ASYNC_BEGIN("BufferDraw",
1236 graphics_context->current_buffer->buffer()->id());
1237
1238 {
1239 ATRACE_NAME("glEGLImageTargetTexture2DOES");
1240 // Bind the texture to the latest buffer in the queue.
1241 for (int i = 0; i < graphics_context->gl.texture_count; ++i) {
1242 glBindTexture(graphics_context->gl.texture_target_type,
1243 graphics_context->gl.texture_id[i]);
1244 glEGLImageTargetTexture2DOES(
1245 graphics_context->gl.texture_target_type,
1246 graphics_context->current_buffer->image_khr(i));
1247 }
1248 glBindTexture(graphics_context->gl.texture_target_type, 0);
1249 }
1250 CHECK_GL();
1251 return 0;
1252}
1253int dvrBeginRenderFrameEdsVk(DvrGraphicsContext* graphics_context,
1254 float32x4_t render_pose_orientation,
1255 float32x4_t render_pose_translation,
1256 VkSemaphore acquire_semaphore,
1257 VkFence acquire_fence,
1258 uint32_t* swapchain_image_index,
1259 VkImageView* swapchain_image_view) {
1260 ATRACE_NAME("dvrBeginRenderFrameEds");
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001261 LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api !=
1262 DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001263
1264 // Acquire a swapchain image. This calls Dequeue() internally.
1265 VkResult result = vkAcquireNextImageKHR(
1266 graphics_context->vk.device, graphics_context->vk.swapchain, UINT64_MAX,
1267 acquire_semaphore, acquire_fence, swapchain_image_index);
1268 if (result != VK_SUCCESS)
1269 return -EINVAL;
1270
1271 // Set the pose pose.
1272 int ret = dvrSetEdsPose(graphics_context, render_pose_orientation,
1273 render_pose_translation);
1274 if (ret < 0)
1275 return ret;
1276 *swapchain_image_view =
1277 graphics_context->vk.swapchain_image_views[*swapchain_image_index];
1278 return 0;
1279}
1280
// Convenience wrapper: begins a GLES frame with EDS disabled (the
// DVR_POSE_NO_EDS sentinel pose).
int dvrBeginRenderFrame(DvrGraphicsContext* graphics_context) {
  return dvrBeginRenderFrameEds(graphics_context, DVR_POSE_NO_EDS,
                                DVR_POSE_NO_EDS);
}
// Convenience wrapper: begins a Vulkan frame with EDS disabled (the
// DVR_POSE_NO_EDS sentinel pose).
int dvrBeginRenderFrameVk(DvrGraphicsContext* graphics_context,
                          VkSemaphore acquire_semaphore, VkFence acquire_fence,
                          uint32_t* swapchain_image_index,
                          VkImageView* swapchain_image_view) {
  return dvrBeginRenderFrameEdsVk(
      graphics_context, DVR_POSE_NO_EDS, DVR_POSE_NO_EDS, acquire_semaphore,
      acquire_fence, swapchain_image_index, swapchain_image_view);
}
1293
1294int dvrBeginRenderFrameLateLatch(DvrGraphicsContext* graphics_context,
1295 uint32_t /*flags*/,
1296 uint32_t target_vsync_count, int num_views,
1297 const float** projection_matrices,
1298 const float** eye_from_head_matrices,
1299 const float** pose_offset_matrices,
1300 uint32_t* out_late_latch_buffer_id) {
1301 if (!graphics_context->late_latch) {
1302 return -EPERM;
1303 }
1304 if (num_views > DVR_GRAPHICS_SURFACE_MAX_VIEWS) {
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001305 ALOGE("dvrBeginRenderFrameLateLatch called with too many views.");
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001306 return -EINVAL;
1307 }
1308 dvrBeginRenderFrameEds(graphics_context, DVR_POSE_LATE_LATCH,
1309 DVR_POSE_LATE_LATCH);
1310 auto& ll = graphics_context->late_latch;
1311 // TODO(jbates) Need to change this shader so that it dumps the single
1312 // captured pose for both eyes into the display surface metadata buffer at
1313 // the right index.
1314 android::dvr::LateLatchInput input;
1315 memset(&input, 0, sizeof(input));
1316 for (int i = 0; i < num_views; ++i) {
1317 memcpy(input.proj_mat + i, *(projection_matrices + i), 16 * sizeof(float));
1318 memcpy(input.eye_from_head_mat + i, *(eye_from_head_matrices + i),
1319 16 * sizeof(float));
1320 memcpy(input.pose_offset + i, *(pose_offset_matrices + i),
1321 16 * sizeof(float));
1322 }
1323 input.pose_index =
1324 target_vsync_count & android::dvr::kPoseAsyncBufferIndexMask;
1325 input.render_pose_index =
1326 graphics_context->current_buffer->surface_buffer_index();
1327 ll->AddLateLatch(input);
1328 *out_late_latch_buffer_id = ll->output_buffer_id();
1329 return 0;
1330}
1331
// Sleeps until the next application frame should start, based on the display
// vsync schedule shifted by |start_delay_ns|, then records the upcoming
// frame's schedule in the frame history and (optionally) reports it through
// |out_next_frame_schedule|. Returns 0 on success, -1 if fetching the
// schedule or sleeping failed.
extern "C" int dvrGraphicsWaitNextFrame(
    DvrGraphicsContext* graphics_context, int64_t start_delay_ns,
    DvrFrameSchedule* out_next_frame_schedule) {
  // Negative delays are clamped to zero.
  start_delay_ns = std::max(start_delay_ns, static_cast<int64_t>(0));

  // We only do one-shot timers:
  int64_t wake_time_ns = 0;

  uint32_t current_frame_vsync;
  int64_t current_frame_scheduled_finish_ns;
  int64_t vsync_period_ns;

  int fetch_schedule_result = graphics_context->vsync_client->GetSchedInfo(
      &vsync_period_ns, &current_frame_scheduled_finish_ns,
      &current_frame_vsync);
  if (fetch_schedule_result == 0) {
    wake_time_ns = current_frame_scheduled_finish_ns + start_delay_ns;
    // If the last wakeup time is still in the future, use it instead to avoid
    // major schedule jumps when applications call WaitNextFrame with
    // aggressive offsets.
    int64_t now = android::dvr::GetSystemClockNs();
    if (android::dvr::TimestampGT(wake_time_ns - vsync_period_ns, now)) {
      wake_time_ns -= vsync_period_ns;
      --current_frame_vsync;
    }
    // If the next wakeup time is in the past, add a vsync period to keep the
    // application on schedule.
    if (android::dvr::TimestampLT(wake_time_ns, now)) {
      wake_time_ns += vsync_period_ns;
      ++current_frame_vsync;
    }
  } else {
    ALOGE("Error getting frame schedule because: %s",
          strerror(-fetch_schedule_result));
    // Sleep for a vsync period to avoid cascading failure.
    wake_time_ns = android::dvr::GetSystemClockNs() +
                   graphics_context->display_metrics.vsync_period_ns;
  }

  // Adjust nsec to [0..999,999,999].
  struct itimerspec wake_time;
  wake_time.it_interval.tv_sec = 0;
  wake_time.it_interval.tv_nsec = 0;
  wake_time.it_value = android::dvr::NsToTimespec(wake_time_ns);
  // Arm a one-shot absolute timer, then block on the timerfd read until it
  // expires.
  bool sleep_result =
      timerfd_settime(graphics_context->timerfd.Get(), TFD_TIMER_ABSTIME,
                      &wake_time, nullptr) == 0;
  if (sleep_result) {
    ATRACE_NAME("sleep");
    uint64_t expirations = 0;
    sleep_result = read(graphics_context->timerfd.Get(), &expirations,
                        sizeof(uint64_t)) == sizeof(uint64_t);
    if (!sleep_result) {
      ALOGE("Error: timerfd read failed");
    }
  } else {
    ALOGE("Error: timerfd_settime failed because: %s", strerror(errno));
  }

  auto& frame_history = graphics_context->frame_history;
  frame_history.CheckForFinishedFrames();
  if (fetch_schedule_result == 0) {
    // Predict how many vsync intervals the next frame will span from recent
    // history, and derive its scheduled finish time from the wake time.
    uint32_t next_frame_vsync =
        current_frame_vsync +
        frame_history.PredictNextFrameVsyncInterval(vsync_period_ns);
    int64_t next_frame_scheduled_finish =
        (wake_time_ns - start_delay_ns) + vsync_period_ns;
    frame_history.OnFrameStart(next_frame_vsync, next_frame_scheduled_finish);
    if (out_next_frame_schedule) {
      out_next_frame_schedule->vsync_count = next_frame_vsync;
      out_next_frame_schedule->scheduled_frame_finish_ns =
          next_frame_scheduled_finish;
    }
  } else {
    // Schedule unknown: record a sentinel frame start so history stays
    // consistent.
    frame_history.OnFrameStart(UINT32_MAX, -1);
  }

  return (fetch_schedule_result == 0 && sleep_result) ? 0 : -1;
}
1411
1412extern "C" void dvrGraphicsPostEarly(DvrGraphicsContext* graphics_context) {
1413 ATRACE_NAME("dvrGraphicsPostEarly");
1414 ALOGI_IF(TRACE, "dvrGraphicsPostEarly");
1415
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001416 LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001417
1418 // Note that this function can be called before or after
1419 // dvrBeginRenderFrame.
1420 if (!graphics_context->buffer_already_posted) {
1421 graphics_context->buffer_already_posted = true;
1422
1423 if (!graphics_context->current_buffer) {
1424 graphics_context->current_buffer =
1425 graphics_context->buffer_queue->Dequeue();
1426 }
1427
1428 auto buffer = graphics_context->current_buffer->buffer().get();
1429 ATRACE_ASYNC_BEGIN("BufferPost", buffer->id());
1430 int result = buffer->Post<uint64_t>(LocalHandle(), 0);
1431 if (result < 0)
1432 ALOGE("Buffer post failed: %d (%s)", result, strerror(-result));
1433 }
1434}
1435
1436int dvrPresent(DvrGraphicsContext* graphics_context) {
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001437 LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001438
1439 std::array<char, 128> buf;
1440 snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|",
1441 graphics_context->frame_history.GetCurrentFrameVsync());
1442 ATRACE_NAME(buf.data());
1443
1444 if (!graphics_context->current_buffer) {
1445 ALOGE("Error: dvrPresent called without dvrBeginRenderFrame");
1446 return -EPERM;
1447 }
1448
1449 LocalHandle fence_fd =
1450 android::dvr::CreateGLSyncAndFlush(graphics_context->gl.egl_display);
1451
1452 ALOGI_IF(TRACE, "PostBuffer: buffer_id=%d, fence_fd=%d",
1453 graphics_context->current_buffer->buffer()->id(), fence_fd.Get());
1454 ALOGW_IF(!graphics_context->display_surface->visible(),
1455 "PostBuffer: Posting buffer on invisible surface!!!");
1456
1457 auto buffer = graphics_context->current_buffer->buffer().get();
1458 ATRACE_ASYNC_END("BufferDraw", buffer->id());
1459 if (!graphics_context->buffer_already_posted) {
1460 ATRACE_ASYNC_BEGIN("BufferPost", buffer->id());
1461 int result = buffer->Post<uint64_t>(fence_fd, 0);
1462 if (result < 0)
1463 ALOGE("Buffer post failed: %d (%s)", result, strerror(-result));
1464 }
1465
1466 graphics_context->frame_history.OnFrameSubmit(std::move(fence_fd));
1467 graphics_context->buffer_already_posted = false;
1468 graphics_context->current_buffer = nullptr;
1469 return 0;
1470}
1471
1472int dvrPresentVk(DvrGraphicsContext* graphics_context,
1473 VkSemaphore submit_semaphore, uint32_t swapchain_image_index) {
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001474 LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api !=
1475 DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001476
1477 std::array<char, 128> buf;
1478 snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|",
1479 graphics_context->frame_history.GetCurrentFrameVsync());
1480 ATRACE_NAME(buf.data());
1481
1482 if (!graphics_context->current_buffer) {
1483 ALOGE("Error: dvrPresentVk called without dvrBeginRenderFrameVk");
1484 return -EPERM;
1485 }
1486
1487 // Present the specified image. Internally, this gets a fence from the
1488 // Vulkan driver and passes it to DvrGraphicsContext::Post(),
1489 // which in turn passes it to buffer->Post() and adds it to frame_history.
1490 VkPresentInfoKHR present_info = {};
1491 present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
1492 present_info.swapchainCount = 1;
1493 present_info.pSwapchains = &graphics_context->vk.swapchain;
1494 present_info.pImageIndices = &swapchain_image_index;
1495 present_info.waitSemaphoreCount =
1496 (submit_semaphore != VK_NULL_HANDLE) ? 1 : 0;
1497 present_info.pWaitSemaphores = &submit_semaphore;
1498 VkResult result =
1499 vkQueuePresentKHR(graphics_context->vk.present_queue, &present_info);
1500 if (result != VK_SUCCESS) {
1501 return -EINVAL;
1502 }
1503
1504 return 0;
1505}
1506
1507extern "C" int dvrGetFrameScheduleResults(DvrGraphicsContext* context,
1508 DvrFrameScheduleResult* results,
1509 int in_result_count) {
1510 if (!context || !results)
1511 return -EINVAL;
1512
1513 return context->frame_history.GetPreviousFrameResults(results,
1514 in_result_count);
1515}
1516
// Shows or hides the context's display surface (nonzero |visible| shows it).
extern "C" void dvrGraphicsSurfaceSetVisible(
    DvrGraphicsContext* graphics_context, int visible) {
  graphics_context->display_surface->SetVisible(visible);
}
1521
// Returns 1 if the context's display surface is visible, 0 otherwise.
extern "C" int dvrGraphicsSurfaceGetVisible(
    DvrGraphicsContext* graphics_context) {
  return graphics_context->display_surface->visible() ? 1 : 0;
}
1526
// Sets the z-order of the context's display surface.
extern "C" void dvrGraphicsSurfaceSetZOrder(
    DvrGraphicsContext* graphics_context, int z_order) {
  graphics_context->display_surface->SetZOrder(z_order);
}
1531
// Returns the z-order of the context's display surface.
extern "C" int dvrGraphicsSurfaceGetZOrder(
    DvrGraphicsContext* graphics_context) {
  return graphics_context->display_surface->z_order();
}
1536
// Creates a video mesh surface attached to the context's display surface.
// Returns nullptr if the display service fails to create the surface channel.
// Ownership of the returned object passes to the caller (destroy with
// dvrGraphicsVideoMeshSurfaceDestroy).
extern "C" DvrVideoMeshSurface* dvrGraphicsVideoMeshSurfaceCreate(
    DvrGraphicsContext* graphics_context) {
  auto display_surface = graphics_context->display_surface;
  // A DisplaySurface must be created prior to the creation of a
  // VideoMeshSurface.
  LOG_ALWAYS_FATAL_IF(display_surface == nullptr);

  LocalChannelHandle surface_handle = display_surface->CreateVideoMeshSurface();
  if (!surface_handle.valid()) {
    return nullptr;
  }

  std::unique_ptr<DvrVideoMeshSurface> surface(new DvrVideoMeshSurface);
  surface->client =
      android::dvr::VideoMeshSurfaceClient::Import(std::move(surface_handle));

  // TODO(jwcai) The next line is not needed...
  // NOTE(review): the result is unused; presumably kept for a client-side
  // initialization side effect of GetProducerQueue() — confirm before removing.
  auto producer_queue = surface->client->GetProducerQueue();
  return surface.release();
}
1557
// Destroys a surface created by dvrGraphicsVideoMeshSurfaceCreate. Safe to
// call with nullptr.
extern "C" void dvrGraphicsVideoMeshSurfaceDestroy(
    DvrVideoMeshSurface* surface) {
  delete surface;
}
1562
1563extern "C" void dvrGraphicsVideoMeshSurfacePresent(
1564 DvrGraphicsContext* graphics_context, DvrVideoMeshSurface* surface,
1565 const int eye, const float* transform) {
1566 volatile android::dvr::VideoMeshSurfaceMetadata* metadata =
1567 surface->client->GetMetadataBufferPtr();
1568
1569 const uint32_t graphics_buffer_index =
1570 graphics_context->current_buffer->surface_buffer_index();
1571
1572 for (int i = 0; i < 4; ++i) {
1573 metadata->transform[graphics_buffer_index][eye].val[i] = {
1574 transform[i + 0], transform[i + 4], transform[i + 8], transform[i + 12],
1575 };
1576 }
1577}
Hendrik Wagenaar10e68eb2017-03-15 13:29:02 -07001578
1579extern "C" int dvrGetPoseBuffer(DvrReadBuffer** pose_buffer) {
1580 auto client = android::dvr::DisplayClient::Create();
1581 if (!client) {
1582 ALOGE("Failed to create display client!");
1583 return -ECOMM;
1584 }
1585
1586 *pose_buffer = CreateDvrReadBufferFromBufferConsumer(client->GetPoseBuffer());
1587 return 0;
1588}