blob: 2abdf8ede0cc8b7c49245472256fc28817e5c805 [file] [log] [blame]
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001#include <dvr/graphics.h>
2
Alex Vakulenko4fe60582017-02-02 11:35:59 -08003#include <inttypes.h>
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08004#include <sys/timerfd.h>
5#include <array>
6#include <vector>
7
Alex Vakulenko4fe60582017-02-02 11:35:59 -08008#include <log/log.h>
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08009#include <utils/Trace.h>
10
11#ifndef VK_USE_PLATFORM_ANDROID_KHR
12#define VK_USE_PLATFORM_ANDROID_KHR 1
13#endif
14#include <vulkan/vulkan.h>
15
16#include <pdx/file_handle.h>
17#include <private/dvr/clock_ns.h>
18#include <private/dvr/debug.h>
19#include <private/dvr/display_types.h>
20#include <private/dvr/frame_history.h>
21#include <private/dvr/gl_fenced_flush.h>
22#include <private/dvr/graphics/vr_gl_extensions.h>
23#include <private/dvr/graphics_private.h>
24#include <private/dvr/late_latch.h>
25#include <private/dvr/native_buffer_queue.h>
26#include <private/dvr/sensor_constants.h>
27#include <private/dvr/video_mesh_surface_client.h>
28#include <private/dvr/vsync_client.h>
Mark Urbanusa6c1f922017-03-22 13:11:51 -070029#include <private/dvr/platform_defines.h>
Alex Vakulenkoe4eec202017-01-27 14:41:04 -080030
31#include <android/native_window.h>
32
33#ifndef EGL_CONTEXT_MAJOR_VERSION
34#define EGL_CONTEXT_MAJOR_VERSION 0x3098
35#define EGL_CONTEXT_MINOR_VERSION 0x30FB
36#endif
37
38using android::pdx::LocalHandle;
39using android::pdx::LocalChannelHandle;
40
41using android::dvr::DisplaySurfaceAttributeEnum;
42using android::dvr::DisplaySurfaceAttributeValue;
43
44namespace {
45
Mark Urbanusa6c1f922017-03-22 13:11:51 -070046// TODO(urbanus): revisit once we have per-platform usage config in place.
// Default gralloc usage for display surfaces: GPU-renderable and
// GPU-sampleable, plus vendor framebuffer compression.
// TODO(urbanus): revisit once we have per-platform usage config in place.
constexpr int kDefaultDisplaySurfaceUsage =
    GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE |
    GRALLOC_USAGE_QCOM_FRAMEBUFFER_COMPRESSION;
// Default pixel format for display surfaces when none is requested.
constexpr int kDefaultDisplaySurfaceFormat = HAL_PIXEL_FORMAT_RGBA_8888;
// Number of buffers in the display surface queue.
// TODO(alexst): revisit this count when HW encode is available for casting.
constexpr int kDefaultBufferCount = 4;

// Use with dvrBeginRenderFrame to disable EDS for the current frame.
// The sentinel is encoded in the first lane of the pose quaternion.
constexpr float32x4_t DVR_POSE_NO_EDS = {10.0f, 0.0f, 0.0f, 0.0f};

// Use with dvrBeginRenderFrame to indicate that GPU late-latching is being used
// for determining the render pose.
constexpr float32x4_t DVR_POSE_LATE_LATCH = {20.0f, 0.0f, 0.0f, 0.0f};
60
61#ifndef NDEBUG
62
63static const char* GetGlCallbackType(GLenum type) {
64 switch (type) {
65 case GL_DEBUG_TYPE_ERROR_KHR:
66 return "ERROR";
67 case GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR_KHR:
68 return "DEPRECATED_BEHAVIOR";
69 case GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR_KHR:
70 return "UNDEFINED_BEHAVIOR";
71 case GL_DEBUG_TYPE_PORTABILITY_KHR:
72 return "PORTABILITY";
73 case GL_DEBUG_TYPE_PERFORMANCE_KHR:
74 return "PERFORMANCE";
75 case GL_DEBUG_TYPE_OTHER_KHR:
76 return "OTHER";
77 default:
78 return "UNKNOWN";
79 }
80}
81
// GL_KHR_debug callback: formats the message and routes it to logcat at a
// priority matching the GL severity, and mirrors it to stderr.
// Note: __LINE__ here expands to the line of the snprintf call in this file,
// not the location that triggered the GL error.
static void on_gl_error(GLenum /*source*/, GLenum type, GLuint /*id*/,
                        GLenum severity, GLsizei /*length*/,
                        const char* message, const void* /*user_param*/) {
  char msg[400];
  snprintf(msg, sizeof(msg), "[" __FILE__ ":%u] GL %s: %s", __LINE__,
           GetGlCallbackType(type), message);
  // Map GL severity onto Android log priorities. Severities outside these
  // three (e.g. notifications) are not sent to logcat but still hit stderr.
  switch (severity) {
    case GL_DEBUG_SEVERITY_LOW_KHR:
      ALOGI("%s", msg);
      break;
    case GL_DEBUG_SEVERITY_MEDIUM_KHR:
      ALOGW("%s", msg);
      break;
    case GL_DEBUG_SEVERITY_HIGH_KHR:
      ALOGE("%s", msg);
      break;
  }
  fprintf(stderr, "%s\n", msg);
}
101
102#endif
103
104int DvrToHalSurfaceFormat(int dvr_surface_format) {
105 switch (dvr_surface_format) {
106 case DVR_SURFACE_FORMAT_RGBA_8888:
107 return HAL_PIXEL_FORMAT_RGBA_8888;
108 case DVR_SURFACE_FORMAT_RGB_565:
109 return HAL_PIXEL_FORMAT_RGB_565;
110 default:
111 return HAL_PIXEL_FORMAT_RGBA_8888;
112 }
113}
114
115int SelectEGLConfig(EGLDisplay dpy, EGLint* attr, unsigned format,
116 EGLConfig* config) {
117 std::array<EGLint, 4> desired_rgba;
118 switch (format) {
119 case HAL_PIXEL_FORMAT_RGBA_8888:
120 case HAL_PIXEL_FORMAT_BGRA_8888:
121 desired_rgba = {{8, 8, 8, 8}};
122 break;
123 case HAL_PIXEL_FORMAT_RGB_565:
124 desired_rgba = {{5, 6, 5, 0}};
125 break;
126 default:
127 ALOGE("Unsupported framebuffer pixel format %d", format);
128 return -1;
129 }
130
131 EGLint max_configs = 0;
132 if (eglGetConfigs(dpy, NULL, 0, &max_configs) == EGL_FALSE) {
133 ALOGE("No EGL configurations available?!");
134 return -1;
135 }
136
137 std::vector<EGLConfig> configs(max_configs);
138
139 EGLint num_configs;
140 if (eglChooseConfig(dpy, attr, &configs[0], max_configs, &num_configs) ==
141 EGL_FALSE) {
142 ALOGE("eglChooseConfig failed");
143 return -1;
144 }
145
146 std::array<EGLint, 4> config_rgba;
147 for (int i = 0; i < num_configs; i++) {
148 eglGetConfigAttrib(dpy, configs[i], EGL_RED_SIZE, &config_rgba[0]);
149 eglGetConfigAttrib(dpy, configs[i], EGL_GREEN_SIZE, &config_rgba[1]);
150 eglGetConfigAttrib(dpy, configs[i], EGL_BLUE_SIZE, &config_rgba[2]);
151 eglGetConfigAttrib(dpy, configs[i], EGL_ALPHA_SIZE, &config_rgba[3]);
152 if (config_rgba == desired_rgba) {
153 *config = configs[i];
154 return 0;
155 }
156 }
157
158 ALOGE("Cannot find a matching EGL config");
159 return -1;
160}
161
162void DestroyEglContext(EGLDisplay egl_display, EGLContext* egl_context) {
163 if (*egl_context != EGL_NO_CONTEXT) {
164 eglDestroyContext(egl_display, *egl_context);
165 *egl_context = EGL_NO_CONTEXT;
166 }
167}
168
169// Perform internal initialization. A GL context must be bound to the current
170// thread.
171// @param internally_created_context True if we created and own the GL context,
172// false if it was supplied by the application.
173// @return 0 if init was successful, or a negative error code on failure.
174int InitGl(bool internally_created_context) {
175 EGLDisplay egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
176 if (egl_display == EGL_NO_DISPLAY) {
177 ALOGE("eglGetDisplay failed");
178 return -EINVAL;
179 }
180
181 EGLContext egl_context = eglGetCurrentContext();
182 if (egl_context == EGL_NO_CONTEXT) {
183 ALOGE("No GL context bound");
184 return -EINVAL;
185 }
186
187 glGetError(); // Clear the error state
188 GLint major_version, minor_version;
189 glGetIntegerv(GL_MAJOR_VERSION, &major_version);
190 glGetIntegerv(GL_MINOR_VERSION, &minor_version);
191 if (glGetError() != GL_NO_ERROR) {
192 // GL_MAJOR_VERSION and GL_MINOR_VERSION were added in GLES 3. If we get an
193 // error querying them it's almost certainly because it's GLES 1 or 2.
194 ALOGE("Error getting GL version. Must be GLES 3.2 or greater.");
195 return -EINVAL;
196 }
197
198 if (major_version < 3 || (major_version == 3 && minor_version < 2)) {
199 ALOGE("Invalid GL version: %d.%d. Must be GLES 3.2 or greater.",
200 major_version, minor_version);
201 return -EINVAL;
202 }
203
204#ifndef NDEBUG
205 if (internally_created_context) {
206 // Enable verbose GL debug output.
207 glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS_KHR);
208 glDebugMessageCallbackKHR(on_gl_error, NULL);
209 GLuint unused_ids = 0;
210 glDebugMessageControlKHR(GL_DONT_CARE, GL_DONT_CARE, GL_DONT_CARE, 0,
211 &unused_ids, GL_TRUE);
212 }
213#else
214 (void)internally_created_context;
215#endif
216
217 load_gl_extensions();
218 return 0;
219}
220
221int CreateEglContext(EGLDisplay egl_display, DvrSurfaceParameter* parameters,
222 EGLContext* egl_context) {
223 *egl_context = EGL_NO_CONTEXT;
224
225 EGLint major, minor;
226 if (!eglInitialize(egl_display, &major, &minor)) {
227 ALOGE("Failed to initialize EGL");
228 return -ENXIO;
229 }
230
231 ALOGI("EGL version: %d.%d\n", major, minor);
232
233 int buffer_format = kDefaultDisplaySurfaceFormat;
234
235 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
236 switch (p->key) {
237 case DVR_SURFACE_PARAMETER_FORMAT_IN:
238 buffer_format = DvrToHalSurfaceFormat(p->value);
239 break;
240 }
241 }
242
243 EGLint config_attrs[] = {EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
244 EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_NONE};
245 EGLConfig config = {0};
246
247 int ret = SelectEGLConfig(egl_display, config_attrs, buffer_format, &config);
248 if (ret < 0)
249 return ret;
250
251 ALOGI("EGL SelectEGLConfig ok.\n");
252
253 EGLint context_attrs[] = {EGL_CONTEXT_MAJOR_VERSION,
254 3,
255 EGL_CONTEXT_MINOR_VERSION,
256 2,
257#ifndef NDEBUG
258 EGL_CONTEXT_FLAGS_KHR,
259 EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR,
260#endif
261 EGL_NONE};
262
263 *egl_context =
264 eglCreateContext(egl_display, config, EGL_NO_CONTEXT, context_attrs);
265 if (*egl_context == EGL_NO_CONTEXT) {
266 ALOGE("eglCreateContext failed");
267 return -ENXIO;
268 }
269
270 ALOGI("eglCreateContext ok.\n");
271
272 if (!eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE,
273 *egl_context)) {
274 ALOGE("eglMakeCurrent failed");
275 DestroyEglContext(egl_display, egl_context);
276 return -EINVAL;
277 }
278
279 return 0;
280}
281
282} // anonymous namespace
283
284// TODO(hendrikw): When we remove the calls to this in native_window.cpp, move
285// this back into the anonymous namespace
286std::shared_ptr<android::dvr::DisplaySurfaceClient> CreateDisplaySurfaceClient(
287 struct DvrSurfaceParameter* parameters,
288 /*out*/ android::dvr::SystemDisplayMetrics* metrics) {
289 auto client = android::dvr::DisplayClient::Create();
290 if (!client) {
291 ALOGE("Failed to create display client!");
292 return nullptr;
293 }
294
295 const int ret = client->GetDisplayMetrics(metrics);
296 if (ret < 0) {
297 ALOGE("Failed to get display metrics: %s", strerror(-ret));
298 return nullptr;
299 }
300
301 // Parameters that may be modified by the parameters array. Some of these are
302 // here for future expansion.
303 int request_width = -1;
304 int request_height = -1;
305 int request_flags = 0;
306 bool disable_distortion = false;
307 bool disable_stabilization = false;
308 bool disable_cac = false;
309 bool request_visible = true;
310 bool vertical_flip = false;
311 int request_z_order = 0;
312 bool request_exclude_from_blur = false;
313 bool request_blur_behind = true;
314 int request_format = kDefaultDisplaySurfaceFormat;
315 int request_usage = kDefaultDisplaySurfaceUsage;
316 int geometry_type = DVR_SURFACE_GEOMETRY_SINGLE;
317
318 // Handle parameter inputs.
319 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
320 switch (p->key) {
321 case DVR_SURFACE_PARAMETER_WIDTH_IN:
322 request_width = p->value;
323 break;
324 case DVR_SURFACE_PARAMETER_HEIGHT_IN:
325 request_height = p->value;
326 break;
327 case DVR_SURFACE_PARAMETER_DISABLE_DISTORTION_IN:
328 disable_distortion = !!p->value;
329 break;
330 case DVR_SURFACE_PARAMETER_DISABLE_STABILIZATION_IN:
331 disable_stabilization = !!p->value;
332 break;
333 case DVR_SURFACE_PARAMETER_DISABLE_CAC_IN:
334 disable_cac = !!p->value;
335 break;
336 case DVR_SURFACE_PARAMETER_VISIBLE_IN:
337 request_visible = !!p->value;
338 break;
339 case DVR_SURFACE_PARAMETER_Z_ORDER_IN:
340 request_z_order = p->value;
341 break;
342 case DVR_SURFACE_PARAMETER_EXCLUDE_FROM_BLUR_IN:
343 request_exclude_from_blur = !!p->value;
344 break;
345 case DVR_SURFACE_PARAMETER_BLUR_BEHIND_IN:
346 request_blur_behind = !!p->value;
347 break;
348 case DVR_SURFACE_PARAMETER_VERTICAL_FLIP_IN:
349 vertical_flip = !!p->value;
350 break;
351 case DVR_SURFACE_PARAMETER_GEOMETRY_IN:
352 geometry_type = p->value;
353 break;
354 case DVR_SURFACE_PARAMETER_FORMAT_IN:
355 request_format = DvrToHalSurfaceFormat(p->value);
356 break;
357 case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
358 case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
359 case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
360 case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
361 case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
362 case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
363 case DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT:
364 case DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT:
365 case DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT:
366 case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
367 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
368 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
369 case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
370 case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
371 case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
372 case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
373 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
374 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
375 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
376 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
377 break;
378 default:
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800379 ALOGE("Invalid display surface parameter: key=%d value=%" PRId64,
380 p->key, p->value);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800381 return nullptr;
382 }
383 }
384
385 request_flags |= disable_distortion
386 ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_DISTORTION
387 : 0;
388 request_flags |=
389 disable_stabilization ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_EDS : 0;
390 request_flags |=
391 disable_cac ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_CAC : 0;
392 request_flags |= vertical_flip ? DVR_DISPLAY_SURFACE_FLAGS_VERTICAL_FLIP : 0;
393 request_flags |= (geometry_type == DVR_SURFACE_GEOMETRY_SEPARATE_2)
394 ? DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2
395 : 0;
396
397 if (request_width == -1) {
398 request_width = disable_distortion ? metrics->display_native_width
399 : metrics->distorted_width;
400 if (!disable_distortion &&
401 geometry_type == DVR_SURFACE_GEOMETRY_SEPARATE_2) {
402 // The metrics always return the single wide buffer resolution.
403 // When split between eyes, we need to halve the width of the surface.
404 request_width /= 2;
405 }
406 }
407 if (request_height == -1) {
408 request_height = disable_distortion ? metrics->display_native_height
409 : metrics->distorted_height;
410 }
411
412 std::shared_ptr<android::dvr::DisplaySurfaceClient> surface =
413 client->CreateDisplaySurface(request_width, request_height,
414 request_format, request_usage,
415 request_flags);
416 surface->SetAttributes(
417 {{DisplaySurfaceAttributeEnum::Visible,
418 DisplaySurfaceAttributeValue{request_visible}},
419 {DisplaySurfaceAttributeEnum::ZOrder,
420 DisplaySurfaceAttributeValue{request_z_order}},
421 {DisplaySurfaceAttributeEnum::ExcludeFromBlur,
422 DisplaySurfaceAttributeValue{request_exclude_from_blur}},
423 {DisplaySurfaceAttributeEnum::BlurBehind,
424 DisplaySurfaceAttributeValue{request_blur_behind}}});
425
426 // Handle parameter output requests down here so we can return surface info.
427 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
428 switch (p->key) {
429 case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
430 *static_cast<int32_t*>(p->value_out) = metrics->display_native_width;
431 break;
432 case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
433 *static_cast<int32_t*>(p->value_out) = metrics->display_native_height;
434 break;
435 case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
436 *static_cast<int32_t*>(p->value_out) = surface->width();
437 break;
438 case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
439 *static_cast<int32_t*>(p->value_out) = surface->height();
440 break;
441 case DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT:
442 *static_cast<float*>(p->value_out) = metrics->inter_lens_distance_m;
443 break;
444 case DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT:
445 for (int i = 0; i < 4; ++i) {
446 float* float_values_out = static_cast<float*>(p->value_out);
447 float_values_out[i] = metrics->left_fov_lrbt[i];
448 }
449 break;
450 case DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT:
451 for (int i = 0; i < 4; ++i) {
452 float* float_values_out = static_cast<float*>(p->value_out);
453 float_values_out[i] = metrics->right_fov_lrbt[i];
454 }
455 break;
456 case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
457 *static_cast<uint64_t*>(p->value_out) = metrics->vsync_period_ns;
458 break;
459 default:
460 break;
461 }
462 }
463
464 return surface;
465}
466
467extern "C" int dvrGetNativeDisplayDimensions(int* native_width,
468 int* native_height) {
469 int error = 0;
470 auto client = android::dvr::DisplayClient::Create(&error);
471 if (!client) {
472 ALOGE("Failed to create display client!");
473 return error;
474 }
475
476 android::dvr::SystemDisplayMetrics metrics;
477 const int ret = client->GetDisplayMetrics(&metrics);
478
479 if (ret != 0) {
480 ALOGE("Failed to get display metrics!");
481 return ret;
482 }
483
484 *native_width = static_cast<int>(metrics.display_native_width);
485 *native_height = static_cast<int>(metrics.display_native_height);
486 return 0;
487}
488
// A DVR graphics context: bridges a DisplaySurfaceClient to either a GLES
// texture set or a Vulkan swapchain. It derives from ANativeObjectBase so it
// can be handed to code expecting an ANativeWindow; the static *Buffer
// functions below implement the ANativeWindow callback table.
struct DvrGraphicsContext : public android::ANativeObjectBase<
                                ANativeWindow, DvrGraphicsContext,
                                android::LightRefBase<DvrGraphicsContext>> {
 public:
  DvrGraphicsContext();
  ~DvrGraphicsContext();

  int graphics_api;  // DVR_SURFACE_GRAPHICS_API_*

  // GL specific members.
  struct {
    EGLDisplay egl_display;
    EGLContext egl_context;
    // False when the application supplied the EGL context; then we must not
    // destroy it.
    bool owns_egl_context;
    GLuint texture_id[kSurfaceViewMaxCount];
    int texture_count;
    GLenum texture_target_type;
  } gl;

  // VK specific members
  struct {
    // These objects are passed in by the application, and are NOT owned
    // by the context.
    VkInstance instance;
    VkPhysicalDevice physical_device;
    VkDevice device;
    VkQueue present_queue;
    uint32_t present_queue_family;
    const VkAllocationCallbacks* allocation_callbacks;
    // These objects are owned by the context.
    ANativeWindow* window;
    VkSurfaceKHR surface;
    VkSwapchainKHR swapchain;
    std::vector<VkImage> swapchain_images;
    std::vector<VkImageView> swapchain_image_views;
  } vk;

  // Display surface, metrics, and buffer management members.
  std::shared_ptr<android::dvr::DisplaySurfaceClient> display_surface;
  android::dvr::SystemDisplayMetrics display_metrics;
  std::unique_ptr<android::dvr::NativeBufferQueue> buffer_queue;
  android::dvr::NativeBufferProducer* current_buffer;
  bool buffer_already_posted;

  // Synchronization members.
  std::unique_ptr<android::dvr::VSyncClient> vsync_client;
  LocalHandle timerfd;

  android::dvr::FrameHistory frame_history;

  // Mapped surface metadata (ie: for pose delivery with presented frames).
  volatile android::dvr::DisplaySurfaceMetadata* surface_metadata;

  // LateLatch support.
  std::unique_ptr<android::dvr::LateLatch> late_latch;

  // Video mesh support.
  std::vector<std::shared_ptr<android::dvr::VideoMeshSurfaceClient>>
      video_mesh_surfaces;

 private:
  // ANativeWindow function implementations
  std::mutex lock_;
  int Post(android::dvr::NativeBufferProducer* buffer, int fence_fd);
  static int SetSwapInterval(ANativeWindow* window, int interval);
  static int DequeueBuffer(ANativeWindow* window, ANativeWindowBuffer** buffer,
                           int* fence_fd);
  static int QueueBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                         int fence_fd);
  static int CancelBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                          int fence_fd);
  static int Query(const ANativeWindow* window, int what, int* value);
  static int Perform(ANativeWindow* window, int operation, ...);
  static int DequeueBuffer_DEPRECATED(ANativeWindow* window,
                                      ANativeWindowBuffer** buffer);
  static int CancelBuffer_DEPRECATED(ANativeWindow* window,
                                     ANativeWindowBuffer* buffer);
  static int QueueBuffer_DEPRECATED(ANativeWindow* window,
                                    ANativeWindowBuffer* buffer);
  static int LockBuffer_DEPRECATED(ANativeWindow* window,
                                   ANativeWindowBuffer* buffer);

  // Non-copyable: the context owns GL/VK resources and window callbacks.
  DvrGraphicsContext(const DvrGraphicsContext&) = delete;
  void operator=(const DvrGraphicsContext&) = delete;
};
574
// Defaults to the GLES API with an internally-owned (not yet created) EGL
// context, and installs the static ANativeWindow entry points so this
// object can be used wherever an ANativeWindow is expected.
DvrGraphicsContext::DvrGraphicsContext()
    : graphics_api(DVR_GRAPHICS_API_GLES),
      gl{},
      vk{},
      current_buffer(nullptr),
      buffer_already_posted(false),
      surface_metadata(nullptr) {
  // gl{} zero-initializes; set the members whose "empty" value is non-zero.
  gl.egl_display = EGL_NO_DISPLAY;
  gl.egl_context = EGL_NO_CONTEXT;
  gl.owns_egl_context = true;
  gl.texture_target_type = GL_TEXTURE_2D;

  // Wire the ANativeWindow function table to our static implementations.
  ANativeWindow::setSwapInterval = SetSwapInterval;
  ANativeWindow::dequeueBuffer = DequeueBuffer;
  ANativeWindow::cancelBuffer = CancelBuffer;
  ANativeWindow::queueBuffer = QueueBuffer;
  ANativeWindow::query = Query;
  ANativeWindow::perform = Perform;

  ANativeWindow::dequeueBuffer_DEPRECATED = DequeueBuffer_DEPRECATED;
  ANativeWindow::cancelBuffer_DEPRECATED = CancelBuffer_DEPRECATED;
  ANativeWindow::lockBuffer_DEPRECATED = LockBuffer_DEPRECATED;
  ANativeWindow::queueBuffer_DEPRECATED = QueueBuffer_DEPRECATED;
}
599
// Releases the API-specific resources this context owns: GL textures (and
// the EGL context when we created it) for GLES; image views, swapchain,
// surface, and window for Vulkan. Application-provided Vulkan objects
// (instance, device, queue) are left untouched.
DvrGraphicsContext::~DvrGraphicsContext() {
  if (graphics_api == DVR_GRAPHICS_API_GLES) {
    glDeleteTextures(gl.texture_count, gl.texture_id);
    if (gl.owns_egl_context)
      DestroyEglContext(gl.egl_display, &gl.egl_context);
  } else if (graphics_api == DVR_GRAPHICS_API_VULKAN) {
    if (vk.swapchain != VK_NULL_HANDLE) {
      for (auto view : vk.swapchain_image_views) {
        vkDestroyImageView(vk.device, view, vk.allocation_callbacks);
      }
      vkDestroySwapchainKHR(vk.device, vk.swapchain, vk.allocation_callbacks);
      vkDestroySurfaceKHR(vk.instance, vk.surface, vk.allocation_callbacks);
      // NOTE(review): vk.window is only deleted when a swapchain was created;
      // confirm it cannot be allocated on any earlier failure path.
      delete vk.window;
    }
  }
}
616
617int dvrGraphicsContextCreate(struct DvrSurfaceParameter* parameters,
618 DvrGraphicsContext** return_graphics_context) {
619 std::unique_ptr<DvrGraphicsContext> context(new DvrGraphicsContext);
620
621 // See whether we're using GL or Vulkan
622 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
623 switch (p->key) {
624 case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
625 context->graphics_api = p->value;
626 break;
627 }
628 }
629
630 if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
631 context->gl.egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
632 if (context->gl.egl_display == EGL_NO_DISPLAY) {
633 ALOGE("eglGetDisplay failed");
634 return -ENXIO;
635 }
636
637 // See if we should create a GL context
638 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
639 switch (p->key) {
640 case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
641 context->gl.owns_egl_context = p->value != 0;
642 break;
643 }
644 }
645
646 if (context->gl.owns_egl_context) {
647 int ret = CreateEglContext(context->gl.egl_display, parameters,
648 &context->gl.egl_context);
649 if (ret < 0)
650 return ret;
651 } else {
652 context->gl.egl_context = eglGetCurrentContext();
653 }
654
655 int ret = InitGl(context->gl.owns_egl_context);
656 if (ret < 0)
657 return ret;
658 } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
659 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
660 switch (p->key) {
661 case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
662 context->vk.instance = reinterpret_cast<VkInstance>(p->value);
663 break;
664 case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
665 context->vk.physical_device =
666 reinterpret_cast<VkPhysicalDevice>(p->value);
667 break;
668 case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
669 context->vk.device = reinterpret_cast<VkDevice>(p->value);
670 break;
671 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
672 context->vk.present_queue = reinterpret_cast<VkQueue>(p->value);
673 break;
674 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
675 context->vk.present_queue_family = static_cast<uint32_t>(p->value);
676 break;
677 }
678 }
679 } else {
680 ALOGE("Error: invalid graphics API type");
681 return -EINVAL;
682 }
683
684 context->display_surface =
685 CreateDisplaySurfaceClient(parameters, &context->display_metrics);
686 if (!context->display_surface) {
687 ALOGE("Error: failed to create display surface client");
688 return -ECOMM;
689 }
690
691 context->buffer_queue.reset(new android::dvr::NativeBufferQueue(
692 context->gl.egl_display, context->display_surface, kDefaultBufferCount));
693
694 // The way the call sequence works we need 1 more than the buffer queue
695 // capacity to store data for all pending frames
696 context->frame_history.Reset(context->buffer_queue->GetQueueCapacity() + 1);
697
698 context->vsync_client = android::dvr::VSyncClient::Create();
699 if (!context->vsync_client) {
700 ALOGE("Error: failed to create vsync client");
701 return -ECOMM;
702 }
703
704 context->timerfd.Reset(timerfd_create(CLOCK_MONOTONIC, 0));
705 if (!context->timerfd) {
706 ALOGE("Error: timerfd_create failed because: %s", strerror(errno));
707 return -EPERM;
708 }
709
710 context->surface_metadata = context->display_surface->GetMetadataBufferPtr();
711 if (!context->surface_metadata) {
712 ALOGE("Error: surface metadata allocation failed");
713 return -ENOMEM;
714 }
715
716 ALOGI("buffer: %d x %d\n", context->display_surface->width(),
717 context->display_surface->height());
718
719 if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
720 context->gl.texture_count = (context->display_surface->flags() &
721 DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2)
722 ? 2
723 : 1;
724
725 // Create the GL textures.
726 glGenTextures(context->gl.texture_count, context->gl.texture_id);
727
728 // We must make sure that we have at least one buffer allocated at this time
729 // so that anyone who tries to bind an FBO to context->texture_id
730 // will not get an incomplete buffer.
731 context->current_buffer = context->buffer_queue->Dequeue();
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800732 LOG_ALWAYS_FATAL_IF(context->gl.texture_count !=
733 context->current_buffer->buffer()->slice_count());
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800734 for (int i = 0; i < context->gl.texture_count; ++i) {
735 glBindTexture(context->gl.texture_target_type, context->gl.texture_id[i]);
736 glEGLImageTargetTexture2DOES(context->gl.texture_target_type,
737 context->current_buffer->image_khr(i));
738 }
739 glBindTexture(context->gl.texture_target_type, 0);
740 CHECK_GL();
741
742 bool is_late_latch = false;
743
744 // Pass back the texture target type and id.
745 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
746 switch (p->key) {
747 case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
748 is_late_latch = !!p->value;
749 break;
750 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
751 *static_cast<GLenum*>(p->value_out) = context->gl.texture_target_type;
752 break;
753 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
754 for (int i = 0; i < context->gl.texture_count; ++i) {
755 *(static_cast<GLuint*>(p->value_out) + i) =
756 context->gl.texture_id[i];
757 }
758 break;
759 }
760 }
761
762 // Initialize late latch.
763 if (is_late_latch) {
764 LocalHandle fd;
765 int ret = context->display_surface->GetMetadataBufferFd(&fd);
766 if (ret == 0) {
767 context->late_latch.reset(
768 new android::dvr::LateLatch(true, std::move(fd)));
769 } else {
770 ALOGE("Error: failed to get surface metadata buffer fd for late latch");
771 }
772 }
773 } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
774 VkResult result = VK_SUCCESS;
775 // Create a VkSurfaceKHR from the ANativeWindow.
776 VkAndroidSurfaceCreateInfoKHR android_surface_ci = {};
777 android_surface_ci.sType =
778 VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
779 android_surface_ci.window = context.get();
780 result = vkCreateAndroidSurfaceKHR(
781 context->vk.instance, &android_surface_ci,
782 context->vk.allocation_callbacks, &context->vk.surface);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800783 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800784 VkBool32 surface_supports_present = VK_FALSE;
785 result = vkGetPhysicalDeviceSurfaceSupportKHR(
786 context->vk.physical_device, context->vk.present_queue_family,
787 context->vk.surface, &surface_supports_present);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800788 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800789 if (!surface_supports_present) {
790 ALOGE("Error: provided queue family (%u) does not support presentation",
791 context->vk.present_queue_family);
792 return -EPERM;
793 }
794 VkSurfaceCapabilitiesKHR surface_capabilities = {};
795 result = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
796 context->vk.physical_device, context->vk.surface,
797 &surface_capabilities);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800798 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800799 // Determine the swapchain image format.
800 uint32_t device_surface_format_count = 0;
801 result = vkGetPhysicalDeviceSurfaceFormatsKHR(
802 context->vk.physical_device, context->vk.surface,
803 &device_surface_format_count, nullptr);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800804 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800805 std::vector<VkSurfaceFormatKHR> device_surface_formats(
806 device_surface_format_count);
807 result = vkGetPhysicalDeviceSurfaceFormatsKHR(
808 context->vk.physical_device, context->vk.surface,
809 &device_surface_format_count, device_surface_formats.data());
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800810 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
811 LOG_ALWAYS_FATAL_IF(device_surface_format_count == 0U);
812 LOG_ALWAYS_FATAL_IF(device_surface_formats[0].format ==
813 VK_FORMAT_UNDEFINED);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800814 VkSurfaceFormatKHR present_surface_format = device_surface_formats[0];
815 // Determine the swapchain present mode.
816 // TODO(cort): query device_present_modes to make sure MAILBOX is supported.
817 // But according to libvulkan, it is.
818 uint32_t device_present_mode_count = 0;
819 result = vkGetPhysicalDeviceSurfacePresentModesKHR(
820 context->vk.physical_device, context->vk.surface,
821 &device_present_mode_count, nullptr);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800822 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800823 std::vector<VkPresentModeKHR> device_present_modes(
824 device_present_mode_count);
825 result = vkGetPhysicalDeviceSurfacePresentModesKHR(
826 context->vk.physical_device, context->vk.surface,
827 &device_present_mode_count, device_present_modes.data());
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800828 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800829 VkPresentModeKHR present_mode = VK_PRESENT_MODE_MAILBOX_KHR;
830 // Extract presentation surface extents, image count, transform, usages,
831 // etc.
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800832 LOG_ALWAYS_FATAL_IF(
833 static_cast<int>(surface_capabilities.currentExtent.width) == -1 ||
834 static_cast<int>(surface_capabilities.currentExtent.height) == -1);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800835 VkExtent2D swapchain_extent = surface_capabilities.currentExtent;
836
837 uint32_t desired_image_count = surface_capabilities.minImageCount;
838 if (surface_capabilities.maxImageCount > 0 &&
839 desired_image_count > surface_capabilities.maxImageCount) {
840 desired_image_count = surface_capabilities.maxImageCount;
841 }
842 VkSurfaceTransformFlagBitsKHR surface_transform =
843 surface_capabilities.currentTransform;
844 VkImageUsageFlags image_usage_flags =
845 surface_capabilities.supportedUsageFlags;
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800846 LOG_ALWAYS_FATAL_IF(surface_capabilities.supportedCompositeAlpha ==
847 static_cast<VkFlags>(0));
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800848 VkCompositeAlphaFlagBitsKHR composite_alpha =
849 VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
850 if (!(surface_capabilities.supportedCompositeAlpha &
851 VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR)) {
852 composite_alpha = VkCompositeAlphaFlagBitsKHR(
853 static_cast<int>(surface_capabilities.supportedCompositeAlpha) &
854 -static_cast<int>(surface_capabilities.supportedCompositeAlpha));
855 }
856 // Create VkSwapchainKHR
857 VkSwapchainCreateInfoKHR swapchain_ci = {};
858 swapchain_ci.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
859 swapchain_ci.pNext = nullptr;
860 swapchain_ci.surface = context->vk.surface;
861 swapchain_ci.minImageCount = desired_image_count;
862 swapchain_ci.imageFormat = present_surface_format.format;
863 swapchain_ci.imageColorSpace = present_surface_format.colorSpace;
864 swapchain_ci.imageExtent.width = swapchain_extent.width;
865 swapchain_ci.imageExtent.height = swapchain_extent.height;
866 swapchain_ci.imageUsage = image_usage_flags;
867 swapchain_ci.preTransform = surface_transform;
868 swapchain_ci.compositeAlpha = composite_alpha;
869 swapchain_ci.imageArrayLayers = 1;
870 swapchain_ci.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
871 swapchain_ci.queueFamilyIndexCount = 0;
872 swapchain_ci.pQueueFamilyIndices = nullptr;
873 swapchain_ci.presentMode = present_mode;
874 swapchain_ci.clipped = VK_TRUE;
875 swapchain_ci.oldSwapchain = VK_NULL_HANDLE;
876 result = vkCreateSwapchainKHR(context->vk.device, &swapchain_ci,
877 context->vk.allocation_callbacks,
878 &context->vk.swapchain);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800879 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800880 // Create swapchain image views
881 uint32_t image_count = 0;
882 result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
883 &image_count, nullptr);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800884 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
885 LOG_ALWAYS_FATAL_IF(image_count == 0U);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800886 context->vk.swapchain_images.resize(image_count);
887 result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
888 &image_count,
889 context->vk.swapchain_images.data());
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800890 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800891 context->vk.swapchain_image_views.resize(image_count);
892 VkImageViewCreateInfo image_view_ci = {};
893 image_view_ci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
894 image_view_ci.pNext = nullptr;
895 image_view_ci.flags = 0;
896 image_view_ci.format = swapchain_ci.imageFormat;
897 image_view_ci.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
898 image_view_ci.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
899 image_view_ci.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
900 image_view_ci.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
901 image_view_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
902 image_view_ci.subresourceRange.baseMipLevel = 0;
903 image_view_ci.subresourceRange.levelCount = 1;
904 image_view_ci.subresourceRange.baseArrayLayer = 0;
905 image_view_ci.subresourceRange.layerCount = 1;
906 image_view_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
907 image_view_ci.image = VK_NULL_HANDLE; // filled in below
908 for (uint32_t i = 0; i < image_count; ++i) {
909 image_view_ci.image = context->vk.swapchain_images[i];
910 result = vkCreateImageView(context->vk.device, &image_view_ci,
911 context->vk.allocation_callbacks,
912 &context->vk.swapchain_image_views[i]);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800913 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800914 }
915 // Fill in any requested output parameters.
916 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
917 switch (p->key) {
918 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
919 *static_cast<uint32_t*>(p->value_out) = image_count;
920 break;
921 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
922 *static_cast<VkFormat*>(p->value_out) = swapchain_ci.imageFormat;
923 break;
924 }
925 }
926 }
927
928 *return_graphics_context = context.release();
929 return 0;
930}
931
932void dvrGraphicsContextDestroy(DvrGraphicsContext* graphics_context) {
933 delete graphics_context;
934}
935
936// ANativeWindow function implementations. These should only be used
937// by the Vulkan path.
938int DvrGraphicsContext::Post(android::dvr::NativeBufferProducer* buffer,
939 int fence_fd) {
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800940 LOG_ALWAYS_FATAL_IF(graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800941 ATRACE_NAME(__PRETTY_FUNCTION__);
942 ALOGI_IF(TRACE, "DvrGraphicsContext::Post: buffer_id=%d, fence_fd=%d",
943 buffer->buffer()->id(), fence_fd);
944 ALOGW_IF(!display_surface->visible(),
945 "DvrGraphicsContext::Post: Posting buffer on invisible surface!!!");
946 // The NativeBufferProducer closes the fence fd, so dup it for tracking in the
947 // frame history.
948 frame_history.OnFrameSubmit(LocalHandle::AsDuplicate(fence_fd));
949 int result = buffer->Post(fence_fd, 0);
950 return result;
951}
952
953int DvrGraphicsContext::SetSwapInterval(ANativeWindow* window, int interval) {
954 ALOGI_IF(TRACE, "SetSwapInterval: window=%p interval=%d", window, interval);
955 DvrGraphicsContext* self = getSelf(window);
956 (void)self;
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800957 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800958 return android::NO_ERROR;
959}
960
961int DvrGraphicsContext::DequeueBuffer(ANativeWindow* window,
962 ANativeWindowBuffer** buffer,
963 int* fence_fd) {
964 ATRACE_NAME(__PRETTY_FUNCTION__);
965
966 DvrGraphicsContext* self = getSelf(window);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800967 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800968 std::lock_guard<std::mutex> autolock(self->lock_);
969
970 if (!self->current_buffer) {
971 self->current_buffer = self->buffer_queue.get()->Dequeue();
972 }
973 ATRACE_ASYNC_BEGIN("BufferDraw", self->current_buffer->buffer()->id());
974 *fence_fd = self->current_buffer->ClaimReleaseFence().Release();
975 *buffer = self->current_buffer;
976
977 ALOGI_IF(TRACE, "DvrGraphicsContext::DequeueBuffer: fence_fd=%d", *fence_fd);
978 return android::NO_ERROR;
979}
980
// ANativeWindow queue hook (Vulkan path only). Submits a rendered buffer to
// the display service. If the buffer was already posted early via
// dvrGraphicsPostEarly, the duplicate post is skipped and the fence closed.
int DvrGraphicsContext::QueueBuffer(ANativeWindow* window,
                                    ANativeWindowBuffer* buffer, int fence_fd) {
  ATRACE_NAME("NativeWindow::QueueBuffer");
  ALOGI_IF(TRACE, "NativeWindow::QueueBuffer: fence_fd=%d", fence_fd);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  android::dvr::NativeBufferProducer* native_buffer =
      static_cast<android::dvr::NativeBufferProducer*>(buffer);
  ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id());
  bool do_post = true;
  if (self->buffer_already_posted) {
    // Check that the buffer is the one we expect, but handle it if this happens
    // in production by allowing this buffer to post on top of the previous one.
    LOG_FATAL_IF(native_buffer != self->current_buffer);
    if (native_buffer == self->current_buffer) {
      do_post = false;
      // The early post already happened without this fence; close it here
      // since no one else will take ownership.
      if (fence_fd >= 0)
        close(fence_fd);
    }
  }
  if (do_post) {
    ATRACE_ASYNC_BEGIN("BufferPost", native_buffer->buffer()->id());
    // Post() takes ownership of fence_fd (the producer closes it).
    self->Post(native_buffer, fence_fd);
  }
  // Either way, this frame's buffer is done from the context's perspective.
  self->buffer_already_posted = false;
  self->current_buffer = nullptr;

  return android::NO_ERROR;
}
1013
// ANativeWindow cancel hook (Vulkan path only). Returns an unused buffer to
// the producer queue. A buffer that was already posted early cannot be
// recycled here, so it is only dropped from the context's bookkeeping.
int DvrGraphicsContext::CancelBuffer(ANativeWindow* window,
                                     ANativeWindowBuffer* buffer,
                                     int fence_fd) {
  ATRACE_NAME("DvrGraphicsContext::CancelBuffer");
  ALOGI_IF(TRACE, "DvrGraphicsContext::CancelBuffer: fence_fd: %d", fence_fd);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  android::dvr::NativeBufferProducer* native_buffer =
      static_cast<android::dvr::NativeBufferProducer*>(buffer);
  ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id());
  ATRACE_INT("CancelBuffer", native_buffer->buffer()->id());
  bool do_enqueue = true;
  if (self->buffer_already_posted) {
    // Check that the buffer is the one we expect, but handle it if this happens
    // in production by returning this buffer to the buffer queue.
    LOG_FATAL_IF(native_buffer != self->current_buffer);
    if (native_buffer == self->current_buffer) {
      do_enqueue = false;
    }
  }
  if (do_enqueue) {
    self->buffer_queue.get()->Enqueue(native_buffer);
  }
  // The cancel path owns the fence; release it unconditionally.
  if (fence_fd >= 0)
    close(fence_fd);
  self->buffer_already_posted = false;
  self->current_buffer = nullptr;

  return android::NO_ERROR;
}
1047
1048int DvrGraphicsContext::Query(const ANativeWindow* window, int what,
1049 int* value) {
1050 DvrGraphicsContext* self = getSelf(const_cast<ANativeWindow*>(window));
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001051 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001052 std::lock_guard<std::mutex> autolock(self->lock_);
1053
1054 switch (what) {
1055 case NATIVE_WINDOW_WIDTH:
1056 *value = self->display_surface->width();
1057 return android::NO_ERROR;
1058 case NATIVE_WINDOW_HEIGHT:
1059 *value = self->display_surface->height();
1060 return android::NO_ERROR;
1061 case NATIVE_WINDOW_FORMAT:
1062 *value = self->display_surface->format();
1063 return android::NO_ERROR;
1064 case NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS:
1065 *value = 1;
1066 return android::NO_ERROR;
1067 case NATIVE_WINDOW_CONCRETE_TYPE:
1068 *value = NATIVE_WINDOW_SURFACE;
1069 return android::NO_ERROR;
1070 case NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER:
1071 *value = 1;
1072 return android::NO_ERROR;
1073 case NATIVE_WINDOW_DEFAULT_WIDTH:
1074 *value = self->display_surface->width();
1075 return android::NO_ERROR;
1076 case NATIVE_WINDOW_DEFAULT_HEIGHT:
1077 *value = self->display_surface->height();
1078 return android::NO_ERROR;
1079 case NATIVE_WINDOW_TRANSFORM_HINT:
1080 *value = 0;
1081 return android::NO_ERROR;
1082 }
1083
1084 *value = 0;
1085 return android::BAD_VALUE;
1086}
1087
1088int DvrGraphicsContext::Perform(ANativeWindow* window, int operation, ...) {
1089 DvrGraphicsContext* self = getSelf(window);
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001090 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001091 std::lock_guard<std::mutex> autolock(self->lock_);
1092
1093 va_list args;
1094 va_start(args, operation);
1095
1096 // TODO(eieio): The following operations are not used at this time. They are
1097 // included here to help document which operations may be useful and what
1098 // parameters they take.
1099 switch (operation) {
1100 case NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: {
1101 int w = va_arg(args, int);
1102 int h = va_arg(args, int);
1103 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: w=%d h=%d", w, h);
1104 return android::NO_ERROR;
1105 }
1106
1107 case NATIVE_WINDOW_SET_BUFFERS_FORMAT: {
1108 int format = va_arg(args, int);
1109 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_FORMAT: format=%d", format);
1110 return android::NO_ERROR;
1111 }
1112
1113 case NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: {
1114 int transform = va_arg(args, int);
1115 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: transform=%d",
1116 transform);
1117 return android::NO_ERROR;
1118 }
1119
1120 case NATIVE_WINDOW_SET_USAGE: {
1121 int usage = va_arg(args, int);
1122 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_USAGE: usage=%d", usage);
1123 return android::NO_ERROR;
1124 }
1125
1126 case NATIVE_WINDOW_CONNECT:
1127 case NATIVE_WINDOW_DISCONNECT:
1128 case NATIVE_WINDOW_SET_BUFFERS_GEOMETRY:
1129 case NATIVE_WINDOW_API_CONNECT:
1130 case NATIVE_WINDOW_API_DISCONNECT:
1131 // TODO(eieio): we should implement these
1132 return android::NO_ERROR;
1133
1134 case NATIVE_WINDOW_SET_BUFFER_COUNT: {
1135 int buffer_count = va_arg(args, int);
1136 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFER_COUNT: bufferCount=%d",
1137 buffer_count);
1138 return android::NO_ERROR;
1139 }
1140 case NATIVE_WINDOW_SET_BUFFERS_DATASPACE: {
1141 android_dataspace_t data_space =
1142 static_cast<android_dataspace_t>(va_arg(args, int));
1143 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DATASPACE: dataSpace=%d",
1144 data_space);
1145 return android::NO_ERROR;
1146 }
1147 case NATIVE_WINDOW_SET_SCALING_MODE: {
1148 int mode = va_arg(args, int);
1149 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_SCALING_MODE: mode=%d", mode);
1150 return android::NO_ERROR;
1151 }
1152
1153 case NATIVE_WINDOW_LOCK:
1154 case NATIVE_WINDOW_UNLOCK_AND_POST:
1155 case NATIVE_WINDOW_SET_CROP:
1156 case NATIVE_WINDOW_SET_BUFFERS_TIMESTAMP:
1157 return android::INVALID_OPERATION;
1158 }
1159
1160 return android::NAME_NOT_FOUND;
1161}
1162
// Legacy ANativeWindow dequeue hook without a fence-out parameter.
// Delegates to DequeueBuffer() and disposes of the returned release fence.
// NOTE(review): the fence fd is closed rather than waited on — presumably
// callers of the deprecated API tolerate a not-yet-released buffer; confirm
// before relying on CPU-side readiness here.
int DvrGraphicsContext::DequeueBuffer_DEPRECATED(ANativeWindow* window,
                                                 ANativeWindowBuffer** buffer) {
  int fence_fd = -1;
  int ret = DequeueBuffer(window, buffer, &fence_fd);

  // wait for fence
  if (ret == android::NO_ERROR && fence_fd != -1)
    close(fence_fd);

  return ret;
}
1174
1175int DvrGraphicsContext::CancelBuffer_DEPRECATED(ANativeWindow* window,
1176 ANativeWindowBuffer* buffer) {
1177 return CancelBuffer(window, buffer, -1);
1178}
1179
1180int DvrGraphicsContext::QueueBuffer_DEPRECATED(ANativeWindow* window,
1181 ANativeWindowBuffer* buffer) {
1182 return QueueBuffer(window, buffer, -1);
1183}
1184
1185int DvrGraphicsContext::LockBuffer_DEPRECATED(ANativeWindow* /*window*/,
1186 ANativeWindowBuffer* /*buffer*/) {
1187 return android::NO_ERROR;
1188}
1189// End ANativeWindow implementation
1190
1191int dvrSetEdsPose(DvrGraphicsContext* graphics_context,
1192 float32x4_t render_pose_orientation,
1193 float32x4_t render_pose_translation) {
1194 ATRACE_NAME("dvrSetEdsPose");
1195 if (!graphics_context->current_buffer) {
1196 ALOGE("dvrBeginRenderFrame must be called before dvrSetEdsPose");
1197 return -EPERM;
1198 }
1199
1200 // When late-latching is enabled, the pose buffer is written by the GPU, so
1201 // we don't touch it here.
1202 float32x4_t is_late_latch = DVR_POSE_LATE_LATCH;
1203 if (render_pose_orientation[0] != is_late_latch[0]) {
1204 volatile android::dvr::DisplaySurfaceMetadata* data =
1205 graphics_context->surface_metadata;
1206 uint32_t buffer_index =
1207 graphics_context->current_buffer->surface_buffer_index();
1208 ALOGE_IF(TRACE, "write pose index %d %f %f", buffer_index,
1209 render_pose_orientation[0], render_pose_orientation[1]);
1210 data->orientation[buffer_index] = render_pose_orientation;
1211 data->translation[buffer_index] = render_pose_translation;
1212 }
1213
1214 return 0;
1215}
1216
// Begins a GLES render frame: dequeues the next buffer (unless one is
// already held, e.g. after an early post), records the EDS render pose, and
// rebinds the context's textures to the buffer's EGL images. Returns 0 on
// success or a negative errno from dvrSetEdsPose.
int dvrBeginRenderFrameEds(DvrGraphicsContext* graphics_context,
                           float32x4_t render_pose_orientation,
                           float32x4_t render_pose_translation) {
  ATRACE_NAME("dvrBeginRenderFrameEds");
  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);
  CHECK_GL();
  // Grab a buffer from the queue and set its pose.
  if (!graphics_context->current_buffer) {
    graphics_context->current_buffer =
        graphics_context->buffer_queue->Dequeue();
  }

  int ret = dvrSetEdsPose(graphics_context, render_pose_orientation,
                          render_pose_translation);
  if (ret < 0)
    return ret;

  ATRACE_ASYNC_BEGIN("BufferDraw",
                     graphics_context->current_buffer->buffer()->id());

  {
    ATRACE_NAME("glEGLImageTargetTexture2DOES");
    // Bind the texture to the latest buffer in the queue.
    for (int i = 0; i < graphics_context->gl.texture_count; ++i) {
      glBindTexture(graphics_context->gl.texture_target_type,
                    graphics_context->gl.texture_id[i]);
      glEGLImageTargetTexture2DOES(
          graphics_context->gl.texture_target_type,
          graphics_context->current_buffer->image_khr(i));
    }
    // Leave no texture bound so later GL state changes don't affect ours.
    glBindTexture(graphics_context->gl.texture_target_type, 0);
  }
  CHECK_GL();
  return 0;
}
1252int dvrBeginRenderFrameEdsVk(DvrGraphicsContext* graphics_context,
1253 float32x4_t render_pose_orientation,
1254 float32x4_t render_pose_translation,
1255 VkSemaphore acquire_semaphore,
1256 VkFence acquire_fence,
1257 uint32_t* swapchain_image_index,
1258 VkImageView* swapchain_image_view) {
1259 ATRACE_NAME("dvrBeginRenderFrameEds");
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001260 LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api !=
1261 DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001262
1263 // Acquire a swapchain image. This calls Dequeue() internally.
1264 VkResult result = vkAcquireNextImageKHR(
1265 graphics_context->vk.device, graphics_context->vk.swapchain, UINT64_MAX,
1266 acquire_semaphore, acquire_fence, swapchain_image_index);
1267 if (result != VK_SUCCESS)
1268 return -EINVAL;
1269
1270 // Set the pose pose.
1271 int ret = dvrSetEdsPose(graphics_context, render_pose_orientation,
1272 render_pose_translation);
1273 if (ret < 0)
1274 return ret;
1275 *swapchain_image_view =
1276 graphics_context->vk.swapchain_image_views[*swapchain_image_index];
1277 return 0;
1278}
1279
1280int dvrBeginRenderFrame(DvrGraphicsContext* graphics_context) {
1281 return dvrBeginRenderFrameEds(graphics_context, DVR_POSE_NO_EDS,
1282 DVR_POSE_NO_EDS);
1283}
1284int dvrBeginRenderFrameVk(DvrGraphicsContext* graphics_context,
1285 VkSemaphore acquire_semaphore, VkFence acquire_fence,
1286 uint32_t* swapchain_image_index,
1287 VkImageView* swapchain_image_view) {
1288 return dvrBeginRenderFrameEdsVk(
1289 graphics_context, DVR_POSE_NO_EDS, DVR_POSE_NO_EDS, acquire_semaphore,
1290 acquire_fence, swapchain_image_index, swapchain_image_view);
1291}
1292
// Begins a frame whose pose is late-latched on the GPU rather than written
// by the CPU. Copies the per-view projection, eye-from-head and pose-offset
// matrices into a LateLatchInput and enqueues it for the late-latch shader.
// Returns 0 on success, -EPERM when late latching is disabled for this
// context, -EINVAL when num_views exceeds DVR_GRAPHICS_SURFACE_MAX_VIEWS.
int dvrBeginRenderFrameLateLatch(DvrGraphicsContext* graphics_context,
                                 uint32_t /*flags*/,
                                 uint32_t target_vsync_count, int num_views,
                                 const float** projection_matrices,
                                 const float** eye_from_head_matrices,
                                 const float** pose_offset_matrices,
                                 uint32_t* out_late_latch_buffer_id) {
  if (!graphics_context->late_latch) {
    return -EPERM;
  }
  if (num_views > DVR_GRAPHICS_SURFACE_MAX_VIEWS) {
    ALOGE("dvrBeginRenderFrameLateLatch called with too many views.");
    return -EINVAL;
  }
  // DVR_POSE_LATE_LATCH tells dvrSetEdsPose to skip the CPU-side pose write;
  // the GPU fills it in instead.
  dvrBeginRenderFrameEds(graphics_context, DVR_POSE_LATE_LATCH,
                         DVR_POSE_LATE_LATCH);
  auto& ll = graphics_context->late_latch;
  // TODO(jbates) Need to change this shader so that it dumps the single
  // captured pose for both eyes into the display surface metadata buffer at
  // the right index.
  android::dvr::LateLatchInput input;
  memset(&input, 0, sizeof(input));
  for (int i = 0; i < num_views; ++i) {
    // Each matrix is a 4x4 float array (16 contiguous elements).
    memcpy(input.proj_mat + i, *(projection_matrices + i), 16 * sizeof(float));
    memcpy(input.eye_from_head_mat + i, *(eye_from_head_matrices + i),
           16 * sizeof(float));
    memcpy(input.pose_offset + i, *(pose_offset_matrices + i),
           16 * sizeof(float));
  }
  input.pose_index =
      target_vsync_count & android::dvr::kPoseAsyncBufferIndexMask;
  input.render_pose_index =
      graphics_context->current_buffer->surface_buffer_index();
  ll->AddLateLatch(input);
  *out_late_latch_buffer_id = ll->output_buffer_id();
  return 0;
}
1330
// Sleeps until the next frame should start rendering (the scheduled finish
// time of the current frame plus start_delay_ns), using a one-shot absolute
// timerfd. On success fills out_next_frame_schedule (if non-null) with the
// predicted vsync count and scheduled finish time of the upcoming frame.
// Returns 0 on success, -1 if the schedule fetch or the sleep failed.
extern "C" int dvrGraphicsWaitNextFrame(
    DvrGraphicsContext* graphics_context, int64_t start_delay_ns,
    DvrFrameSchedule* out_next_frame_schedule) {
  // Negative delays are clamped to zero.
  start_delay_ns = std::max(start_delay_ns, static_cast<int64_t>(0));

  // We only do one-shot timers:
  int64_t wake_time_ns = 0;

  uint32_t current_frame_vsync;
  int64_t current_frame_scheduled_finish_ns;
  int64_t vsync_period_ns;

  int fetch_schedule_result = graphics_context->vsync_client->GetSchedInfo(
      &vsync_period_ns, &current_frame_scheduled_finish_ns,
      &current_frame_vsync);
  if (fetch_schedule_result == 0) {
    wake_time_ns = current_frame_scheduled_finish_ns + start_delay_ns;
    // If the last wakeup time is still in the future, use it instead to avoid
    // major schedule jumps when applications call WaitNextFrame with
    // aggressive offsets.
    int64_t now = android::dvr::GetSystemClockNs();
    if (android::dvr::TimestampGT(wake_time_ns - vsync_period_ns, now)) {
      wake_time_ns -= vsync_period_ns;
      --current_frame_vsync;
    }
    // If the next wakeup time is in the past, add a vsync period to keep the
    // application on schedule.
    if (android::dvr::TimestampLT(wake_time_ns, now)) {
      wake_time_ns += vsync_period_ns;
      ++current_frame_vsync;
    }
  } else {
    ALOGE("Error getting frame schedule because: %s",
          strerror(-fetch_schedule_result));
    // Sleep for a vsync period to avoid cascading failure.
    wake_time_ns = android::dvr::GetSystemClockNs() +
                   graphics_context->display_metrics.vsync_period_ns;
  }

  // Adjust nsec to [0..999,999,999].
  struct itimerspec wake_time;
  wake_time.it_interval.tv_sec = 0;
  wake_time.it_interval.tv_nsec = 0;
  wake_time.it_value = android::dvr::NsToTimespec(wake_time_ns);
  // TFD_TIMER_ABSTIME: wake_time is an absolute CLOCK timestamp, not a delta.
  bool sleep_result =
      timerfd_settime(graphics_context->timerfd.Get(), TFD_TIMER_ABSTIME,
                      &wake_time, nullptr) == 0;
  if (sleep_result) {
    ATRACE_NAME("sleep");
    uint64_t expirations = 0;
    // Blocking read on the timerfd performs the actual sleep.
    sleep_result = read(graphics_context->timerfd.Get(), &expirations,
                        sizeof(uint64_t)) == sizeof(uint64_t);
    if (!sleep_result) {
      ALOGE("Error: timerfd read failed");
    }
  } else {
    ALOGE("Error: timerfd_settime failed because: %s", strerror(errno));
  }

  auto& frame_history = graphics_context->frame_history;
  frame_history.CheckForFinishedFrames();
  if (fetch_schedule_result == 0) {
    // Predict which vsync the next frame will land on from past frame times.
    uint32_t next_frame_vsync =
        current_frame_vsync +
        frame_history.PredictNextFrameVsyncInterval(vsync_period_ns);
    int64_t next_frame_scheduled_finish =
        (wake_time_ns - start_delay_ns) + vsync_period_ns;
    frame_history.OnFrameStart(next_frame_vsync, next_frame_scheduled_finish);
    if (out_next_frame_schedule) {
      out_next_frame_schedule->vsync_count = next_frame_vsync;
      out_next_frame_schedule->scheduled_frame_finish_ns =
          next_frame_scheduled_finish;
    }
  } else {
    // No schedule info: record the frame start with sentinel values.
    frame_history.OnFrameStart(UINT32_MAX, -1);
  }

  return (fetch_schedule_result == 0 && sleep_result) ? 0 : -1;
}
1410
// Posts the current buffer to the display service before rendering has
// finished (GLES path only). A subsequent dvrPresent will then skip the
// duplicate post. May be called before or after dvrBeginRenderFrame.
extern "C" void dvrGraphicsPostEarly(DvrGraphicsContext* graphics_context) {
  ATRACE_NAME("dvrGraphicsPostEarly");
  ALOGI_IF(TRACE, "dvrGraphicsPostEarly");

  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);

  // Note that this function can be called before or after
  // dvrBeginRenderFrame.
  if (!graphics_context->buffer_already_posted) {
    graphics_context->buffer_already_posted = true;

    if (!graphics_context->current_buffer) {
      graphics_context->current_buffer =
          graphics_context->buffer_queue->Dequeue();
    }

    auto buffer = graphics_context->current_buffer->buffer().get();
    ATRACE_ASYNC_BEGIN("BufferPost", buffer->id());
    // Post with an empty fence: the buffer is intentionally submitted before
    // its rendering completes.
    int result = buffer->Post<uint64_t>(LocalHandle(), 0);
    if (result < 0)
      ALOGE("Buffer post failed: %d (%s)", result, strerror(-result));
  }
}
1434
// Submits the current GLES frame: flushes GL with a native fence and posts
// the current buffer (unless it was already posted early). Returns 0 on
// success, -EPERM if dvrBeginRenderFrame was not called first.
int dvrPresent(DvrGraphicsContext* graphics_context) {
  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);

  // Label the trace section with the current frame's vsync count.
  std::array<char, 128> buf;
  snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|",
           graphics_context->frame_history.GetCurrentFrameVsync());
  ATRACE_NAME(buf.data());

  if (!graphics_context->current_buffer) {
    ALOGE("Error: dvrPresent called without dvrBeginRenderFrame");
    return -EPERM;
  }

  // Flush GL and obtain a native fence that signals when rendering is done.
  LocalHandle fence_fd =
      android::dvr::CreateGLSyncAndFlush(graphics_context->gl.egl_display);

  ALOGI_IF(TRACE, "PostBuffer: buffer_id=%d, fence_fd=%d",
           graphics_context->current_buffer->buffer()->id(), fence_fd.Get());
  ALOGW_IF(!graphics_context->display_surface->visible(),
           "PostBuffer: Posting buffer on invisible surface!!!");

  auto buffer = graphics_context->current_buffer->buffer().get();
  ATRACE_ASYNC_END("BufferDraw", buffer->id());
  if (!graphics_context->buffer_already_posted) {
    ATRACE_ASYNC_BEGIN("BufferPost", buffer->id());
    int result = buffer->Post<uint64_t>(fence_fd, 0);
    if (result < 0)
      ALOGE("Buffer post failed: %d (%s)", result, strerror(-result));
  }

  // Frame history takes over ownership of the fence for completion tracking.
  graphics_context->frame_history.OnFrameSubmit(std::move(fence_fd));
  graphics_context->buffer_already_posted = false;
  graphics_context->current_buffer = nullptr;
  return 0;
}
1470
// Presents a Vulkan swapchain image. vkQueuePresentKHR internally drives the
// ANativeWindow QueueBuffer hook, which posts the buffer and resets the
// context's current_buffer state. Returns 0 on success, -EPERM without a
// prior dvrBeginRenderFrameVk, -EINVAL if the present call fails.
int dvrPresentVk(DvrGraphicsContext* graphics_context,
                 VkSemaphore submit_semaphore, uint32_t swapchain_image_index) {
  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api !=
                      DVR_GRAPHICS_API_VULKAN);

  // Label the trace section with the current frame's vsync count.
  std::array<char, 128> buf;
  snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|",
           graphics_context->frame_history.GetCurrentFrameVsync());
  ATRACE_NAME(buf.data());

  if (!graphics_context->current_buffer) {
    ALOGE("Error: dvrPresentVk called without dvrBeginRenderFrameVk");
    return -EPERM;
  }

  // Present the specified image. Internally, this gets a fence from the
  // Vulkan driver and passes it to DvrGraphicsContext::Post(),
  // which in turn passes it to buffer->Post() and adds it to frame_history.
  VkPresentInfoKHR present_info = {};
  present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
  present_info.swapchainCount = 1;
  present_info.pSwapchains = &graphics_context->vk.swapchain;
  present_info.pImageIndices = &swapchain_image_index;
  // Wait on the caller's submit semaphore only if one was provided.
  present_info.waitSemaphoreCount =
      (submit_semaphore != VK_NULL_HANDLE) ? 1 : 0;
  present_info.pWaitSemaphores = &submit_semaphore;
  VkResult result =
      vkQueuePresentKHR(graphics_context->vk.present_queue, &present_info);
  if (result != VK_SUCCESS) {
    return -EINVAL;
  }

  return 0;
}
1505
1506extern "C" int dvrGetFrameScheduleResults(DvrGraphicsContext* context,
1507 DvrFrameScheduleResult* results,
1508 int in_result_count) {
1509 if (!context || !results)
1510 return -EINVAL;
1511
1512 return context->frame_history.GetPreviousFrameResults(results,
1513 in_result_count);
1514}
1515
1516extern "C" void dvrGraphicsSurfaceSetVisible(
1517 DvrGraphicsContext* graphics_context, int visible) {
1518 graphics_context->display_surface->SetVisible(visible);
1519}
1520
1521extern "C" int dvrGraphicsSurfaceGetVisible(
1522 DvrGraphicsContext* graphics_context) {
1523 return graphics_context->display_surface->visible() ? 1 : 0;
1524}
1525
1526extern "C" void dvrGraphicsSurfaceSetZOrder(
1527 DvrGraphicsContext* graphics_context, int z_order) {
1528 graphics_context->display_surface->SetZOrder(z_order);
1529}
1530
1531extern "C" int dvrGraphicsSurfaceGetZOrder(
1532 DvrGraphicsContext* graphics_context) {
1533 return graphics_context->display_surface->z_order();
1534}
1535
1536extern "C" DvrVideoMeshSurface* dvrGraphicsVideoMeshSurfaceCreate(
1537 DvrGraphicsContext* graphics_context) {
1538 auto display_surface = graphics_context->display_surface;
1539 // A DisplaySurface must be created prior to the creation of a
1540 // VideoMeshSurface.
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001541 LOG_ALWAYS_FATAL_IF(display_surface == nullptr);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001542
1543 LocalChannelHandle surface_handle = display_surface->CreateVideoMeshSurface();
1544 if (!surface_handle.valid()) {
1545 return nullptr;
1546 }
1547
1548 std::unique_ptr<DvrVideoMeshSurface> surface(new DvrVideoMeshSurface);
1549 surface->client =
1550 android::dvr::VideoMeshSurfaceClient::Import(std::move(surface_handle));
1551
1552 // TODO(jwcai) The next line is not needed...
1553 auto producer_queue = surface->client->GetProducerQueue();
1554 return surface.release();
1555}
1556
1557extern "C" void dvrGraphicsVideoMeshSurfaceDestroy(
1558 DvrVideoMeshSurface* surface) {
1559 delete surface;
1560}
1561
// Publishes a 4x4 transform for the given eye into the video mesh surface's
// shared metadata buffer, slotted by the graphics context's current buffer
// index so the compositor picks it up with the matching frame.
extern "C" void dvrGraphicsVideoMeshSurfacePresent(
    DvrGraphicsContext* graphics_context, DvrVideoMeshSurface* surface,
    const int eye, const float* transform) {
  volatile android::dvr::VideoMeshSurfaceMetadata* metadata =
      surface->client->GetMetadataBufferPtr();

  const uint32_t graphics_buffer_index =
      graphics_context->current_buffer->surface_buffer_index();

  // Reads `transform` with stride 4 (elements i, i+4, i+8, i+12), i.e. it
  // transposes between the input layout and the stored rows.
  // NOTE(review): presumably the input is column-major — confirm the
  // expected layout of `transform` against call sites.
  for (int i = 0; i < 4; ++i) {
    metadata->transform[graphics_buffer_index][eye].val[i] = {
        transform[i + 0], transform[i + 4], transform[i + 8], transform[i + 12],
    };
  }
}