blob: d59961696a595bf3c41eb8eaaf2ab7d8ae974510 [file] [log] [blame]
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001#include <dvr/graphics.h>
2
3#include <sys/timerfd.h>
4#include <array>
5#include <vector>
6
7#include <cutils/log.h>
8#include <utils/Trace.h>
9
10#ifndef VK_USE_PLATFORM_ANDROID_KHR
11#define VK_USE_PLATFORM_ANDROID_KHR 1
12#endif
13#include <vulkan/vulkan.h>
14
15#include <pdx/file_handle.h>
16#include <private/dvr/clock_ns.h>
17#include <private/dvr/debug.h>
18#include <private/dvr/display_types.h>
19#include <private/dvr/frame_history.h>
20#include <private/dvr/gl_fenced_flush.h>
21#include <private/dvr/graphics/vr_gl_extensions.h>
22#include <private/dvr/graphics_private.h>
23#include <private/dvr/late_latch.h>
24#include <private/dvr/native_buffer_queue.h>
25#include <private/dvr/sensor_constants.h>
26#include <private/dvr/video_mesh_surface_client.h>
27#include <private/dvr/vsync_client.h>
28
29#include <android/native_window.h>
30
31#ifndef EGL_CONTEXT_MAJOR_VERSION
32#define EGL_CONTEXT_MAJOR_VERSION 0x3098
33#define EGL_CONTEXT_MINOR_VERSION 0x30FB
34#endif
35
36using android::pdx::LocalHandle;
37using android::pdx::LocalChannelHandle;
38
39using android::dvr::DisplaySurfaceAttributeEnum;
40using android::dvr::DisplaySurfaceAttributeValue;
41
42namespace {
43
// Default gralloc usage bits and pixel format applied to display surfaces
// created in this file when the caller does not override them via
// DVR_SURFACE_PARAMETER_FORMAT_IN.
constexpr int kDefaultDisplaySurfaceUsage =
    GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;
constexpr int kDefaultDisplaySurfaceFormat = HAL_PIXEL_FORMAT_RGBA_8888;
// TODO(alexst): revisit this count when HW encode is available for casting.
constexpr int kDefaultBufferCount = 4;

// Use with dvrBeginRenderFrame to disable EDS for the current frame.
// The sentinel is encoded in the first lane of the pose quaternion vector.
constexpr float32x4_t DVR_POSE_NO_EDS = {10.0f, 0.0f, 0.0f, 0.0f};

// Use with dvrBeginRenderFrame to indicate that GPU late-latching is being used
// for determining the render pose.
constexpr float32x4_t DVR_POSE_LATE_LATCH = {20.0f, 0.0f, 0.0f, 0.0f};
56
57#ifndef NDEBUG
58
59static const char* GetGlCallbackType(GLenum type) {
60 switch (type) {
61 case GL_DEBUG_TYPE_ERROR_KHR:
62 return "ERROR";
63 case GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR_KHR:
64 return "DEPRECATED_BEHAVIOR";
65 case GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR_KHR:
66 return "UNDEFINED_BEHAVIOR";
67 case GL_DEBUG_TYPE_PORTABILITY_KHR:
68 return "PORTABILITY";
69 case GL_DEBUG_TYPE_PERFORMANCE_KHR:
70 return "PERFORMANCE";
71 case GL_DEBUG_TYPE_OTHER_KHR:
72 return "OTHER";
73 default:
74 return "UNKNOWN";
75 }
76}
77
// KHR_debug callback: formats the GL debug message and routes it to the
// Android log at a priority matching the GL severity, then mirrors it to
// stderr.
static void on_gl_error(GLenum /*source*/, GLenum type, GLuint /*id*/,
                        GLenum severity, GLsizei /*length*/,
                        const char* message, const void* /*user_param*/) {
  char msg[400];
  // NOTE: __LINE__ expands to this snprintf call's own line, not the GL call
  // site that triggered the error; the useful context is the type/message.
  snprintf(msg, sizeof(msg), "[" __FILE__ ":%u] GL %s: %s", __LINE__,
           GetGlCallbackType(type), message);
  switch (severity) {
    case GL_DEBUG_SEVERITY_LOW_KHR:
      ALOGI("%s", msg);
      break;
    case GL_DEBUG_SEVERITY_MEDIUM_KHR:
      ALOGW("%s", msg);
      break;
    case GL_DEBUG_SEVERITY_HIGH_KHR:
      ALOGE("%s", msg);
      break;
  }
  // Severities not matched above (e.g. NOTIFICATION) skip the Android log
  // but still reach stderr here.
  fprintf(stderr, "%s\n", msg);
}
97
98#endif
99
100int DvrToHalSurfaceFormat(int dvr_surface_format) {
101 switch (dvr_surface_format) {
102 case DVR_SURFACE_FORMAT_RGBA_8888:
103 return HAL_PIXEL_FORMAT_RGBA_8888;
104 case DVR_SURFACE_FORMAT_RGB_565:
105 return HAL_PIXEL_FORMAT_RGB_565;
106 default:
107 return HAL_PIXEL_FORMAT_RGBA_8888;
108 }
109}
110
111int SelectEGLConfig(EGLDisplay dpy, EGLint* attr, unsigned format,
112 EGLConfig* config) {
113 std::array<EGLint, 4> desired_rgba;
114 switch (format) {
115 case HAL_PIXEL_FORMAT_RGBA_8888:
116 case HAL_PIXEL_FORMAT_BGRA_8888:
117 desired_rgba = {{8, 8, 8, 8}};
118 break;
119 case HAL_PIXEL_FORMAT_RGB_565:
120 desired_rgba = {{5, 6, 5, 0}};
121 break;
122 default:
123 ALOGE("Unsupported framebuffer pixel format %d", format);
124 return -1;
125 }
126
127 EGLint max_configs = 0;
128 if (eglGetConfigs(dpy, NULL, 0, &max_configs) == EGL_FALSE) {
129 ALOGE("No EGL configurations available?!");
130 return -1;
131 }
132
133 std::vector<EGLConfig> configs(max_configs);
134
135 EGLint num_configs;
136 if (eglChooseConfig(dpy, attr, &configs[0], max_configs, &num_configs) ==
137 EGL_FALSE) {
138 ALOGE("eglChooseConfig failed");
139 return -1;
140 }
141
142 std::array<EGLint, 4> config_rgba;
143 for (int i = 0; i < num_configs; i++) {
144 eglGetConfigAttrib(dpy, configs[i], EGL_RED_SIZE, &config_rgba[0]);
145 eglGetConfigAttrib(dpy, configs[i], EGL_GREEN_SIZE, &config_rgba[1]);
146 eglGetConfigAttrib(dpy, configs[i], EGL_BLUE_SIZE, &config_rgba[2]);
147 eglGetConfigAttrib(dpy, configs[i], EGL_ALPHA_SIZE, &config_rgba[3]);
148 if (config_rgba == desired_rgba) {
149 *config = configs[i];
150 return 0;
151 }
152 }
153
154 ALOGE("Cannot find a matching EGL config");
155 return -1;
156}
157
158void DestroyEglContext(EGLDisplay egl_display, EGLContext* egl_context) {
159 if (*egl_context != EGL_NO_CONTEXT) {
160 eglDestroyContext(egl_display, *egl_context);
161 *egl_context = EGL_NO_CONTEXT;
162 }
163}
164
165// Perform internal initialization. A GL context must be bound to the current
166// thread.
167// @param internally_created_context True if we created and own the GL context,
168// false if it was supplied by the application.
169// @return 0 if init was successful, or a negative error code on failure.
170int InitGl(bool internally_created_context) {
171 EGLDisplay egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
172 if (egl_display == EGL_NO_DISPLAY) {
173 ALOGE("eglGetDisplay failed");
174 return -EINVAL;
175 }
176
177 EGLContext egl_context = eglGetCurrentContext();
178 if (egl_context == EGL_NO_CONTEXT) {
179 ALOGE("No GL context bound");
180 return -EINVAL;
181 }
182
183 glGetError(); // Clear the error state
184 GLint major_version, minor_version;
185 glGetIntegerv(GL_MAJOR_VERSION, &major_version);
186 glGetIntegerv(GL_MINOR_VERSION, &minor_version);
187 if (glGetError() != GL_NO_ERROR) {
188 // GL_MAJOR_VERSION and GL_MINOR_VERSION were added in GLES 3. If we get an
189 // error querying them it's almost certainly because it's GLES 1 or 2.
190 ALOGE("Error getting GL version. Must be GLES 3.2 or greater.");
191 return -EINVAL;
192 }
193
194 if (major_version < 3 || (major_version == 3 && minor_version < 2)) {
195 ALOGE("Invalid GL version: %d.%d. Must be GLES 3.2 or greater.",
196 major_version, minor_version);
197 return -EINVAL;
198 }
199
200#ifndef NDEBUG
201 if (internally_created_context) {
202 // Enable verbose GL debug output.
203 glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS_KHR);
204 glDebugMessageCallbackKHR(on_gl_error, NULL);
205 GLuint unused_ids = 0;
206 glDebugMessageControlKHR(GL_DONT_CARE, GL_DONT_CARE, GL_DONT_CARE, 0,
207 &unused_ids, GL_TRUE);
208 }
209#else
210 (void)internally_created_context;
211#endif
212
213 load_gl_extensions();
214 return 0;
215}
216
// Creates a GLES 3.2 EGL context on |egl_display| and makes it current on
// the calling thread (with no draw/read surface).
// @param egl_display Display to create the context on; will be initialized.
// @param parameters DVR_SURFACE_PARAMETER_NONE-terminated list; may be null.
//     Only DVR_SURFACE_PARAMETER_FORMAT_IN is consulted here, to pick the
//     EGL config's channel depths.
// @param egl_context Out: the created context, or EGL_NO_CONTEXT on failure.
// @return 0 on success or a negative errno-style error code.
int CreateEglContext(EGLDisplay egl_display, DvrSurfaceParameter* parameters,
                     EGLContext* egl_context) {
  *egl_context = EGL_NO_CONTEXT;

  EGLint major, minor;
  if (!eglInitialize(egl_display, &major, &minor)) {
    ALOGE("Failed to initialize EGL");
    return -ENXIO;
  }

  ALOGI("EGL version: %d.%d\n", major, minor);

  int buffer_format = kDefaultDisplaySurfaceFormat;

  // Scan the parameter list for a caller-requested buffer format.
  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_FORMAT_IN:
        buffer_format = DvrToHalSurfaceFormat(p->value);
        break;
    }
  }

  EGLint config_attrs[] = {EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
                           EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_NONE};
  EGLConfig config = {0};

  int ret = SelectEGLConfig(egl_display, config_attrs, buffer_format, &config);
  if (ret < 0)
    return ret;

  ALOGI("EGL SelectEGLConfig ok.\n");

  // Request a 3.2 context; debug builds also set the KHR_debug context flag
  // so the on_gl_error callback can be installed by InitGl.
  EGLint context_attrs[] = {EGL_CONTEXT_MAJOR_VERSION,
                            3,
                            EGL_CONTEXT_MINOR_VERSION,
                            2,
#ifndef NDEBUG
                            EGL_CONTEXT_FLAGS_KHR,
                            EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR,
#endif
                            EGL_NONE};

  *egl_context =
      eglCreateContext(egl_display, config, EGL_NO_CONTEXT, context_attrs);
  if (*egl_context == EGL_NO_CONTEXT) {
    ALOGE("eglCreateContext failed");
    return -ENXIO;
  }

  ALOGI("eglCreateContext ok.\n");

  // Bind the context surfaceless; on failure the context is destroyed so the
  // out-parameter never holds a context the caller must clean up.
  if (!eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE,
                      *egl_context)) {
    ALOGE("eglMakeCurrent failed");
    DestroyEglContext(egl_display, egl_context);
    return -EINVAL;
  }

  return 0;
}
277
278} // anonymous namespace
279
280// TODO(hendrikw): When we remove the calls to this in native_window.cpp, move
281// this back into the anonymous namespace
282std::shared_ptr<android::dvr::DisplaySurfaceClient> CreateDisplaySurfaceClient(
283 struct DvrSurfaceParameter* parameters,
284 /*out*/ android::dvr::SystemDisplayMetrics* metrics) {
285 auto client = android::dvr::DisplayClient::Create();
286 if (!client) {
287 ALOGE("Failed to create display client!");
288 return nullptr;
289 }
290
291 const int ret = client->GetDisplayMetrics(metrics);
292 if (ret < 0) {
293 ALOGE("Failed to get display metrics: %s", strerror(-ret));
294 return nullptr;
295 }
296
297 // Parameters that may be modified by the parameters array. Some of these are
298 // here for future expansion.
299 int request_width = -1;
300 int request_height = -1;
301 int request_flags = 0;
302 bool disable_distortion = false;
303 bool disable_stabilization = false;
304 bool disable_cac = false;
305 bool request_visible = true;
306 bool vertical_flip = false;
307 int request_z_order = 0;
308 bool request_exclude_from_blur = false;
309 bool request_blur_behind = true;
310 int request_format = kDefaultDisplaySurfaceFormat;
311 int request_usage = kDefaultDisplaySurfaceUsage;
312 int geometry_type = DVR_SURFACE_GEOMETRY_SINGLE;
313
314 // Handle parameter inputs.
315 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
316 switch (p->key) {
317 case DVR_SURFACE_PARAMETER_WIDTH_IN:
318 request_width = p->value;
319 break;
320 case DVR_SURFACE_PARAMETER_HEIGHT_IN:
321 request_height = p->value;
322 break;
323 case DVR_SURFACE_PARAMETER_DISABLE_DISTORTION_IN:
324 disable_distortion = !!p->value;
325 break;
326 case DVR_SURFACE_PARAMETER_DISABLE_STABILIZATION_IN:
327 disable_stabilization = !!p->value;
328 break;
329 case DVR_SURFACE_PARAMETER_DISABLE_CAC_IN:
330 disable_cac = !!p->value;
331 break;
332 case DVR_SURFACE_PARAMETER_VISIBLE_IN:
333 request_visible = !!p->value;
334 break;
335 case DVR_SURFACE_PARAMETER_Z_ORDER_IN:
336 request_z_order = p->value;
337 break;
338 case DVR_SURFACE_PARAMETER_EXCLUDE_FROM_BLUR_IN:
339 request_exclude_from_blur = !!p->value;
340 break;
341 case DVR_SURFACE_PARAMETER_BLUR_BEHIND_IN:
342 request_blur_behind = !!p->value;
343 break;
344 case DVR_SURFACE_PARAMETER_VERTICAL_FLIP_IN:
345 vertical_flip = !!p->value;
346 break;
347 case DVR_SURFACE_PARAMETER_GEOMETRY_IN:
348 geometry_type = p->value;
349 break;
350 case DVR_SURFACE_PARAMETER_FORMAT_IN:
351 request_format = DvrToHalSurfaceFormat(p->value);
352 break;
353 case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
354 case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
355 case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
356 case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
357 case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
358 case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
359 case DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT:
360 case DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT:
361 case DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT:
362 case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
363 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
364 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
365 case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
366 case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
367 case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
368 case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
369 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
370 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
371 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
372 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
373 break;
374 default:
375 ALOGE("Invalid display surface parameter: key=%d value=%ld", p->key,
376 p->value);
377 return nullptr;
378 }
379 }
380
381 request_flags |= disable_distortion
382 ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_DISTORTION
383 : 0;
384 request_flags |=
385 disable_stabilization ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_EDS : 0;
386 request_flags |=
387 disable_cac ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_CAC : 0;
388 request_flags |= vertical_flip ? DVR_DISPLAY_SURFACE_FLAGS_VERTICAL_FLIP : 0;
389 request_flags |= (geometry_type == DVR_SURFACE_GEOMETRY_SEPARATE_2)
390 ? DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2
391 : 0;
392
393 if (request_width == -1) {
394 request_width = disable_distortion ? metrics->display_native_width
395 : metrics->distorted_width;
396 if (!disable_distortion &&
397 geometry_type == DVR_SURFACE_GEOMETRY_SEPARATE_2) {
398 // The metrics always return the single wide buffer resolution.
399 // When split between eyes, we need to halve the width of the surface.
400 request_width /= 2;
401 }
402 }
403 if (request_height == -1) {
404 request_height = disable_distortion ? metrics->display_native_height
405 : metrics->distorted_height;
406 }
407
408 std::shared_ptr<android::dvr::DisplaySurfaceClient> surface =
409 client->CreateDisplaySurface(request_width, request_height,
410 request_format, request_usage,
411 request_flags);
412 surface->SetAttributes(
413 {{DisplaySurfaceAttributeEnum::Visible,
414 DisplaySurfaceAttributeValue{request_visible}},
415 {DisplaySurfaceAttributeEnum::ZOrder,
416 DisplaySurfaceAttributeValue{request_z_order}},
417 {DisplaySurfaceAttributeEnum::ExcludeFromBlur,
418 DisplaySurfaceAttributeValue{request_exclude_from_blur}},
419 {DisplaySurfaceAttributeEnum::BlurBehind,
420 DisplaySurfaceAttributeValue{request_blur_behind}}});
421
422 // Handle parameter output requests down here so we can return surface info.
423 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
424 switch (p->key) {
425 case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
426 *static_cast<int32_t*>(p->value_out) = metrics->display_native_width;
427 break;
428 case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
429 *static_cast<int32_t*>(p->value_out) = metrics->display_native_height;
430 break;
431 case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
432 *static_cast<int32_t*>(p->value_out) = surface->width();
433 break;
434 case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
435 *static_cast<int32_t*>(p->value_out) = surface->height();
436 break;
437 case DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT:
438 *static_cast<float*>(p->value_out) = metrics->inter_lens_distance_m;
439 break;
440 case DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT:
441 for (int i = 0; i < 4; ++i) {
442 float* float_values_out = static_cast<float*>(p->value_out);
443 float_values_out[i] = metrics->left_fov_lrbt[i];
444 }
445 break;
446 case DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT:
447 for (int i = 0; i < 4; ++i) {
448 float* float_values_out = static_cast<float*>(p->value_out);
449 float_values_out[i] = metrics->right_fov_lrbt[i];
450 }
451 break;
452 case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
453 *static_cast<uint64_t*>(p->value_out) = metrics->vsync_period_ns;
454 break;
455 default:
456 break;
457 }
458 }
459
460 return surface;
461}
462
463extern "C" int dvrGetNativeDisplayDimensions(int* native_width,
464 int* native_height) {
465 int error = 0;
466 auto client = android::dvr::DisplayClient::Create(&error);
467 if (!client) {
468 ALOGE("Failed to create display client!");
469 return error;
470 }
471
472 android::dvr::SystemDisplayMetrics metrics;
473 const int ret = client->GetDisplayMetrics(&metrics);
474
475 if (ret != 0) {
476 ALOGE("Failed to get display metrics!");
477 return ret;
478 }
479
480 *native_width = static_cast<int>(metrics.display_native_width);
481 *native_height = static_cast<int>(metrics.display_native_height);
482 return 0;
483}
484
485extern "C" int dvrGetDisplaySurfaceInfo(EGLNativeWindowType win, int* width,
486 int* height, int* format) {
487 ANativeWindow* nwin = reinterpret_cast<ANativeWindow*>(win);
488 int w, h, f;
489
490 nwin->query(nwin, NATIVE_WINDOW_DEFAULT_WIDTH, &w);
491 nwin->query(nwin, NATIVE_WINDOW_DEFAULT_HEIGHT, &h);
492 nwin->query(nwin, NATIVE_WINDOW_FORMAT, &f);
493
494 if (width)
495 *width = w;
496 if (height)
497 *height = h;
498 if (format)
499 *format = f;
500
501 return 0;
502}
503
// Graphics context that implements the ANativeWindow interface so it can back
// either a GLES texture pipeline or a Vulkan swapchain on top of a dvr
// display surface. Ref-counted via LightRefBase.
struct DvrGraphicsContext : public android::ANativeObjectBase<
                                ANativeWindow, DvrGraphicsContext,
                                android::LightRefBase<DvrGraphicsContext>> {
 public:
  DvrGraphicsContext();
  ~DvrGraphicsContext();

  int graphics_api;  // DVR_SURFACE_GRAPHICS_API_*

  // GL specific members.
  struct {
    EGLDisplay egl_display;
    EGLContext egl_context;
    // True if this context created egl_context and must destroy it.
    bool owns_egl_context;
    GLuint texture_id[kSurfaceViewMaxCount];
    // Number of entries of texture_id in use (1, or 2 for separate-eye
    // geometry).
    int texture_count;
    GLenum texture_target_type;
  } gl;

  // VK specific members
  struct {
    // These objects are passed in by the application, and are NOT owned
    // by the context.
    VkInstance instance;
    VkPhysicalDevice physical_device;
    VkDevice device;
    VkQueue present_queue;
    uint32_t present_queue_family;
    const VkAllocationCallbacks* allocation_callbacks;
    // These objects are owned by the context.
    ANativeWindow* window;
    VkSurfaceKHR surface;
    VkSwapchainKHR swapchain;
    std::vector<VkImage> swapchain_images;
    std::vector<VkImageView> swapchain_image_views;
  } vk;

  // Display surface, metrics, and buffer management members.
  std::shared_ptr<android::dvr::DisplaySurfaceClient> display_surface;
  android::dvr::SystemDisplayMetrics display_metrics;
  std::unique_ptr<android::dvr::NativeBufferQueue> buffer_queue;
  // Buffer currently dequeued for rendering; not owned (managed by
  // buffer_queue).
  android::dvr::NativeBufferProducer* current_buffer;
  bool buffer_already_posted;

  // Synchronization members.
  std::unique_ptr<android::dvr::VSyncClient> vsync_client;
  LocalHandle timerfd;

  android::dvr::FrameHistory frame_history;

  // Mapped surface metadata (ie: for pose delivery with presented frames).
  volatile android::dvr::DisplaySurfaceMetadata* surface_metadata;

  // LateLatch support.
  std::unique_ptr<android::dvr::LateLatch> late_latch;

  // Video mesh support.
  std::vector<std::shared_ptr<android::dvr::VideoMeshSurfaceClient>>
      video_mesh_surfaces;

 private:
  // ANativeWindow function implementations
  std::mutex lock_;
  int Post(android::dvr::NativeBufferProducer* buffer, int fence_fd);
  static int SetSwapInterval(ANativeWindow* window, int interval);
  static int DequeueBuffer(ANativeWindow* window, ANativeWindowBuffer** buffer,
                           int* fence_fd);
  static int QueueBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                         int fence_fd);
  static int CancelBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                          int fence_fd);
  static int Query(const ANativeWindow* window, int what, int* value);
  static int Perform(ANativeWindow* window, int operation, ...);
  static int DequeueBuffer_DEPRECATED(ANativeWindow* window,
                                      ANativeWindowBuffer** buffer);
  static int CancelBuffer_DEPRECATED(ANativeWindow* window,
                                     ANativeWindowBuffer* buffer);
  static int QueueBuffer_DEPRECATED(ANativeWindow* window,
                                    ANativeWindowBuffer* buffer);
  static int LockBuffer_DEPRECATED(ANativeWindow* window,
                                   ANativeWindowBuffer* buffer);

  DISALLOW_COPY_AND_ASSIGN(DvrGraphicsContext);
};
588
// Initializes members to safe defaults (GLES API, no display/context yet)
// and installs the ANativeWindow function table so this object can be used
// wherever an ANativeWindow* is expected.
DvrGraphicsContext::DvrGraphicsContext()
    : graphics_api(DVR_GRAPHICS_API_GLES),
      gl{},
      vk{},
      current_buffer(nullptr),
      buffer_already_posted(false),
      surface_metadata(nullptr) {
  gl.egl_display = EGL_NO_DISPLAY;
  gl.egl_context = EGL_NO_CONTEXT;
  gl.owns_egl_context = true;
  gl.texture_target_type = GL_TEXTURE_2D;

  // Hook the ANativeWindow entry points up to the static implementations
  // declared on this class.
  ANativeWindow::setSwapInterval = SetSwapInterval;
  ANativeWindow::dequeueBuffer = DequeueBuffer;
  ANativeWindow::cancelBuffer = CancelBuffer;
  ANativeWindow::queueBuffer = QueueBuffer;
  ANativeWindow::query = Query;
  ANativeWindow::perform = Perform;

  ANativeWindow::dequeueBuffer_DEPRECATED = DequeueBuffer_DEPRECATED;
  ANativeWindow::cancelBuffer_DEPRECATED = CancelBuffer_DEPRECATED;
  ANativeWindow::lockBuffer_DEPRECATED = LockBuffer_DEPRECATED;
  ANativeWindow::queueBuffer_DEPRECATED = QueueBuffer_DEPRECATED;
}
613
// Tears down the API-specific resources owned by this context: GL textures
// and (when owned) the EGL context, or the Vulkan image views, swapchain and
// surface. The application-supplied Vulkan instance/device/queue are
// deliberately left alone.
DvrGraphicsContext::~DvrGraphicsContext() {
  if (graphics_api == DVR_GRAPHICS_API_GLES) {
    glDeleteTextures(gl.texture_count, gl.texture_id);
    if (gl.owns_egl_context)
      DestroyEglContext(gl.egl_display, &gl.egl_context);
  } else if (graphics_api == DVR_GRAPHICS_API_VULKAN) {
    if (vk.swapchain != VK_NULL_HANDLE) {
      for (auto view : vk.swapchain_image_views) {
        vkDestroyImageView(vk.device, view, vk.allocation_callbacks);
      }
      vkDestroySwapchainKHR(vk.device, vk.swapchain, vk.allocation_callbacks);
      vkDestroySurfaceKHR(vk.instance, vk.surface, vk.allocation_callbacks);
      // NOTE(review): vk.window's allocation site is not visible in this
      // file — confirm it is heap-allocated with new before delete.
      delete vk.window;
    }
  }
}
630
631int dvrGraphicsContextCreate(struct DvrSurfaceParameter* parameters,
632 DvrGraphicsContext** return_graphics_context) {
633 std::unique_ptr<DvrGraphicsContext> context(new DvrGraphicsContext);
634
635 // See whether we're using GL or Vulkan
636 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
637 switch (p->key) {
638 case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
639 context->graphics_api = p->value;
640 break;
641 }
642 }
643
644 if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
645 context->gl.egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
646 if (context->gl.egl_display == EGL_NO_DISPLAY) {
647 ALOGE("eglGetDisplay failed");
648 return -ENXIO;
649 }
650
651 // See if we should create a GL context
652 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
653 switch (p->key) {
654 case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
655 context->gl.owns_egl_context = p->value != 0;
656 break;
657 }
658 }
659
660 if (context->gl.owns_egl_context) {
661 int ret = CreateEglContext(context->gl.egl_display, parameters,
662 &context->gl.egl_context);
663 if (ret < 0)
664 return ret;
665 } else {
666 context->gl.egl_context = eglGetCurrentContext();
667 }
668
669 int ret = InitGl(context->gl.owns_egl_context);
670 if (ret < 0)
671 return ret;
672 } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
673 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
674 switch (p->key) {
675 case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
676 context->vk.instance = reinterpret_cast<VkInstance>(p->value);
677 break;
678 case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
679 context->vk.physical_device =
680 reinterpret_cast<VkPhysicalDevice>(p->value);
681 break;
682 case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
683 context->vk.device = reinterpret_cast<VkDevice>(p->value);
684 break;
685 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
686 context->vk.present_queue = reinterpret_cast<VkQueue>(p->value);
687 break;
688 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
689 context->vk.present_queue_family = static_cast<uint32_t>(p->value);
690 break;
691 }
692 }
693 } else {
694 ALOGE("Error: invalid graphics API type");
695 return -EINVAL;
696 }
697
698 context->display_surface =
699 CreateDisplaySurfaceClient(parameters, &context->display_metrics);
700 if (!context->display_surface) {
701 ALOGE("Error: failed to create display surface client");
702 return -ECOMM;
703 }
704
705 context->buffer_queue.reset(new android::dvr::NativeBufferQueue(
706 context->gl.egl_display, context->display_surface, kDefaultBufferCount));
707
708 // The way the call sequence works we need 1 more than the buffer queue
709 // capacity to store data for all pending frames
710 context->frame_history.Reset(context->buffer_queue->GetQueueCapacity() + 1);
711
712 context->vsync_client = android::dvr::VSyncClient::Create();
713 if (!context->vsync_client) {
714 ALOGE("Error: failed to create vsync client");
715 return -ECOMM;
716 }
717
718 context->timerfd.Reset(timerfd_create(CLOCK_MONOTONIC, 0));
719 if (!context->timerfd) {
720 ALOGE("Error: timerfd_create failed because: %s", strerror(errno));
721 return -EPERM;
722 }
723
724 context->surface_metadata = context->display_surface->GetMetadataBufferPtr();
725 if (!context->surface_metadata) {
726 ALOGE("Error: surface metadata allocation failed");
727 return -ENOMEM;
728 }
729
730 ALOGI("buffer: %d x %d\n", context->display_surface->width(),
731 context->display_surface->height());
732
733 if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
734 context->gl.texture_count = (context->display_surface->flags() &
735 DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2)
736 ? 2
737 : 1;
738
739 // Create the GL textures.
740 glGenTextures(context->gl.texture_count, context->gl.texture_id);
741
742 // We must make sure that we have at least one buffer allocated at this time
743 // so that anyone who tries to bind an FBO to context->texture_id
744 // will not get an incomplete buffer.
745 context->current_buffer = context->buffer_queue->Dequeue();
746 CHECK(context->gl.texture_count ==
747 context->current_buffer->buffer()->slice_count());
748 for (int i = 0; i < context->gl.texture_count; ++i) {
749 glBindTexture(context->gl.texture_target_type, context->gl.texture_id[i]);
750 glEGLImageTargetTexture2DOES(context->gl.texture_target_type,
751 context->current_buffer->image_khr(i));
752 }
753 glBindTexture(context->gl.texture_target_type, 0);
754 CHECK_GL();
755
756 bool is_late_latch = false;
757
758 // Pass back the texture target type and id.
759 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
760 switch (p->key) {
761 case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
762 is_late_latch = !!p->value;
763 break;
764 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
765 *static_cast<GLenum*>(p->value_out) = context->gl.texture_target_type;
766 break;
767 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
768 for (int i = 0; i < context->gl.texture_count; ++i) {
769 *(static_cast<GLuint*>(p->value_out) + i) =
770 context->gl.texture_id[i];
771 }
772 break;
773 }
774 }
775
776 // Initialize late latch.
777 if (is_late_latch) {
778 LocalHandle fd;
779 int ret = context->display_surface->GetMetadataBufferFd(&fd);
780 if (ret == 0) {
781 context->late_latch.reset(
782 new android::dvr::LateLatch(true, std::move(fd)));
783 } else {
784 ALOGE("Error: failed to get surface metadata buffer fd for late latch");
785 }
786 }
787 } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
788 VkResult result = VK_SUCCESS;
789 // Create a VkSurfaceKHR from the ANativeWindow.
790 VkAndroidSurfaceCreateInfoKHR android_surface_ci = {};
791 android_surface_ci.sType =
792 VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
793 android_surface_ci.window = context.get();
794 result = vkCreateAndroidSurfaceKHR(
795 context->vk.instance, &android_surface_ci,
796 context->vk.allocation_callbacks, &context->vk.surface);
797 CHECK_EQ(result, VK_SUCCESS);
798 VkBool32 surface_supports_present = VK_FALSE;
799 result = vkGetPhysicalDeviceSurfaceSupportKHR(
800 context->vk.physical_device, context->vk.present_queue_family,
801 context->vk.surface, &surface_supports_present);
802 CHECK_EQ(result, VK_SUCCESS);
803 if (!surface_supports_present) {
804 ALOGE("Error: provided queue family (%u) does not support presentation",
805 context->vk.present_queue_family);
806 return -EPERM;
807 }
808 VkSurfaceCapabilitiesKHR surface_capabilities = {};
809 result = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
810 context->vk.physical_device, context->vk.surface,
811 &surface_capabilities);
812 CHECK_EQ(result, VK_SUCCESS);
813 // Determine the swapchain image format.
814 uint32_t device_surface_format_count = 0;
815 result = vkGetPhysicalDeviceSurfaceFormatsKHR(
816 context->vk.physical_device, context->vk.surface,
817 &device_surface_format_count, nullptr);
818 CHECK_EQ(result, VK_SUCCESS);
819 std::vector<VkSurfaceFormatKHR> device_surface_formats(
820 device_surface_format_count);
821 result = vkGetPhysicalDeviceSurfaceFormatsKHR(
822 context->vk.physical_device, context->vk.surface,
823 &device_surface_format_count, device_surface_formats.data());
824 CHECK_EQ(result, VK_SUCCESS);
825 CHECK_GT(device_surface_format_count, 0U);
826 CHECK_NE(device_surface_formats[0].format, VK_FORMAT_UNDEFINED);
827 VkSurfaceFormatKHR present_surface_format = device_surface_formats[0];
828 // Determine the swapchain present mode.
829 // TODO(cort): query device_present_modes to make sure MAILBOX is supported.
830 // But according to libvulkan, it is.
831 uint32_t device_present_mode_count = 0;
832 result = vkGetPhysicalDeviceSurfacePresentModesKHR(
833 context->vk.physical_device, context->vk.surface,
834 &device_present_mode_count, nullptr);
835 CHECK_EQ(result, VK_SUCCESS);
836 std::vector<VkPresentModeKHR> device_present_modes(
837 device_present_mode_count);
838 result = vkGetPhysicalDeviceSurfacePresentModesKHR(
839 context->vk.physical_device, context->vk.surface,
840 &device_present_mode_count, device_present_modes.data());
841 CHECK_EQ(result, VK_SUCCESS);
842 VkPresentModeKHR present_mode = VK_PRESENT_MODE_MAILBOX_KHR;
843 // Extract presentation surface extents, image count, transform, usages,
844 // etc.
845 LOG_ASSERT(
846 static_cast<int>(surface_capabilities.currentExtent.width) != -1 &&
847 static_cast<int>(surface_capabilities.currentExtent.height) != -1);
848 VkExtent2D swapchain_extent = surface_capabilities.currentExtent;
849
850 uint32_t desired_image_count = surface_capabilities.minImageCount;
851 if (surface_capabilities.maxImageCount > 0 &&
852 desired_image_count > surface_capabilities.maxImageCount) {
853 desired_image_count = surface_capabilities.maxImageCount;
854 }
855 VkSurfaceTransformFlagBitsKHR surface_transform =
856 surface_capabilities.currentTransform;
857 VkImageUsageFlags image_usage_flags =
858 surface_capabilities.supportedUsageFlags;
859 CHECK_NE(surface_capabilities.supportedCompositeAlpha,
860 static_cast<VkFlags>(0));
861 VkCompositeAlphaFlagBitsKHR composite_alpha =
862 VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
863 if (!(surface_capabilities.supportedCompositeAlpha &
864 VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR)) {
865 composite_alpha = VkCompositeAlphaFlagBitsKHR(
866 static_cast<int>(surface_capabilities.supportedCompositeAlpha) &
867 -static_cast<int>(surface_capabilities.supportedCompositeAlpha));
868 }
869 // Create VkSwapchainKHR
870 VkSwapchainCreateInfoKHR swapchain_ci = {};
871 swapchain_ci.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
872 swapchain_ci.pNext = nullptr;
873 swapchain_ci.surface = context->vk.surface;
874 swapchain_ci.minImageCount = desired_image_count;
875 swapchain_ci.imageFormat = present_surface_format.format;
876 swapchain_ci.imageColorSpace = present_surface_format.colorSpace;
877 swapchain_ci.imageExtent.width = swapchain_extent.width;
878 swapchain_ci.imageExtent.height = swapchain_extent.height;
879 swapchain_ci.imageUsage = image_usage_flags;
880 swapchain_ci.preTransform = surface_transform;
881 swapchain_ci.compositeAlpha = composite_alpha;
882 swapchain_ci.imageArrayLayers = 1;
883 swapchain_ci.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
884 swapchain_ci.queueFamilyIndexCount = 0;
885 swapchain_ci.pQueueFamilyIndices = nullptr;
886 swapchain_ci.presentMode = present_mode;
887 swapchain_ci.clipped = VK_TRUE;
888 swapchain_ci.oldSwapchain = VK_NULL_HANDLE;
889 result = vkCreateSwapchainKHR(context->vk.device, &swapchain_ci,
890 context->vk.allocation_callbacks,
891 &context->vk.swapchain);
892 CHECK_EQ(result, VK_SUCCESS);
893 // Create swapchain image views
894 uint32_t image_count = 0;
895 result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
896 &image_count, nullptr);
897 CHECK_EQ(result, VK_SUCCESS);
898 CHECK_GT(image_count, 0U);
899 context->vk.swapchain_images.resize(image_count);
900 result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
901 &image_count,
902 context->vk.swapchain_images.data());
903 CHECK_EQ(result, VK_SUCCESS);
904 context->vk.swapchain_image_views.resize(image_count);
905 VkImageViewCreateInfo image_view_ci = {};
906 image_view_ci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
907 image_view_ci.pNext = nullptr;
908 image_view_ci.flags = 0;
909 image_view_ci.format = swapchain_ci.imageFormat;
910 image_view_ci.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
911 image_view_ci.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
912 image_view_ci.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
913 image_view_ci.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
914 image_view_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
915 image_view_ci.subresourceRange.baseMipLevel = 0;
916 image_view_ci.subresourceRange.levelCount = 1;
917 image_view_ci.subresourceRange.baseArrayLayer = 0;
918 image_view_ci.subresourceRange.layerCount = 1;
919 image_view_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
920 image_view_ci.image = VK_NULL_HANDLE; // filled in below
921 for (uint32_t i = 0; i < image_count; ++i) {
922 image_view_ci.image = context->vk.swapchain_images[i];
923 result = vkCreateImageView(context->vk.device, &image_view_ci,
924 context->vk.allocation_callbacks,
925 &context->vk.swapchain_image_views[i]);
926 CHECK_EQ(result, VK_SUCCESS);
927 }
928 // Fill in any requested output parameters.
929 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
930 switch (p->key) {
931 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
932 *static_cast<uint32_t*>(p->value_out) = image_count;
933 break;
934 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
935 *static_cast<VkFormat*>(p->value_out) = swapchain_ci.imageFormat;
936 break;
937 }
938 }
939 }
940
941 *return_graphics_context = context.release();
942 return 0;
943}
944
// Destroys a graphics context created by the corresponding create call.
// Deleting a null context is a no-op (standard delete semantics).
void dvrGraphicsContextDestroy(DvrGraphicsContext* graphics_context) {
  delete graphics_context;
}
948
949// ANativeWindow function implementations. These should only be used
950// by the Vulkan path.
// Posts a finished buffer to the display service. Vulkan path only; the GLES
// path posts from dvrPresent() instead.
//
// |fence_fd| is the acquire fence for the buffer's contents. Ownership of the
// fd passes to NativeBufferProducer::Post(), which closes it.
int DvrGraphicsContext::Post(android::dvr::NativeBufferProducer* buffer,
                             int fence_fd) {
  LOG_ASSERT(graphics_api == DVR_GRAPHICS_API_VULKAN);
  ATRACE_NAME(__PRETTY_FUNCTION__);
  ALOGI_IF(TRACE, "DvrGraphicsContext::Post: buffer_id=%d, fence_fd=%d",
           buffer->buffer()->id(), fence_fd);
  ALOGW_IF(!display_surface->visible(),
           "DvrGraphicsContext::Post: Posting buffer on invisible surface!!!");
  // The NativeBufferProducer closes the fence fd, so dup it for tracking in the
  // frame history.
  frame_history.OnFrameSubmit(LocalHandle::AsDuplicate(fence_fd));
  int result = buffer->Post(fence_fd, 0);
  return result;
}
965
966int DvrGraphicsContext::SetSwapInterval(ANativeWindow* window, int interval) {
967 ALOGI_IF(TRACE, "SetSwapInterval: window=%p interval=%d", window, interval);
968 DvrGraphicsContext* self = getSelf(window);
969 (void)self;
970 LOG_ASSERT(self->graphics_api == DVR_GRAPHICS_API_VULKAN);
971 return android::NO_ERROR;
972}
973
// ANativeWindow::dequeueBuffer hook (Vulkan path). Hands out the context's
// current buffer, dequeuing a new one from the buffer queue only if none is
// current. The caller receives the buffer's release fence in |*fence_fd| and
// takes ownership of that fd.
int DvrGraphicsContext::DequeueBuffer(ANativeWindow* window,
                                      ANativeWindowBuffer** buffer,
                                      int* fence_fd) {
  ATRACE_NAME(__PRETTY_FUNCTION__);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ASSERT(self->graphics_api == DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  // Dequeue lazily: a buffer may already be current from an earlier call.
  if (!self->current_buffer) {
    self->current_buffer = self->buffer_queue.get()->Dequeue();
  }
  ATRACE_ASYNC_BEGIN("BufferDraw", self->current_buffer->buffer()->id());
  // Transfer the release fence fd to the caller.
  *fence_fd = self->current_buffer->ClaimReleaseFence().Release();
  *buffer = self->current_buffer;

  ALOGI_IF(TRACE, "DvrGraphicsContext::DequeueBuffer: fence_fd=%d", *fence_fd);
  return android::NO_ERROR;
}
993
// ANativeWindow::queueBuffer hook (Vulkan path). Posts |buffer| to the display
// service unless it was already posted early (buffer_already_posted), in which
// case the fence is simply closed. Always clears the current-buffer state.
int DvrGraphicsContext::QueueBuffer(ANativeWindow* window,
                                    ANativeWindowBuffer* buffer, int fence_fd) {
  ATRACE_NAME("NativeWindow::QueueBuffer");
  ALOGI_IF(TRACE, "NativeWindow::QueueBuffer: fence_fd=%d", fence_fd);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ASSERT(self->graphics_api == DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  android::dvr::NativeBufferProducer* native_buffer =
      static_cast<android::dvr::NativeBufferProducer*>(buffer);
  ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id());
  bool do_post = true;
  if (self->buffer_already_posted) {
    // Check that the buffer is the one we expect, but handle it if this happens
    // in production by allowing this buffer to post on top of the previous one.
    DCHECK(native_buffer == self->current_buffer);
    if (native_buffer == self->current_buffer) {
      do_post = false;
      // Already posted without a fence; this fd is no longer needed.
      if (fence_fd >= 0)
        close(fence_fd);
    }
  }
  if (do_post) {
    ATRACE_ASYNC_BEGIN("BufferPost", native_buffer->buffer()->id());
    // Post() takes ownership of fence_fd.
    self->Post(native_buffer, fence_fd);
  }
  self->buffer_already_posted = false;
  self->current_buffer = nullptr;

  return android::NO_ERROR;
}
1026
// ANativeWindow::cancelBuffer hook (Vulkan path). Returns |buffer| to the
// local buffer queue without posting it — unless it was already posted early,
// in which case it cannot be re-enqueued and is simply dropped. The fence fd,
// if any, is closed here.
int DvrGraphicsContext::CancelBuffer(ANativeWindow* window,
                                     ANativeWindowBuffer* buffer,
                                     int fence_fd) {
  ATRACE_NAME("DvrGraphicsContext::CancelBuffer");
  ALOGI_IF(TRACE, "DvrGraphicsContext::CancelBuffer: fence_fd: %d", fence_fd);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ASSERT(self->graphics_api == DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  android::dvr::NativeBufferProducer* native_buffer =
      static_cast<android::dvr::NativeBufferProducer*>(buffer);
  ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id());
  ATRACE_INT("CancelBuffer", native_buffer->buffer()->id());
  bool do_enqueue = true;
  if (self->buffer_already_posted) {
    // Check that the buffer is the one we expect, but handle it if this happens
    // in production by returning this buffer to the buffer queue.
    DCHECK(native_buffer == self->current_buffer);
    if (native_buffer == self->current_buffer) {
      // Buffer was already posted to the display; don't put it back in the
      // queue.
      do_enqueue = false;
    }
  }
  if (do_enqueue) {
    self->buffer_queue.get()->Enqueue(native_buffer);
  }
  if (fence_fd >= 0)
    close(fence_fd);
  self->buffer_already_posted = false;
  self->current_buffer = nullptr;

  return android::NO_ERROR;
}
1060
1061int DvrGraphicsContext::Query(const ANativeWindow* window, int what,
1062 int* value) {
1063 DvrGraphicsContext* self = getSelf(const_cast<ANativeWindow*>(window));
1064 LOG_ASSERT(self->graphics_api == DVR_GRAPHICS_API_VULKAN);
1065 std::lock_guard<std::mutex> autolock(self->lock_);
1066
1067 switch (what) {
1068 case NATIVE_WINDOW_WIDTH:
1069 *value = self->display_surface->width();
1070 return android::NO_ERROR;
1071 case NATIVE_WINDOW_HEIGHT:
1072 *value = self->display_surface->height();
1073 return android::NO_ERROR;
1074 case NATIVE_WINDOW_FORMAT:
1075 *value = self->display_surface->format();
1076 return android::NO_ERROR;
1077 case NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS:
1078 *value = 1;
1079 return android::NO_ERROR;
1080 case NATIVE_WINDOW_CONCRETE_TYPE:
1081 *value = NATIVE_WINDOW_SURFACE;
1082 return android::NO_ERROR;
1083 case NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER:
1084 *value = 1;
1085 return android::NO_ERROR;
1086 case NATIVE_WINDOW_DEFAULT_WIDTH:
1087 *value = self->display_surface->width();
1088 return android::NO_ERROR;
1089 case NATIVE_WINDOW_DEFAULT_HEIGHT:
1090 *value = self->display_surface->height();
1091 return android::NO_ERROR;
1092 case NATIVE_WINDOW_TRANSFORM_HINT:
1093 *value = 0;
1094 return android::NO_ERROR;
1095 }
1096
1097 *value = 0;
1098 return android::BAD_VALUE;
1099}
1100
1101int DvrGraphicsContext::Perform(ANativeWindow* window, int operation, ...) {
1102 DvrGraphicsContext* self = getSelf(window);
1103 LOG_ASSERT(self->graphics_api == DVR_GRAPHICS_API_VULKAN);
1104 std::lock_guard<std::mutex> autolock(self->lock_);
1105
1106 va_list args;
1107 va_start(args, operation);
1108
1109 // TODO(eieio): The following operations are not used at this time. They are
1110 // included here to help document which operations may be useful and what
1111 // parameters they take.
1112 switch (operation) {
1113 case NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: {
1114 int w = va_arg(args, int);
1115 int h = va_arg(args, int);
1116 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: w=%d h=%d", w, h);
1117 return android::NO_ERROR;
1118 }
1119
1120 case NATIVE_WINDOW_SET_BUFFERS_FORMAT: {
1121 int format = va_arg(args, int);
1122 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_FORMAT: format=%d", format);
1123 return android::NO_ERROR;
1124 }
1125
1126 case NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: {
1127 int transform = va_arg(args, int);
1128 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: transform=%d",
1129 transform);
1130 return android::NO_ERROR;
1131 }
1132
1133 case NATIVE_WINDOW_SET_USAGE: {
1134 int usage = va_arg(args, int);
1135 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_USAGE: usage=%d", usage);
1136 return android::NO_ERROR;
1137 }
1138
1139 case NATIVE_WINDOW_CONNECT:
1140 case NATIVE_WINDOW_DISCONNECT:
1141 case NATIVE_WINDOW_SET_BUFFERS_GEOMETRY:
1142 case NATIVE_WINDOW_API_CONNECT:
1143 case NATIVE_WINDOW_API_DISCONNECT:
1144 // TODO(eieio): we should implement these
1145 return android::NO_ERROR;
1146
1147 case NATIVE_WINDOW_SET_BUFFER_COUNT: {
1148 int buffer_count = va_arg(args, int);
1149 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFER_COUNT: bufferCount=%d",
1150 buffer_count);
1151 return android::NO_ERROR;
1152 }
1153 case NATIVE_WINDOW_SET_BUFFERS_DATASPACE: {
1154 android_dataspace_t data_space =
1155 static_cast<android_dataspace_t>(va_arg(args, int));
1156 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DATASPACE: dataSpace=%d",
1157 data_space);
1158 return android::NO_ERROR;
1159 }
1160 case NATIVE_WINDOW_SET_SCALING_MODE: {
1161 int mode = va_arg(args, int);
1162 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_SCALING_MODE: mode=%d", mode);
1163 return android::NO_ERROR;
1164 }
1165
1166 case NATIVE_WINDOW_LOCK:
1167 case NATIVE_WINDOW_UNLOCK_AND_POST:
1168 case NATIVE_WINDOW_SET_CROP:
1169 case NATIVE_WINDOW_SET_BUFFERS_TIMESTAMP:
1170 return android::INVALID_OPERATION;
1171 }
1172
1173 return android::NAME_NOT_FOUND;
1174}
1175
// Deprecated ANativeWindow dequeue entry point with no fence out-parameter.
int DvrGraphicsContext::DequeueBuffer_DEPRECATED(ANativeWindow* window,
                                                 ANativeWindowBuffer** buffer) {
  int fence_fd = -1;
  int ret = DequeueBuffer(window, buffer, &fence_fd);

  // wait for fence
  // NOTE(review): close() does not wait on a sync fence fd — this drops the
  // release fence rather than waiting for it. Confirm whether a sync_wait()
  // is required here before relying on this deprecated path.
  if (ret == android::NO_ERROR && fence_fd != -1)
    close(fence_fd);

  return ret;
}
1187
1188int DvrGraphicsContext::CancelBuffer_DEPRECATED(ANativeWindow* window,
1189 ANativeWindowBuffer* buffer) {
1190 return CancelBuffer(window, buffer, -1);
1191}
1192
1193int DvrGraphicsContext::QueueBuffer_DEPRECATED(ANativeWindow* window,
1194 ANativeWindowBuffer* buffer) {
1195 return QueueBuffer(window, buffer, -1);
1196}
1197
// Deprecated lock entry point; intentionally a no-op for this window type.
int DvrGraphicsContext::LockBuffer_DEPRECATED(ANativeWindow* /*window*/,
                                              ANativeWindowBuffer* /*buffer*/) {
  return android::NO_ERROR;
}
1202// End ANativeWindow implementation
1203
// Records the render pose for the current buffer in the display surface
// metadata, indexed by the buffer's slot, so EDS can use it at scan-out.
// Skipped entirely when the orientation carries the late-latch sentinel,
// since in that mode the GPU writes the pose instead.
//
// Returns 0 on success, -EPERM if no buffer is current (dvrBeginRenderFrame
// must be called first).
int dvrSetEdsPose(DvrGraphicsContext* graphics_context,
                  float32x4_t render_pose_orientation,
                  float32x4_t render_pose_translation) {
  ATRACE_NAME("dvrSetEdsPose");
  if (!graphics_context->current_buffer) {
    ALOGE("dvrBeginRenderFrame must be called before dvrSetEdsPose");
    return -EPERM;
  }

  // When late-latching is enabled, the pose buffer is written by the GPU, so
  // we don't touch it here.
  float32x4_t is_late_latch = DVR_POSE_LATE_LATCH;
  if (render_pose_orientation[0] != is_late_latch[0]) {
    // volatile: the metadata buffer is presumably read by another consumer
    // (the display service) outside this process's control flow.
    volatile android::dvr::DisplaySurfaceMetadata* data =
        graphics_context->surface_metadata;
    uint32_t buffer_index =
        graphics_context->current_buffer->surface_buffer_index();
    ALOGE_IF(TRACE, "write pose index %d %f %f", buffer_index,
             render_pose_orientation[0], render_pose_orientation[1]);
    data->orientation[buffer_index] = render_pose_orientation;
    data->translation[buffer_index] = render_pose_translation;
  }

  return 0;
}
1229
// Begins a GLES render frame: dequeues a buffer if none is current, records
// the EDS pose for it, and rebinds the context's textures to the buffer's
// EGL images.
//
// Returns 0 on success or a negative errno value propagated from
// dvrSetEdsPose.
int dvrBeginRenderFrameEds(DvrGraphicsContext* graphics_context,
                           float32x4_t render_pose_orientation,
                           float32x4_t render_pose_translation) {
  ATRACE_NAME("dvrBeginRenderFrameEds");
  LOG_ASSERT(graphics_context->graphics_api == DVR_GRAPHICS_API_GLES);
  CHECK_GL();
  // Grab a buffer from the queue and set its pose.
  if (!graphics_context->current_buffer) {
    graphics_context->current_buffer =
        graphics_context->buffer_queue->Dequeue();
  }

  int ret = dvrSetEdsPose(graphics_context, render_pose_orientation,
                          render_pose_translation);
  if (ret < 0)
    return ret;

  ATRACE_ASYNC_BEGIN("BufferDraw",
                     graphics_context->current_buffer->buffer()->id());

  {
    ATRACE_NAME("glEGLImageTargetTexture2DOES");
    // Bind the texture to the latest buffer in the queue.
    for (int i = 0; i < graphics_context->gl.texture_count; ++i) {
      glBindTexture(graphics_context->gl.texture_target_type,
                    graphics_context->gl.texture_id[i]);
      glEGLImageTargetTexture2DOES(
          graphics_context->gl.texture_target_type,
          graphics_context->current_buffer->image_khr(i));
    }
    // Leave no texture bound to avoid accidental state leakage.
    glBindTexture(graphics_context->gl.texture_target_type, 0);
  }
  CHECK_GL();
  return 0;
}
1265int dvrBeginRenderFrameEdsVk(DvrGraphicsContext* graphics_context,
1266 float32x4_t render_pose_orientation,
1267 float32x4_t render_pose_translation,
1268 VkSemaphore acquire_semaphore,
1269 VkFence acquire_fence,
1270 uint32_t* swapchain_image_index,
1271 VkImageView* swapchain_image_view) {
1272 ATRACE_NAME("dvrBeginRenderFrameEds");
1273 LOG_ASSERT(graphics_context->graphics_api == DVR_GRAPHICS_API_VULKAN);
1274
1275 // Acquire a swapchain image. This calls Dequeue() internally.
1276 VkResult result = vkAcquireNextImageKHR(
1277 graphics_context->vk.device, graphics_context->vk.swapchain, UINT64_MAX,
1278 acquire_semaphore, acquire_fence, swapchain_image_index);
1279 if (result != VK_SUCCESS)
1280 return -EINVAL;
1281
1282 // Set the pose pose.
1283 int ret = dvrSetEdsPose(graphics_context, render_pose_orientation,
1284 render_pose_translation);
1285 if (ret < 0)
1286 return ret;
1287 *swapchain_image_view =
1288 graphics_context->vk.swapchain_image_views[*swapchain_image_index];
1289 return 0;
1290}
1291
// Begins a GLES render frame with EDS disabled for this frame (sentinel pose).
int dvrBeginRenderFrame(DvrGraphicsContext* graphics_context) {
  return dvrBeginRenderFrameEds(graphics_context, DVR_POSE_NO_EDS,
                                DVR_POSE_NO_EDS);
}
// Begins a Vulkan render frame with EDS disabled for this frame (sentinel
// pose); otherwise identical to dvrBeginRenderFrameEdsVk.
int dvrBeginRenderFrameVk(DvrGraphicsContext* graphics_context,
                          VkSemaphore acquire_semaphore, VkFence acquire_fence,
                          uint32_t* swapchain_image_index,
                          VkImageView* swapchain_image_view) {
  return dvrBeginRenderFrameEdsVk(
      graphics_context, DVR_POSE_NO_EDS, DVR_POSE_NO_EDS, acquire_semaphore,
      acquire_fence, swapchain_image_index, swapchain_image_view);
}
1304
// Begins a GLES render frame whose pose will be captured on the GPU via
// late-latching instead of being supplied by the CPU. Copies the per-view
// projection, eye-from-head, and pose-offset matrices into a LateLatchInput
// and queues it on the context's LateLatch object.
//
// Returns 0 on success, -EPERM if late-latching is not enabled on this
// context, -EINVAL if num_views exceeds DVR_GRAPHICS_SURFACE_MAX_VIEWS.
int dvrBeginRenderFrameLateLatch(DvrGraphicsContext* graphics_context,
                                 uint32_t /*flags*/,
                                 uint32_t target_vsync_count, int num_views,
                                 const float** projection_matrices,
                                 const float** eye_from_head_matrices,
                                 const float** pose_offset_matrices,
                                 uint32_t* out_late_latch_buffer_id) {
  if (!graphics_context->late_latch) {
    return -EPERM;
  }
  if (num_views > DVR_GRAPHICS_SURFACE_MAX_VIEWS) {
    LOG(ERROR) << "dvrBeginRenderFrameLateLatch called with too many views.";
    return -EINVAL;
  }
  // NOTE(review): the return value of dvrBeginRenderFrameEds is ignored here —
  // confirm whether failures should propagate to the caller.
  dvrBeginRenderFrameEds(graphics_context, DVR_POSE_LATE_LATCH,
                         DVR_POSE_LATE_LATCH);
  auto& ll = graphics_context->late_latch;
  // TODO(jbates) Need to change this shader so that it dumps the single
  // captured pose for both eyes into the display surface metadata buffer at
  // the right index.
  android::dvr::LateLatchInput input;
  memset(&input, 0, sizeof(input));
  // Each matrix is a 4x4 of floats (16 values).
  for (int i = 0; i < num_views; ++i) {
    memcpy(input.proj_mat + i, *(projection_matrices + i), 16 * sizeof(float));
    memcpy(input.eye_from_head_mat + i, *(eye_from_head_matrices + i),
           16 * sizeof(float));
    memcpy(input.pose_offset + i, *(pose_offset_matrices + i),
           16 * sizeof(float));
  }
  input.pose_index =
      target_vsync_count & android::dvr::kPoseAsyncBufferIndexMask;
  input.render_pose_index =
      graphics_context->current_buffer->surface_buffer_index();
  ll->AddLateLatch(input);
  *out_late_latch_buffer_id = ll->output_buffer_id();
  return 0;
}
1342
// Sleeps until the next frame should start rendering, based on the vsync
// service's schedule plus |start_delay_ns| (clamped to >= 0), then records the
// upcoming frame in the frame history and reports its vsync count and
// scheduled finish time through |out_next_frame_schedule| (may be null).
//
// Returns 0 when both the schedule fetch and the timed sleep succeed, -1
// otherwise (on schedule-fetch failure it still sleeps one vsync period to
// avoid a busy retry loop).
extern "C" int dvrGraphicsWaitNextFrame(
    DvrGraphicsContext* graphics_context, int64_t start_delay_ns,
    DvrFrameSchedule* out_next_frame_schedule) {
  start_delay_ns = std::max(start_delay_ns, static_cast<int64_t>(0));

  // We only do one-shot timers:
  int64_t wake_time_ns = 0;

  uint32_t current_frame_vsync;
  int64_t current_frame_scheduled_finish_ns;
  int64_t vsync_period_ns;

  int fetch_schedule_result = graphics_context->vsync_client->GetSchedInfo(
      &vsync_period_ns, &current_frame_scheduled_finish_ns,
      &current_frame_vsync);
  if (fetch_schedule_result == 0) {
    wake_time_ns = current_frame_scheduled_finish_ns + start_delay_ns;
    // If the last wakeup time is still in the future, use it instead to avoid
    // major schedule jumps when applications call WaitNextFrame with
    // aggressive offsets.
    int64_t now = android::dvr::GetSystemClockNs();
    if (android::dvr::TimestampGT(wake_time_ns - vsync_period_ns, now)) {
      wake_time_ns -= vsync_period_ns;
      --current_frame_vsync;
    }
    // If the next wakeup time is in the past, add a vsync period to keep the
    // application on schedule.
    if (android::dvr::TimestampLT(wake_time_ns, now)) {
      wake_time_ns += vsync_period_ns;
      ++current_frame_vsync;
    }
  } else {
    ALOGE("Error getting frame schedule because: %s",
          strerror(-fetch_schedule_result));
    // Sleep for a vsync period to avoid cascading failure.
    wake_time_ns = android::dvr::GetSystemClockNs() +
                   graphics_context->display_metrics.vsync_period_ns;
  }

  // Arm a one-shot absolute timer for the computed wake time and block on it.
  struct itimerspec wake_time;
  wake_time.it_interval.tv_sec = 0;
  wake_time.it_interval.tv_nsec = 0;
  wake_time.it_value = android::dvr::NsToTimespec(wake_time_ns);
  bool sleep_result =
      timerfd_settime(graphics_context->timerfd.Get(), TFD_TIMER_ABSTIME,
                      &wake_time, nullptr) == 0;
  if (sleep_result) {
    ATRACE_NAME("sleep");
    uint64_t expirations = 0;
    // read() blocks until the timer expires.
    sleep_result = read(graphics_context->timerfd.Get(), &expirations,
                        sizeof(uint64_t)) == sizeof(uint64_t);
    if (!sleep_result) {
      ALOGE("Error: timerfd read failed");
    }
  } else {
    ALOGE("Error: timerfd_settime failed because: %s", strerror(errno));
  }

  // Record the frame we are about to start in the frame history.
  auto& frame_history = graphics_context->frame_history;
  frame_history.CheckForFinishedFrames();
  if (fetch_schedule_result == 0) {
    uint32_t next_frame_vsync =
        current_frame_vsync +
        frame_history.PredictNextFrameVsyncInterval(vsync_period_ns);
    int64_t next_frame_scheduled_finish =
        (wake_time_ns - start_delay_ns) + vsync_period_ns;
    frame_history.OnFrameStart(next_frame_vsync, next_frame_scheduled_finish);
    if (out_next_frame_schedule) {
      out_next_frame_schedule->vsync_count = next_frame_vsync;
      out_next_frame_schedule->scheduled_frame_finish_ns =
          next_frame_scheduled_finish;
    }
  } else {
    // Schedule unknown: record a sentinel frame.
    frame_history.OnFrameStart(UINT32_MAX, -1);
  }

  return (fetch_schedule_result == 0 && sleep_result) ? 0 : -1;
}
1422
// Posts the current (or freshly dequeued) buffer to the display service before
// rendering completes, with no fence attached. GLES path only. Sets
// buffer_already_posted so the later dvrPresent() call does not post again.
extern "C" void dvrGraphicsPostEarly(DvrGraphicsContext* graphics_context) {
  ATRACE_NAME("dvrGraphicsPostEarly");
  ALOGI_IF(TRACE, "dvrGraphicsPostEarly");

  LOG_ASSERT(graphics_context->graphics_api == DVR_GRAPHICS_API_GLES);

  // Note that this function can be called before or after
  // dvrBeginRenderFrame.
  if (!graphics_context->buffer_already_posted) {
    graphics_context->buffer_already_posted = true;

    if (!graphics_context->current_buffer) {
      graphics_context->current_buffer =
          graphics_context->buffer_queue->Dequeue();
    }

    auto buffer = graphics_context->current_buffer->buffer().get();
    ATRACE_ASYNC_BEGIN("BufferPost", buffer->id());
    // Post with an empty (invalid) fence handle: no GPU sync is attached.
    int result = buffer->Post<uint64_t>(LocalHandle(), 0);
    if (result < 0)
      ALOGE("Buffer post failed: %d (%s)", result, strerror(-result));
  }
}
1446
// Finishes a GLES frame: creates a GL sync fence for the submitted rendering,
// posts the current buffer with that fence (unless it was already posted
// early), records the submission in the frame history, and clears the
// current-buffer state.
//
// Returns 0 on success, -EPERM if no frame was begun.
int dvrPresent(DvrGraphicsContext* graphics_context) {
  LOG_ASSERT(graphics_context->graphics_api == DVR_GRAPHICS_API_GLES);

  std::array<char, 128> buf;
  snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|",
           graphics_context->frame_history.GetCurrentFrameVsync());
  ATRACE_NAME(buf.data());

  if (!graphics_context->current_buffer) {
    ALOGE("Error: dvrPresent called without dvrBeginRenderFrame");
    return -EPERM;
  }

  // Fence that signals when the frame's GL commands have completed.
  LocalHandle fence_fd =
      android::dvr::CreateGLSyncAndFlush(graphics_context->gl.egl_display);

  ALOGI_IF(TRACE, "PostBuffer: buffer_id=%d, fence_fd=%d",
           graphics_context->current_buffer->buffer()->id(), fence_fd.Get());
  ALOGW_IF(!graphics_context->display_surface->visible(),
           "PostBuffer: Posting buffer on invisible surface!!!");

  auto buffer = graphics_context->current_buffer->buffer().get();
  ATRACE_ASYNC_END("BufferDraw", buffer->id());
  // Skip the post if dvrGraphicsPostEarly already posted this buffer.
  if (!graphics_context->buffer_already_posted) {
    ATRACE_ASYNC_BEGIN("BufferPost", buffer->id());
    int result = buffer->Post<uint64_t>(fence_fd, 0);
    if (result < 0)
      ALOGE("Buffer post failed: %d (%s)", result, strerror(-result));
  }

  graphics_context->frame_history.OnFrameSubmit(std::move(fence_fd));
  graphics_context->buffer_already_posted = false;
  graphics_context->current_buffer = nullptr;
  return 0;
}
1482
// Finishes a Vulkan frame by presenting |swapchain_image_index|, optionally
// waiting on |submit_semaphore| (pass VK_NULL_HANDLE for none).
//
// Returns 0 on success, -EPERM if no frame was begun, -EINVAL if
// vkQueuePresentKHR fails.
int dvrPresentVk(DvrGraphicsContext* graphics_context,
                 VkSemaphore submit_semaphore, uint32_t swapchain_image_index) {
  LOG_ASSERT(graphics_context->graphics_api == DVR_GRAPHICS_API_VULKAN);

  std::array<char, 128> buf;
  snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|",
           graphics_context->frame_history.GetCurrentFrameVsync());
  ATRACE_NAME(buf.data());

  if (!graphics_context->current_buffer) {
    ALOGE("Error: dvrPresentVk called without dvrBeginRenderFrameVk");
    return -EPERM;
  }

  // Present the specified image. Internally, this gets a fence from the
  // Vulkan driver and passes it to DvrGraphicsContext::Post(),
  // which in turn passes it to buffer->Post() and adds it to frame_history.
  VkPresentInfoKHR present_info = {};
  present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
  present_info.swapchainCount = 1;
  present_info.pSwapchains = &graphics_context->vk.swapchain;
  present_info.pImageIndices = &swapchain_image_index;
  present_info.waitSemaphoreCount =
      (submit_semaphore != VK_NULL_HANDLE) ? 1 : 0;
  present_info.pWaitSemaphores = &submit_semaphore;
  VkResult result =
      vkQueuePresentKHR(graphics_context->vk.present_queue, &present_info);
  if (result != VK_SUCCESS) {
    return -EINVAL;
  }

  return 0;
}
1516
1517extern "C" int dvrGetFrameScheduleResults(DvrGraphicsContext* context,
1518 DvrFrameScheduleResult* results,
1519 int in_result_count) {
1520 if (!context || !results)
1521 return -EINVAL;
1522
1523 return context->frame_history.GetPreviousFrameResults(results,
1524 in_result_count);
1525}
1526
// Shows (non-zero) or hides (zero) the context's display surface.
extern "C" void dvrGraphicsSurfaceSetVisible(
    DvrGraphicsContext* graphics_context, int visible) {
  graphics_context->display_surface->SetVisible(visible);
}
1531
// Returns 1 if the context's display surface is visible, 0 otherwise.
extern "C" int dvrGraphicsSurfaceGetVisible(
    DvrGraphicsContext* graphics_context) {
  return graphics_context->display_surface->visible() ? 1 : 0;
}
1536
// Sets the stacking order of the context's display surface.
extern "C" void dvrGraphicsSurfaceSetZOrder(
    DvrGraphicsContext* graphics_context, int z_order) {
  graphics_context->display_surface->SetZOrder(z_order);
}
1541
// Returns the stacking order of the context's display surface.
extern "C" int dvrGraphicsSurfaceGetZOrder(
    DvrGraphicsContext* graphics_context) {
  return graphics_context->display_surface->z_order();
}
1546
// Creates a video mesh surface attached to the context's display surface.
// Returns a heap-allocated surface the caller must destroy with
// dvrGraphicsVideoMeshSurfaceDestroy(), or nullptr if the service channel
// could not be created.
extern "C" DvrVideoMeshSurface* dvrGraphicsVideoMeshSurfaceCreate(
    DvrGraphicsContext* graphics_context) {
  auto display_surface = graphics_context->display_surface;
  // A DisplaySurface must be created prior to the creation of a
  // VideoMeshSurface.
  LOG_ASSERT(display_surface != nullptr);

  LocalChannelHandle surface_handle = display_surface->CreateVideoMeshSurface();
  if (!surface_handle.valid()) {
    return nullptr;
  }

  std::unique_ptr<DvrVideoMeshSurface> surface(new DvrVideoMeshSurface);
  surface->client =
      android::dvr::VideoMeshSurfaceClient::Import(std::move(surface_handle));

  // TODO(jwcai) The next line is not needed...
  // NOTE(review): producer_queue is unused, but GetProducerQueue() may have
  // side effects (importing the queue) — confirm before deleting this call.
  auto producer_queue = surface->client->GetProducerQueue();
  return surface.release();
}
1567
// Destroys a surface created by dvrGraphicsVideoMeshSurfaceCreate().
extern "C" void dvrGraphicsVideoMeshSurfaceDestroy(
    DvrVideoMeshSurface* surface) {
  delete surface;
}
1572
// Publishes a per-eye transform for the video mesh surface, slotted by the
// graphics context's current buffer index so it pairs with the frame being
// rendered.
//
// NOTE(review): each row written is built from transform[i], transform[i+4],
// transform[i+8], transform[i+12] — i.e. this transposes a column-major 4x4
// input into row vectors. Confirm the expected matrix layout with the
// consumer of the metadata buffer.
extern "C" void dvrGraphicsVideoMeshSurfacePresent(
    DvrGraphicsContext* graphics_context, DvrVideoMeshSurface* surface,
    const int eye, const float* transform) {
  // volatile: the metadata buffer is shared with an external reader.
  volatile android::dvr::VideoMeshSurfaceMetadata* metadata =
      surface->client->GetMetadataBufferPtr();

  const uint32_t graphics_buffer_index =
      graphics_context->current_buffer->surface_buffer_index();

  for (int i = 0; i < 4; ++i) {
    metadata->transform[graphics_buffer_index][eye].val[i] = {
        transform[i + 0], transform[i + 4], transform[i + 8], transform[i + 12],
    };
  }
}