#include <dvr/graphics.h>

#include <inttypes.h>
#include <sys/timerfd.h>
#include <array>
#include <vector>

#include <log/log.h>
#include <utils/Trace.h>

#ifndef VK_USE_PLATFORM_ANDROID_KHR
#define VK_USE_PLATFORM_ANDROID_KHR 1
#endif
#include <vulkan/vulkan.h>

#include <pdx/file_handle.h>
#include <private/dvr/clock_ns.h>
#include <private/dvr/debug.h>
#include <private/dvr/display_types.h>
#include <private/dvr/frame_history.h>
#include <private/dvr/gl_fenced_flush.h>
#include <private/dvr/graphics/vr_gl_extensions.h>
#include <private/dvr/graphics_private.h>
#include <private/dvr/late_latch.h>
#include <private/dvr/native_buffer_queue.h>
#include <private/dvr/sensor_constants.h>
#include <private/dvr/vsync_client.h>
#include <private/dvr/platform_defines.h>

#include <android/native_window.h>

#ifndef EGL_CONTEXT_MAJOR_VERSION
#define EGL_CONTEXT_MAJOR_VERSION 0x3098
#define EGL_CONTEXT_MINOR_VERSION 0x30FB
#endif

using android::pdx::LocalHandle;
using android::pdx::LocalChannelHandle;

using android::dvr::DisplaySurfaceAttributeEnum;
using android::dvr::DisplaySurfaceAttributeValue;

namespace {

// TODO(urbanus): revisit once we have per-platform usage config in place.
constexpr int kDefaultDisplaySurfaceUsage =
    GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE |
    GRALLOC_USAGE_QCOM_FRAMEBUFFER_COMPRESSION;
constexpr int kDefaultDisplaySurfaceFormat = HAL_PIXEL_FORMAT_RGBA_8888;
// TODO(alexst): revisit this count when HW encode is available for casting.
constexpr int kDefaultBufferCount = 4;

// Use with dvrBeginRenderFrame to disable EDS for the current frame.
constexpr float32x4_t DVR_POSE_NO_EDS = {10.0f, 0.0f, 0.0f, 0.0f};

// Use with dvrBeginRenderFrame to indicate that GPU late-latching is being used
// for determining the render pose.
constexpr float32x4_t DVR_POSE_LATE_LATCH = {20.0f, 0.0f, 0.0f, 0.0f};

#ifndef NDEBUG

static const char* GetGlCallbackType(GLenum type) {
  switch (type) {
    case GL_DEBUG_TYPE_ERROR_KHR:
      return "ERROR";
    case GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR_KHR:
      return "DEPRECATED_BEHAVIOR";
    case GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR_KHR:
      return "UNDEFINED_BEHAVIOR";
    case GL_DEBUG_TYPE_PORTABILITY_KHR:
      return "PORTABILITY";
    case GL_DEBUG_TYPE_PERFORMANCE_KHR:
      return "PERFORMANCE";
    case GL_DEBUG_TYPE_OTHER_KHR:
      return "OTHER";
    default:
      return "UNKNOWN";
  }
}

static void on_gl_error(GLenum /*source*/, GLenum type, GLuint /*id*/,
                        GLenum severity, GLsizei /*length*/,
                        const char* message, const void* /*user_param*/) {
  char msg[400];
  snprintf(msg, sizeof(msg), "[" __FILE__ ":%u] GL %s: %s", __LINE__,
           GetGlCallbackType(type), message);
  switch (severity) {
    case GL_DEBUG_SEVERITY_LOW_KHR:
      ALOGI("%s", msg);
      break;
    case GL_DEBUG_SEVERITY_MEDIUM_KHR:
      ALOGW("%s", msg);
      break;
    case GL_DEBUG_SEVERITY_HIGH_KHR:
      ALOGE("%s", msg);
      break;
  }
  fprintf(stderr, "%s\n", msg);
}

#endif

int DvrToHalSurfaceFormat(int dvr_surface_format) {
  switch (dvr_surface_format) {
    case DVR_SURFACE_FORMAT_RGBA_8888:
      return HAL_PIXEL_FORMAT_RGBA_8888;
    case DVR_SURFACE_FORMAT_RGB_565:
      return HAL_PIXEL_FORMAT_RGB_565;
    default:
      return HAL_PIXEL_FORMAT_RGBA_8888;
  }
}

int SelectEGLConfig(EGLDisplay dpy, EGLint* attr, unsigned format,
                    EGLConfig* config) {
  std::array<EGLint, 4> desired_rgba;
  switch (format) {
    case HAL_PIXEL_FORMAT_RGBA_8888:
    case HAL_PIXEL_FORMAT_BGRA_8888:
      desired_rgba = {{8, 8, 8, 8}};
      break;
    case HAL_PIXEL_FORMAT_RGB_565:
      desired_rgba = {{5, 6, 5, 0}};
      break;
    default:
      ALOGE("Unsupported framebuffer pixel format %d", format);
      return -1;
  }

  EGLint max_configs = 0;
  if (eglGetConfigs(dpy, NULL, 0, &max_configs) == EGL_FALSE) {
    ALOGE("No EGL configurations available?!");
    return -1;
  }

  std::vector<EGLConfig> configs(max_configs);

  EGLint num_configs;
  if (eglChooseConfig(dpy, attr, &configs[0], max_configs, &num_configs) ==
      EGL_FALSE) {
    ALOGE("eglChooseConfig failed");
    return -1;
  }

  std::array<EGLint, 4> config_rgba;
  for (int i = 0; i < num_configs; i++) {
    eglGetConfigAttrib(dpy, configs[i], EGL_RED_SIZE, &config_rgba[0]);
    eglGetConfigAttrib(dpy, configs[i], EGL_GREEN_SIZE, &config_rgba[1]);
    eglGetConfigAttrib(dpy, configs[i], EGL_BLUE_SIZE, &config_rgba[2]);
    eglGetConfigAttrib(dpy, configs[i], EGL_ALPHA_SIZE, &config_rgba[3]);
    if (config_rgba == desired_rgba) {
      *config = configs[i];
      return 0;
    }
  }

  ALOGE("Cannot find a matching EGL config");
  return -1;
}

void DestroyEglContext(EGLDisplay egl_display, EGLContext* egl_context) {
  if (*egl_context != EGL_NO_CONTEXT) {
    eglDestroyContext(egl_display, *egl_context);
    *egl_context = EGL_NO_CONTEXT;
  }
}

// Perform internal initialization. A GL context must be bound to the current
// thread.
// @param internally_created_context True if we created and own the GL context,
// false if it was supplied by the application.
// @return 0 if init was successful, or a negative error code on failure.
int InitGl(bool internally_created_context) {
  EGLDisplay egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
  if (egl_display == EGL_NO_DISPLAY) {
    ALOGE("eglGetDisplay failed");
    return -EINVAL;
  }

  EGLContext egl_context = eglGetCurrentContext();
  if (egl_context == EGL_NO_CONTEXT) {
    ALOGE("No GL context bound");
    return -EINVAL;
  }

  glGetError();  // Clear the error state
  GLint major_version, minor_version;
  glGetIntegerv(GL_MAJOR_VERSION, &major_version);
  glGetIntegerv(GL_MINOR_VERSION, &minor_version);
  if (glGetError() != GL_NO_ERROR) {
    // GL_MAJOR_VERSION and GL_MINOR_VERSION were added in GLES 3. If we get an
    // error querying them it's almost certainly because it's GLES 1 or 2.
    ALOGE("Error getting GL version. Must be GLES 3.2 or greater.");
    return -EINVAL;
  }

  if (major_version < 3 || (major_version == 3 && minor_version < 2)) {
    ALOGE("Invalid GL version: %d.%d. Must be GLES 3.2 or greater.",
          major_version, minor_version);
    return -EINVAL;
  }

#ifndef NDEBUG
  if (internally_created_context) {
    // Enable verbose GL debug output.
    glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS_KHR);
    glDebugMessageCallbackKHR(on_gl_error, NULL);
    GLuint unused_ids = 0;
    glDebugMessageControlKHR(GL_DONT_CARE, GL_DONT_CARE, GL_DONT_CARE, 0,
                             &unused_ids, GL_TRUE);
  }
#else
  (void)internally_created_context;
#endif

  load_gl_extensions();
  return 0;
}

int CreateEglContext(EGLDisplay egl_display, DvrSurfaceParameter* parameters,
                     EGLContext* egl_context) {
  *egl_context = EGL_NO_CONTEXT;

  EGLint major, minor;
  if (!eglInitialize(egl_display, &major, &minor)) {
    ALOGE("Failed to initialize EGL");
    return -ENXIO;
  }

  ALOGI("EGL version: %d.%d\n", major, minor);

  int buffer_format = kDefaultDisplaySurfaceFormat;

  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_FORMAT_IN:
        buffer_format = DvrToHalSurfaceFormat(p->value);
        break;
    }
  }

  EGLint config_attrs[] = {EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
                           EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_NONE};
  EGLConfig config = {0};

  int ret = SelectEGLConfig(egl_display, config_attrs, buffer_format, &config);
  if (ret < 0)
    return ret;

  ALOGI("EGL SelectEGLConfig ok.\n");

  EGLint context_attrs[] = {EGL_CONTEXT_MAJOR_VERSION,
                            3,
                            EGL_CONTEXT_MINOR_VERSION,
                            2,
#ifndef NDEBUG
                            EGL_CONTEXT_FLAGS_KHR,
                            EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR,
#endif
                            EGL_NONE};

  *egl_context =
      eglCreateContext(egl_display, config, EGL_NO_CONTEXT, context_attrs);
  if (*egl_context == EGL_NO_CONTEXT) {
    ALOGE("eglCreateContext failed");
    return -ENXIO;
  }

  ALOGI("eglCreateContext ok.\n");

  if (!eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE,
                      *egl_context)) {
    ALOGE("eglMakeCurrent failed");
    DestroyEglContext(egl_display, egl_context);
    return -EINVAL;
  }

  return 0;
}

}  // anonymous namespace

// TODO(hendrikw): When we remove the calls to this in native_window.cpp, move
// this back into the anonymous namespace.
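// The parameter array is a list of {key, value} entries terminated by
// DVR_SURFACE_PARAMETER_NONE: *_IN keys configure the surface request and
// *_OUT keys are written back through value_out once the surface exists
// (see the output loop at the bottom of this function).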
std::shared_ptr<android::dvr::DisplaySurfaceClient> CreateDisplaySurfaceClient(
    struct DvrSurfaceParameter* parameters,
    /*out*/ android::dvr::SystemDisplayMetrics* metrics) {
  auto client = android::dvr::DisplayClient::Create();
  if (!client) {
    ALOGE("Failed to create display client!");
    return nullptr;
  }

  const int ret = client->GetDisplayMetrics(metrics);
  if (ret < 0) {
    ALOGE("Failed to get display metrics: %s", strerror(-ret));
    return nullptr;
  }

  // Parameters that may be modified by the parameters array. Some of these are
  // here for future expansion.
  int request_width = -1;
  int request_height = -1;
  int request_flags = 0;
  bool disable_distortion = false;
  bool disable_stabilization = false;
  bool disable_cac = false;
  bool request_visible = true;
  bool vertical_flip = false;
  int request_z_order = 0;
  bool request_exclude_from_blur = false;
  bool request_blur_behind = true;
  int request_format = kDefaultDisplaySurfaceFormat;
  int request_usage = kDefaultDisplaySurfaceUsage;
  int geometry_type = DVR_SURFACE_GEOMETRY_SINGLE;

  // Handle parameter inputs.
  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_WIDTH_IN:
        request_width = p->value;
        break;
      case DVR_SURFACE_PARAMETER_HEIGHT_IN:
        request_height = p->value;
        break;
      case DVR_SURFACE_PARAMETER_DISABLE_DISTORTION_IN:
        disable_distortion = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_DISABLE_STABILIZATION_IN:
        disable_stabilization = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_DISABLE_CAC_IN:
        disable_cac = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_VISIBLE_IN:
        request_visible = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_Z_ORDER_IN:
        request_z_order = p->value;
        break;
      case DVR_SURFACE_PARAMETER_EXCLUDE_FROM_BLUR_IN:
        request_exclude_from_blur = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_BLUR_BEHIND_IN:
        request_blur_behind = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_VERTICAL_FLIP_IN:
        vertical_flip = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_GEOMETRY_IN:
        geometry_type = p->value;
        break;
      case DVR_SURFACE_PARAMETER_FORMAT_IN:
        request_format = DvrToHalSurfaceFormat(p->value);
        break;
      case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
      case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
      case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
      case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
      case DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT:
      case DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT:
      case DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT:
      case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
      case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
      case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
      case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
      case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
      case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
      case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
      case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
      case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
        break;
      default:
        ALOGE("Invalid display surface parameter: key=%d value=%" PRId64,
              p->key, p->value);
        return nullptr;
    }
  }

  request_flags |= disable_distortion
                       ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_DISTORTION
                       : 0;
  request_flags |=
      disable_stabilization ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_EDS : 0;
  request_flags |=
      disable_cac ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_CAC : 0;
  request_flags |= vertical_flip ? DVR_DISPLAY_SURFACE_FLAGS_VERTICAL_FLIP : 0;
  request_flags |= (geometry_type == DVR_SURFACE_GEOMETRY_SEPARATE_2)
                       ? DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2
                       : 0;

  if (request_width == -1) {
    request_width = disable_distortion ? metrics->display_native_width
                                       : metrics->distorted_width;
    if (!disable_distortion &&
        geometry_type == DVR_SURFACE_GEOMETRY_SEPARATE_2) {
      // The metrics always return the single wide buffer resolution.
      // When split between eyes, we need to halve the width of the surface.
      request_width /= 2;
    }
  }
  if (request_height == -1) {
    request_height = disable_distortion ? metrics->display_native_height
                                        : metrics->distorted_height;
  }

  std::shared_ptr<android::dvr::DisplaySurfaceClient> surface =
      client->CreateDisplaySurface(request_width, request_height,
                                   request_format, request_usage,
                                   request_flags);
  surface->SetAttributes(
      {{DisplaySurfaceAttributeEnum::Visible,
        DisplaySurfaceAttributeValue{request_visible}},
       {DisplaySurfaceAttributeEnum::ZOrder,
        DisplaySurfaceAttributeValue{request_z_order}},
       {DisplaySurfaceAttributeEnum::ExcludeFromBlur,
        DisplaySurfaceAttributeValue{request_exclude_from_blur}},
       {DisplaySurfaceAttributeEnum::BlurBehind,
        DisplaySurfaceAttributeValue{request_blur_behind}}});

  // Handle parameter output requests down here so we can return surface info.
  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
        *static_cast<int32_t*>(p->value_out) = metrics->display_native_width;
        break;
      case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
        *static_cast<int32_t*>(p->value_out) = metrics->display_native_height;
        break;
      case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
        *static_cast<int32_t*>(p->value_out) = surface->width();
        break;
      case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
        *static_cast<int32_t*>(p->value_out) = surface->height();
        break;
      case DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT:
        *static_cast<float*>(p->value_out) = metrics->inter_lens_distance_m;
        break;
      case DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT:
        for (int i = 0; i < 4; ++i) {
          float* float_values_out = static_cast<float*>(p->value_out);
          float_values_out[i] = metrics->left_fov_lrbt[i];
        }
        break;
      case DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT:
        for (int i = 0; i < 4; ++i) {
          float* float_values_out = static_cast<float*>(p->value_out);
          float_values_out[i] = metrics->right_fov_lrbt[i];
        }
        break;
      case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
        *static_cast<uint64_t*>(p->value_out) = metrics->vsync_period_ns;
        break;
      default:
        break;
    }
  }

  return surface;
}

extern "C" int dvrGetNativeDisplayDimensions(int* native_width,
                                             int* native_height) {
  int error = 0;
  auto client = android::dvr::DisplayClient::Create(&error);
  if (!client) {
    ALOGE("Failed to create display client!");
    return error;
  }

  android::dvr::SystemDisplayMetrics metrics;
  const int ret = client->GetDisplayMetrics(&metrics);

  if (ret != 0) {
    ALOGE("Failed to get display metrics!");
    return ret;
  }

  *native_width = static_cast<int>(metrics.display_native_width);
  *native_height = static_cast<int>(metrics.display_native_height);
  return 0;
}

struct DvrGraphicsContext : public android::ANativeObjectBase<
                                ANativeWindow, DvrGraphicsContext,
                                android::LightRefBase<DvrGraphicsContext>> {
 public:
  DvrGraphicsContext();
  ~DvrGraphicsContext();

  int graphics_api;  // DVR_SURFACE_GRAPHICS_API_*

  // GL specific members.
  struct {
    EGLDisplay egl_display;
    EGLContext egl_context;
    bool owns_egl_context;
    GLuint texture_id[kSurfaceViewMaxCount];
    int texture_count;
    GLenum texture_target_type;
  } gl;

  // VK specific members.
  struct {
    // These objects are passed in by the application, and are NOT owned
    // by the context.
    VkInstance instance;
    VkPhysicalDevice physical_device;
    VkDevice device;
    VkQueue present_queue;
    uint32_t present_queue_family;
    const VkAllocationCallbacks* allocation_callbacks;
    // These objects are owned by the context.
    ANativeWindow* window;
    VkSurfaceKHR surface;
    VkSwapchainKHR swapchain;
    std::vector<VkImage> swapchain_images;
    std::vector<VkImageView> swapchain_image_views;
  } vk;

  // Display surface, metrics, and buffer management members.
  std::shared_ptr<android::dvr::DisplaySurfaceClient> display_surface;
  android::dvr::SystemDisplayMetrics display_metrics;
  std::unique_ptr<android::dvr::NativeBufferQueue> buffer_queue;
  android::dvr::NativeBufferProducer* current_buffer;
  bool buffer_already_posted;

  // Synchronization members.
  std::unique_ptr<android::dvr::VSyncClient> vsync_client;
  LocalHandle timerfd;

  android::dvr::FrameHistory frame_history;

  // Mapped surface metadata (ie: for pose delivery with presented frames).
  volatile android::dvr::DisplaySurfaceMetadata* surface_metadata;

  // LateLatch support.
  std::unique_ptr<android::dvr::LateLatch> late_latch;

 private:
  // ANativeWindow function implementations.
  std::mutex lock_;
  int Post(android::dvr::NativeBufferProducer* buffer, int fence_fd);
  static int SetSwapInterval(ANativeWindow* window, int interval);
  static int DequeueBuffer(ANativeWindow* window, ANativeWindowBuffer** buffer,
                           int* fence_fd);
  static int QueueBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                         int fence_fd);
  static int CancelBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                          int fence_fd);
  static int Query(const ANativeWindow* window, int what, int* value);
  static int Perform(ANativeWindow* window, int operation, ...);
  static int DequeueBuffer_DEPRECATED(ANativeWindow* window,
                                      ANativeWindowBuffer** buffer);
  static int CancelBuffer_DEPRECATED(ANativeWindow* window,
                                     ANativeWindowBuffer* buffer);
  static int QueueBuffer_DEPRECATED(ANativeWindow* window,
                                    ANativeWindowBuffer* buffer);
  static int LockBuffer_DEPRECATED(ANativeWindow* window,
                                   ANativeWindowBuffer* buffer);

  DvrGraphicsContext(const DvrGraphicsContext&) = delete;
  void operator=(const DvrGraphicsContext&) = delete;
};

DvrGraphicsContext::DvrGraphicsContext()
    : graphics_api(DVR_GRAPHICS_API_GLES),
      gl{},
      vk{},
      current_buffer(nullptr),
      buffer_already_posted(false),
      surface_metadata(nullptr) {
  gl.egl_display = EGL_NO_DISPLAY;
  gl.egl_context = EGL_NO_CONTEXT;
  gl.owns_egl_context = true;
  gl.texture_target_type = GL_TEXTURE_2D;

  ANativeWindow::setSwapInterval = SetSwapInterval;
  ANativeWindow::dequeueBuffer = DequeueBuffer;
  ANativeWindow::cancelBuffer = CancelBuffer;
  ANativeWindow::queueBuffer = QueueBuffer;
  ANativeWindow::query = Query;
  ANativeWindow::perform = Perform;

  ANativeWindow::dequeueBuffer_DEPRECATED = DequeueBuffer_DEPRECATED;
  ANativeWindow::cancelBuffer_DEPRECATED = CancelBuffer_DEPRECATED;
  ANativeWindow::lockBuffer_DEPRECATED = LockBuffer_DEPRECATED;
  ANativeWindow::queueBuffer_DEPRECATED = QueueBuffer_DEPRECATED;
}

DvrGraphicsContext::~DvrGraphicsContext() {
  if (graphics_api == DVR_GRAPHICS_API_GLES) {
    glDeleteTextures(gl.texture_count, gl.texture_id);
    if (gl.owns_egl_context)
      DestroyEglContext(gl.egl_display, &gl.egl_context);
  } else if (graphics_api == DVR_GRAPHICS_API_VULKAN) {
    if (vk.swapchain != VK_NULL_HANDLE) {
      for (auto view : vk.swapchain_image_views) {
        vkDestroyImageView(vk.device, view, vk.allocation_callbacks);
      }
      vkDestroySwapchainKHR(vk.device, vk.swapchain, vk.allocation_callbacks);
      vkDestroySurfaceKHR(vk.instance, vk.surface, vk.allocation_callbacks);
      delete vk.window;
    }
  }
}

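// Illustrative GLES call sequence (a sketch only; the parameter values and
// the client-side loop below are assumptions, not code from this file):
//
//   DvrSurfaceParameter params[] = {
//       {/* key = */ DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN,
//        /* value = */ 1},
//       {/* key = */ DVR_SURFACE_PARAMETER_NONE},
//   };
//   DvrGraphicsContext* context = nullptr;
//   if (dvrGraphicsContextCreate(params, &context) == 0) {
//     while (rendering) {
//       dvrGraphicsWaitNextFrame(context, 0, nullptr);
//       dvrBeginRenderFrame(context);
//       // ... draw into the texture(s) reported via
//       // DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT ...
//       dvrPresent(context);
//     }
//     dvrGraphicsContextDestroy(context);
//   }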
int dvrGraphicsContextCreate(struct DvrSurfaceParameter* parameters,
                             DvrGraphicsContext** return_graphics_context) {
  std::unique_ptr<DvrGraphicsContext> context(new DvrGraphicsContext);

  // See whether we're using GL or Vulkan.
  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
        context->graphics_api = p->value;
        break;
    }
  }

  if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
    context->gl.egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (context->gl.egl_display == EGL_NO_DISPLAY) {
      ALOGE("eglGetDisplay failed");
      return -ENXIO;
    }

    // See if we should create a GL context.
    for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
      switch (p->key) {
        case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
          context->gl.owns_egl_context = p->value != 0;
          break;
      }
    }

    if (context->gl.owns_egl_context) {
      int ret = CreateEglContext(context->gl.egl_display, parameters,
                                 &context->gl.egl_context);
      if (ret < 0)
        return ret;
    } else {
      context->gl.egl_context = eglGetCurrentContext();
    }

    int ret = InitGl(context->gl.owns_egl_context);
    if (ret < 0)
      return ret;
  } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
    for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
      switch (p->key) {
        case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
          context->vk.instance = reinterpret_cast<VkInstance>(p->value);
          break;
        case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
          context->vk.physical_device =
              reinterpret_cast<VkPhysicalDevice>(p->value);
          break;
        case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
          context->vk.device = reinterpret_cast<VkDevice>(p->value);
          break;
        case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
          context->vk.present_queue = reinterpret_cast<VkQueue>(p->value);
          break;
        case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
          context->vk.present_queue_family = static_cast<uint32_t>(p->value);
          break;
      }
    }
  } else {
    ALOGE("Error: invalid graphics API type");
    return -EINVAL;
  }

  context->display_surface =
      CreateDisplaySurfaceClient(parameters, &context->display_metrics);
  if (!context->display_surface) {
    ALOGE("Error: failed to create display surface client");
    return -ECOMM;
  }

  context->buffer_queue.reset(new android::dvr::NativeBufferQueue(
      context->gl.egl_display, context->display_surface, kDefaultBufferCount));

  // Because of the call sequence, we need one more entry than the buffer
  // queue capacity to store data for all pending frames.
  context->frame_history.Reset(context->buffer_queue->GetQueueCapacity() + 1);

  context->vsync_client = android::dvr::VSyncClient::Create();
  if (!context->vsync_client) {
    ALOGE("Error: failed to create vsync client");
    return -ECOMM;
  }

  context->timerfd.Reset(timerfd_create(CLOCK_MONOTONIC, 0));
  if (!context->timerfd) {
    ALOGE("Error: timerfd_create failed because: %s", strerror(errno));
    return -EPERM;
  }

  context->surface_metadata = context->display_surface->GetMetadataBufferPtr();
  if (!context->surface_metadata) {
    ALOGE("Error: surface metadata allocation failed");
    return -ENOMEM;
  }

  ALOGI("buffer: %d x %d\n", context->display_surface->width(),
        context->display_surface->height());

  if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
    context->gl.texture_count = (context->display_surface->flags() &
                                 DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2)
                                    ? 2
                                    : 1;

    // Create the GL textures.
    glGenTextures(context->gl.texture_count, context->gl.texture_id);

    // We must make sure that we have at least one buffer allocated at this
    // time so that anyone who tries to bind an FBO to context->texture_id
    // will not get an incomplete buffer.
    context->current_buffer = context->buffer_queue->Dequeue();
    LOG_ALWAYS_FATAL_IF(context->gl.texture_count !=
                        context->current_buffer->buffer()->slice_count());
    for (int i = 0; i < context->gl.texture_count; ++i) {
      glBindTexture(context->gl.texture_target_type, context->gl.texture_id[i]);
      glEGLImageTargetTexture2DOES(context->gl.texture_target_type,
                                   context->current_buffer->image_khr(i));
    }
    glBindTexture(context->gl.texture_target_type, 0);
    CHECK_GL();

    bool is_late_latch = false;

    // Pass back the texture target type and id.
    for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
      switch (p->key) {
        case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
          is_late_latch = !!p->value;
          break;
        case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
          *static_cast<GLenum*>(p->value_out) = context->gl.texture_target_type;
          break;
        case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
          for (int i = 0; i < context->gl.texture_count; ++i) {
            *(static_cast<GLuint*>(p->value_out) + i) =
                context->gl.texture_id[i];
          }
          break;
      }
    }

    // Initialize late latch.
    if (is_late_latch) {
      LocalHandle fd;
      int ret = context->display_surface->GetMetadataBufferFd(&fd);
      if (ret == 0) {
        context->late_latch.reset(
            new android::dvr::LateLatch(true, std::move(fd)));
      } else {
        ALOGE("Error: failed to get surface metadata buffer fd for late latch");
      }
    }
  } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
    VkResult result = VK_SUCCESS;
    // Create a VkSurfaceKHR from the ANativeWindow.
    VkAndroidSurfaceCreateInfoKHR android_surface_ci = {};
    android_surface_ci.sType =
        VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
    android_surface_ci.window = context.get();
    result = vkCreateAndroidSurfaceKHR(
        context->vk.instance, &android_surface_ci,
        context->vk.allocation_callbacks, &context->vk.surface);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    VkBool32 surface_supports_present = VK_FALSE;
    result = vkGetPhysicalDeviceSurfaceSupportKHR(
        context->vk.physical_device, context->vk.present_queue_family,
        context->vk.surface, &surface_supports_present);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    if (!surface_supports_present) {
      ALOGE("Error: provided queue family (%u) does not support presentation",
            context->vk.present_queue_family);
      return -EPERM;
    }
    VkSurfaceCapabilitiesKHR surface_capabilities = {};
    result = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
        context->vk.physical_device, context->vk.surface,
        &surface_capabilities);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    // Determine the swapchain image format.
    uint32_t device_surface_format_count = 0;
    result = vkGetPhysicalDeviceSurfaceFormatsKHR(
        context->vk.physical_device, context->vk.surface,
        &device_surface_format_count, nullptr);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    std::vector<VkSurfaceFormatKHR> device_surface_formats(
        device_surface_format_count);
    result = vkGetPhysicalDeviceSurfaceFormatsKHR(
        context->vk.physical_device, context->vk.surface,
        &device_surface_format_count, device_surface_formats.data());
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    LOG_ALWAYS_FATAL_IF(device_surface_format_count == 0U);
    LOG_ALWAYS_FATAL_IF(device_surface_formats[0].format ==
                        VK_FORMAT_UNDEFINED);
    VkSurfaceFormatKHR present_surface_format = device_surface_formats[0];
    // Determine the swapchain present mode.
    // TODO(cort): query device_present_modes to make sure MAILBOX is supported.
    // But according to libvulkan, it is.
    uint32_t device_present_mode_count = 0;
    result = vkGetPhysicalDeviceSurfacePresentModesKHR(
        context->vk.physical_device, context->vk.surface,
        &device_present_mode_count, nullptr);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    std::vector<VkPresentModeKHR> device_present_modes(
        device_present_mode_count);
    result = vkGetPhysicalDeviceSurfacePresentModesKHR(
        context->vk.physical_device, context->vk.surface,
        &device_present_mode_count, device_present_modes.data());
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    VkPresentModeKHR present_mode = VK_PRESENT_MODE_MAILBOX_KHR;
    // Extract presentation surface extents, image count, transform, usages,
    // etc.
    LOG_ALWAYS_FATAL_IF(
        static_cast<int>(surface_capabilities.currentExtent.width) == -1 ||
        static_cast<int>(surface_capabilities.currentExtent.height) == -1);
    VkExtent2D swapchain_extent = surface_capabilities.currentExtent;

    uint32_t desired_image_count = surface_capabilities.minImageCount;
    if (surface_capabilities.maxImageCount > 0 &&
        desired_image_count > surface_capabilities.maxImageCount) {
      desired_image_count = surface_capabilities.maxImageCount;
    }
    VkSurfaceTransformFlagBitsKHR surface_transform =
        surface_capabilities.currentTransform;
    VkImageUsageFlags image_usage_flags =
        surface_capabilities.supportedUsageFlags;
    LOG_ALWAYS_FATAL_IF(surface_capabilities.supportedCompositeAlpha ==
                        static_cast<VkFlags>(0));
    VkCompositeAlphaFlagBitsKHR composite_alpha =
        VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
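    // Prefer opaque composition. If the surface does not support it, fall
    // back to the lowest supported composite-alpha bit (the x & -x idiom
    // below isolates the least significant set bit).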
    if (!(surface_capabilities.supportedCompositeAlpha &
          VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR)) {
      composite_alpha = VkCompositeAlphaFlagBitsKHR(
          static_cast<int>(surface_capabilities.supportedCompositeAlpha) &
          -static_cast<int>(surface_capabilities.supportedCompositeAlpha));
    }
    // Create VkSwapchainKHR.
    VkSwapchainCreateInfoKHR swapchain_ci = {};
    swapchain_ci.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
    swapchain_ci.pNext = nullptr;
    swapchain_ci.surface = context->vk.surface;
    swapchain_ci.minImageCount = desired_image_count;
    swapchain_ci.imageFormat = present_surface_format.format;
    swapchain_ci.imageColorSpace = present_surface_format.colorSpace;
    swapchain_ci.imageExtent.width = swapchain_extent.width;
    swapchain_ci.imageExtent.height = swapchain_extent.height;
    swapchain_ci.imageUsage = image_usage_flags;
    swapchain_ci.preTransform = surface_transform;
    swapchain_ci.compositeAlpha = composite_alpha;
    swapchain_ci.imageArrayLayers = 1;
    swapchain_ci.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
    swapchain_ci.queueFamilyIndexCount = 0;
    swapchain_ci.pQueueFamilyIndices = nullptr;
    swapchain_ci.presentMode = present_mode;
    swapchain_ci.clipped = VK_TRUE;
    swapchain_ci.oldSwapchain = VK_NULL_HANDLE;
    result = vkCreateSwapchainKHR(context->vk.device, &swapchain_ci,
                                  context->vk.allocation_callbacks,
                                  &context->vk.swapchain);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    // Create swapchain image views.
    uint32_t image_count = 0;
    result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
                                     &image_count, nullptr);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    LOG_ALWAYS_FATAL_IF(image_count == 0U);
    context->vk.swapchain_images.resize(image_count);
    result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
                                     &image_count,
                                     context->vk.swapchain_images.data());
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    context->vk.swapchain_image_views.resize(image_count);
    VkImageViewCreateInfo image_view_ci = {};
    image_view_ci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
    image_view_ci.pNext = nullptr;
    image_view_ci.flags = 0;
    image_view_ci.format = swapchain_ci.imageFormat;
    image_view_ci.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
    image_view_ci.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
    image_view_ci.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
    image_view_ci.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
    image_view_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    image_view_ci.subresourceRange.baseMipLevel = 0;
    image_view_ci.subresourceRange.levelCount = 1;
    image_view_ci.subresourceRange.baseArrayLayer = 0;
    image_view_ci.subresourceRange.layerCount = 1;
    image_view_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
    image_view_ci.image = VK_NULL_HANDLE;  // filled in below
    for (uint32_t i = 0; i < image_count; ++i) {
      image_view_ci.image = context->vk.swapchain_images[i];
      result = vkCreateImageView(context->vk.device, &image_view_ci,
                                 context->vk.allocation_callbacks,
                                 &context->vk.swapchain_image_views[i]);
      LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    }
    // Fill in any requested output parameters.
    for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
      switch (p->key) {
        case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
          *static_cast<uint32_t*>(p->value_out) = image_count;
          break;
        case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
          *static_cast<VkFormat*>(p->value_out) = swapchain_ci.imageFormat;
          break;
      }
    }
  }

  *return_graphics_context = context.release();
  return 0;
}

void dvrGraphicsContextDestroy(DvrGraphicsContext* graphics_context) {
  delete graphics_context;
}

// ANativeWindow function implementations. These should only be used
// by the Vulkan path.
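// On Android, the Vulkan WSI swapchain is layered on top of ANativeWindow, so
// vkAcquireNextImageKHR ends up in DequeueBuffer() and vkQueuePresentKHR in
// QueueBuffer(), which forwards the driver's fence to Post().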
int DvrGraphicsContext::Post(android::dvr::NativeBufferProducer* buffer,
                             int fence_fd) {
  LOG_ALWAYS_FATAL_IF(graphics_api != DVR_GRAPHICS_API_VULKAN);
  ATRACE_NAME(__PRETTY_FUNCTION__);
  ALOGI_IF(TRACE, "DvrGraphicsContext::Post: buffer_id=%d, fence_fd=%d",
           buffer->buffer()->id(), fence_fd);
  ALOGW_IF(!display_surface->visible(),
           "DvrGraphicsContext::Post: Posting buffer on invisible surface!!!");
  // The NativeBufferProducer closes the fence fd, so dup it for tracking in
  // the frame history.
  frame_history.OnFrameSubmit(LocalHandle::AsDuplicate(fence_fd));
  int result = buffer->Post(fence_fd, 0);
  return result;
}

int DvrGraphicsContext::SetSwapInterval(ANativeWindow* window, int interval) {
  ALOGI_IF(TRACE, "SetSwapInterval: window=%p interval=%d", window, interval);
  DvrGraphicsContext* self = getSelf(window);
  (void)self;
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  return android::NO_ERROR;
}

int DvrGraphicsContext::DequeueBuffer(ANativeWindow* window,
                                      ANativeWindowBuffer** buffer,
                                      int* fence_fd) {
  ATRACE_NAME(__PRETTY_FUNCTION__);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  if (!self->current_buffer) {
    self->current_buffer = self->buffer_queue.get()->Dequeue();
  }
  ATRACE_ASYNC_BEGIN("BufferDraw", self->current_buffer->buffer()->id());
  *fence_fd = self->current_buffer->ClaimReleaseFence().Release();
  *buffer = self->current_buffer;

  ALOGI_IF(TRACE, "DvrGraphicsContext::DequeueBuffer: fence_fd=%d", *fence_fd);
  return android::NO_ERROR;
}

int DvrGraphicsContext::QueueBuffer(ANativeWindow* window,
                                    ANativeWindowBuffer* buffer, int fence_fd) {
  ATRACE_NAME("NativeWindow::QueueBuffer");
  ALOGI_IF(TRACE, "NativeWindow::QueueBuffer: fence_fd=%d", fence_fd);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  android::dvr::NativeBufferProducer* native_buffer =
      static_cast<android::dvr::NativeBufferProducer*>(buffer);
  ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id());
  bool do_post = true;
  if (self->buffer_already_posted) {
    // Check that the buffer is the one we expect, but handle it if this
    // happens in production by allowing this buffer to post on top of the
    // previous one.
    LOG_FATAL_IF(native_buffer != self->current_buffer);
    if (native_buffer == self->current_buffer) {
      do_post = false;
      if (fence_fd >= 0)
        close(fence_fd);
    }
  }
  if (do_post) {
    ATRACE_ASYNC_BEGIN("BufferPost", native_buffer->buffer()->id());
    self->Post(native_buffer, fence_fd);
  }
  self->buffer_already_posted = false;
  self->current_buffer = nullptr;

  return android::NO_ERROR;
}

int DvrGraphicsContext::CancelBuffer(ANativeWindow* window,
                                     ANativeWindowBuffer* buffer,
                                     int fence_fd) {
  ATRACE_NAME("DvrGraphicsContext::CancelBuffer");
  ALOGI_IF(TRACE, "DvrGraphicsContext::CancelBuffer: fence_fd: %d", fence_fd);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  android::dvr::NativeBufferProducer* native_buffer =
      static_cast<android::dvr::NativeBufferProducer*>(buffer);
  ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id());
  ATRACE_INT("CancelBuffer", native_buffer->buffer()->id());
  bool do_enqueue = true;
  if (self->buffer_already_posted) {
    // Check that the buffer is the one we expect, but handle it if this
    // happens in production by returning this buffer to the buffer queue.
    LOG_FATAL_IF(native_buffer != self->current_buffer);
    if (native_buffer == self->current_buffer) {
      do_enqueue = false;
    }
  }
  if (do_enqueue) {
    self->buffer_queue.get()->Enqueue(native_buffer);
  }
  if (fence_fd >= 0)
    close(fence_fd);
  self->buffer_already_posted = false;
  self->current_buffer = nullptr;

  return android::NO_ERROR;
}

int DvrGraphicsContext::Query(const ANativeWindow* window, int what,
                              int* value) {
  DvrGraphicsContext* self = getSelf(const_cast<ANativeWindow*>(window));
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  switch (what) {
    case NATIVE_WINDOW_WIDTH:
      *value = self->display_surface->width();
      return android::NO_ERROR;
    case NATIVE_WINDOW_HEIGHT:
      *value = self->display_surface->height();
      return android::NO_ERROR;
    case NATIVE_WINDOW_FORMAT:
      *value = self->display_surface->format();
      return android::NO_ERROR;
    case NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS:
      *value = 1;
      return android::NO_ERROR;
    case NATIVE_WINDOW_CONCRETE_TYPE:
      *value = NATIVE_WINDOW_SURFACE;
      return android::NO_ERROR;
    case NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER:
      *value = 1;
      return android::NO_ERROR;
    case NATIVE_WINDOW_DEFAULT_WIDTH:
      *value = self->display_surface->width();
      return android::NO_ERROR;
    case NATIVE_WINDOW_DEFAULT_HEIGHT:
      *value = self->display_surface->height();
      return android::NO_ERROR;
    case NATIVE_WINDOW_TRANSFORM_HINT:
      *value = 0;
      return android::NO_ERROR;
  }

  *value = 0;
  return android::BAD_VALUE;
}

int DvrGraphicsContext::Perform(ANativeWindow* window, int operation, ...) {
  DvrGraphicsContext* self = getSelf(window);
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  va_list args;
  va_start(args, operation);

  // TODO(eieio): The following operations are not used at this time. They are
  // included here to help document which operations may be useful and what
  // parameters they take.
  switch (operation) {
    case NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: {
      int w = va_arg(args, int);
      int h = va_arg(args, int);
      ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: w=%d h=%d", w, h);
      return android::NO_ERROR;
    }

    case NATIVE_WINDOW_SET_BUFFERS_FORMAT: {
      int format = va_arg(args, int);
      ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_FORMAT: format=%d", format);
      return android::NO_ERROR;
    }

    case NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: {
      int transform = va_arg(args, int);
      ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: transform=%d",
               transform);
      return android::NO_ERROR;
    }

    case NATIVE_WINDOW_SET_USAGE: {
      int usage = va_arg(args, int);
      ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_USAGE: usage=%d", usage);
      return android::NO_ERROR;
    }

    case NATIVE_WINDOW_CONNECT:
    case NATIVE_WINDOW_DISCONNECT:
    case NATIVE_WINDOW_SET_BUFFERS_GEOMETRY:
    case NATIVE_WINDOW_API_CONNECT:
    case NATIVE_WINDOW_API_DISCONNECT:
      // TODO(eieio): we should implement these.
      return android::NO_ERROR;

    case NATIVE_WINDOW_SET_BUFFER_COUNT: {
      int buffer_count = va_arg(args, int);
      ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFER_COUNT: bufferCount=%d",
               buffer_count);
      return android::NO_ERROR;
    }
    case NATIVE_WINDOW_SET_BUFFERS_DATASPACE: {
      android_dataspace_t data_space =
          static_cast<android_dataspace_t>(va_arg(args, int));
      ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DATASPACE: dataSpace=%d",
               data_space);
      return android::NO_ERROR;
    }
    case NATIVE_WINDOW_SET_SCALING_MODE: {
      int mode = va_arg(args, int);
      ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_SCALING_MODE: mode=%d", mode);
      return android::NO_ERROR;
    }

    case NATIVE_WINDOW_LOCK:
    case NATIVE_WINDOW_UNLOCK_AND_POST:
    case NATIVE_WINDOW_SET_CROP:
    case NATIVE_WINDOW_SET_BUFFERS_TIMESTAMP:
      return android::INVALID_OPERATION;
  }

  return android::NAME_NOT_FOUND;
}

int DvrGraphicsContext::DequeueBuffer_DEPRECATED(ANativeWindow* window,
                                                 ANativeWindowBuffer** buffer) {
  int fence_fd = -1;
  int ret = DequeueBuffer(window, buffer, &fence_fd);

  // Wait for the fence.
  if (ret == android::NO_ERROR && fence_fd != -1)
    close(fence_fd);

  return ret;
}

int DvrGraphicsContext::CancelBuffer_DEPRECATED(ANativeWindow* window,
                                                ANativeWindowBuffer* buffer) {
  return CancelBuffer(window, buffer, -1);
}

int DvrGraphicsContext::QueueBuffer_DEPRECATED(ANativeWindow* window,
                                               ANativeWindowBuffer* buffer) {
  return QueueBuffer(window, buffer, -1);
}

int DvrGraphicsContext::LockBuffer_DEPRECATED(ANativeWindow* /*window*/,
                                              ANativeWindowBuffer* /*buffer*/) {
  return android::NO_ERROR;
}
// End ANativeWindow implementation.

int dvrSetEdsPose(DvrGraphicsContext* graphics_context,
                  float32x4_t render_pose_orientation,
                  float32x4_t render_pose_translation) {
  ATRACE_NAME("dvrSetEdsPose");
  if (!graphics_context->current_buffer) {
    ALOGE("dvrBeginRenderFrame must be called before dvrSetEdsPose");
    return -EPERM;
  }

  // When late-latching is enabled, the pose buffer is written by the GPU, so
  // we don't touch it here.
  float32x4_t is_late_latch = DVR_POSE_LATE_LATCH;
  if (render_pose_orientation[0] != is_late_latch[0]) {
    volatile android::dvr::DisplaySurfaceMetadata* data =
        graphics_context->surface_metadata;
    uint32_t buffer_index =
        graphics_context->current_buffer->surface_buffer_index();
    ALOGE_IF(TRACE, "write pose index %d %f %f", buffer_index,
             render_pose_orientation[0], render_pose_orientation[1]);
    data->orientation[buffer_index] = render_pose_orientation;
    data->translation[buffer_index] = render_pose_translation;
  }

  return 0;
}

int dvrBeginRenderFrameEds(DvrGraphicsContext* graphics_context,
                           float32x4_t render_pose_orientation,
                           float32x4_t render_pose_translation) {
  ATRACE_NAME("dvrBeginRenderFrameEds");
  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);
  CHECK_GL();
  // Grab a buffer from the queue and set its pose.
  if (!graphics_context->current_buffer) {
    graphics_context->current_buffer =
        graphics_context->buffer_queue->Dequeue();
  }

  int ret = dvrSetEdsPose(graphics_context, render_pose_orientation,
                          render_pose_translation);
  if (ret < 0)
    return ret;

  ATRACE_ASYNC_BEGIN("BufferDraw",
                     graphics_context->current_buffer->buffer()->id());

  {
    ATRACE_NAME("glEGLImageTargetTexture2DOES");
    // Bind the texture to the latest buffer in the queue.
    for (int i = 0; i < graphics_context->gl.texture_count; ++i) {
      glBindTexture(graphics_context->gl.texture_target_type,
                    graphics_context->gl.texture_id[i]);
      glEGLImageTargetTexture2DOES(
          graphics_context->gl.texture_target_type,
          graphics_context->current_buffer->image_khr(i));
    }
    glBindTexture(graphics_context->gl.texture_target_type, 0);
  }
  CHECK_GL();
  return 0;
}

int dvrBeginRenderFrameEdsVk(DvrGraphicsContext* graphics_context,
                             float32x4_t render_pose_orientation,
                             float32x4_t render_pose_translation,
                             VkSemaphore acquire_semaphore,
                             VkFence acquire_fence,
                             uint32_t* swapchain_image_index,
                             VkImageView* swapchain_image_view) {
  ATRACE_NAME("dvrBeginRenderFrameEdsVk");
  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api !=
                      DVR_GRAPHICS_API_VULKAN);

  // Acquire a swapchain image. This calls Dequeue() internally.
  VkResult result = vkAcquireNextImageKHR(
      graphics_context->vk.device, graphics_context->vk.swapchain, UINT64_MAX,
      acquire_semaphore, acquire_fence, swapchain_image_index);
  if (result != VK_SUCCESS)
    return -EINVAL;

  // Set the pose.
  int ret = dvrSetEdsPose(graphics_context, render_pose_orientation,
                          render_pose_translation);
  if (ret < 0)
    return ret;
  *swapchain_image_view =
      graphics_context->vk.swapchain_image_views[*swapchain_image_index];
  return 0;
}

int dvrBeginRenderFrame(DvrGraphicsContext* graphics_context) {
  return dvrBeginRenderFrameEds(graphics_context, DVR_POSE_NO_EDS,
                                DVR_POSE_NO_EDS);
}

int dvrBeginRenderFrameVk(DvrGraphicsContext* graphics_context,
                          VkSemaphore acquire_semaphore, VkFence acquire_fence,
                          uint32_t* swapchain_image_index,
                          VkImageView* swapchain_image_view) {
  return dvrBeginRenderFrameEdsVk(
      graphics_context, DVR_POSE_NO_EDS, DVR_POSE_NO_EDS, acquire_semaphore,
      acquire_fence, swapchain_image_index, swapchain_image_view);
}

int dvrBeginRenderFrameLateLatch(DvrGraphicsContext* graphics_context,
                                 uint32_t /*flags*/,
                                 uint32_t target_vsync_count, int num_views,
                                 const float** projection_matrices,
                                 const float** eye_from_head_matrices,
                                 const float** pose_offset_matrices,
                                 uint32_t* out_late_latch_buffer_id) {
  if (!graphics_context->late_latch) {
    return -EPERM;
  }
  if (num_views > DVR_GRAPHICS_SURFACE_MAX_VIEWS) {
    ALOGE("dvrBeginRenderFrameLateLatch called with too many views.");
    return -EINVAL;
  }
  dvrBeginRenderFrameEds(graphics_context, DVR_POSE_LATE_LATCH,
                         DVR_POSE_LATE_LATCH);
  auto& ll = graphics_context->late_latch;
  // TODO(jbates) Need to change this shader so that it dumps the single
  // captured pose for both eyes into the display surface metadata buffer at
  // the right index.
  android::dvr::LateLatchInput input;
  memset(&input, 0, sizeof(input));
  for (int i = 0; i < num_views; ++i) {
    memcpy(input.proj_mat + i, *(projection_matrices + i), 16 * sizeof(float));
    memcpy(input.eye_from_head_mat + i, *(eye_from_head_matrices + i),
           16 * sizeof(float));
    memcpy(input.pose_offset + i, *(pose_offset_matrices + i),
           16 * sizeof(float));
  }
  input.pose_index =
      target_vsync_count & android::dvr::kPoseAsyncBufferIndexMask;
  input.render_pose_index =
      graphics_context->current_buffer->surface_buffer_index();
  ll->AddLateLatch(input);
  *out_late_latch_buffer_id = ll->output_buffer_id();
  return 0;
}

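// Worked example of the wake-time math below (the numbers are illustrative
// assumptions, e.g. a 90 Hz panel):
//   vsync_period_ns                   = 11,111,111
//   current_frame_scheduled_finish_ns = T
//   start_delay_ns                    = 2,000,000
//   wake_time_ns                      = T + 2,000,000
// If wake_time_ns - vsync_period_ns is still in the future, one period is
// subtracted (and the vsync count decremented); if wake_time_ns is already in
// the past, one period is added (and the vsync count incremented).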
1326extern "C" int dvrGraphicsWaitNextFrame(
1327 DvrGraphicsContext* graphics_context, int64_t start_delay_ns,
1328 DvrFrameSchedule* out_next_frame_schedule) {
1329 start_delay_ns = std::max(start_delay_ns, static_cast<int64_t>(0));
1330
1331 // We only do one-shot timers:
1332 int64_t wake_time_ns = 0;
1333
1334 uint32_t current_frame_vsync;
1335 int64_t current_frame_scheduled_finish_ns;
1336 int64_t vsync_period_ns;
1337
1338 int fetch_schedule_result = graphics_context->vsync_client->GetSchedInfo(
1339 &vsync_period_ns, &current_frame_scheduled_finish_ns,
1340 &current_frame_vsync);
1341 if (fetch_schedule_result == 0) {
1342 wake_time_ns = current_frame_scheduled_finish_ns + start_delay_ns;
1343 // If the last wakeup time is still in the future, use it instead to avoid
1344 // major schedule jumps when applications call WaitNextFrame with
1345 // aggressive offsets.
1346 int64_t now = android::dvr::GetSystemClockNs();
1347 if (android::dvr::TimestampGT(wake_time_ns - vsync_period_ns, now)) {
1348 wake_time_ns -= vsync_period_ns;
1349 --current_frame_vsync;
1350 }
1351 // If the next wakeup time is in the past, add a vsync period to keep the
1352 // application on schedule.
1353 if (android::dvr::TimestampLT(wake_time_ns, now)) {
1354 wake_time_ns += vsync_period_ns;
1355 ++current_frame_vsync;
1356 }
1357 } else {
1358 ALOGE("Error getting frame schedule because: %s",
1359 strerror(-fetch_schedule_result));
1360 // Sleep for a vsync period to avoid cascading failure.
1361 wake_time_ns = android::dvr::GetSystemClockNs() +
1362 graphics_context->display_metrics.vsync_period_ns;
1363 }
1364
1365 // Adjust nsec to [0..999,999,999].
1366 struct itimerspec wake_time;
1367 wake_time.it_interval.tv_sec = 0;
1368 wake_time.it_interval.tv_nsec = 0;
1369 wake_time.it_value = android::dvr::NsToTimespec(wake_time_ns);
1370 bool sleep_result =
1371 timerfd_settime(graphics_context->timerfd.Get(), TFD_TIMER_ABSTIME,
1372 &wake_time, nullptr) == 0;
1373 if (sleep_result) {
1374 ATRACE_NAME("sleep");
1375 uint64_t expirations = 0;
1376 sleep_result = read(graphics_context->timerfd.Get(), &expirations,
1377 sizeof(uint64_t)) == sizeof(uint64_t);
1378 if (!sleep_result) {
1379 ALOGE("Error: timerfd read failed");
1380 }
1381 } else {
1382 ALOGE("Error: timerfd_settime failed because: %s", strerror(errno));
1383 }
1384
1385 auto& frame_history = graphics_context->frame_history;
1386 frame_history.CheckForFinishedFrames();
1387 if (fetch_schedule_result == 0) {
1388 uint32_t next_frame_vsync =
1389 current_frame_vsync +
1390 frame_history.PredictNextFrameVsyncInterval(vsync_period_ns);
1391 int64_t next_frame_scheduled_finish =
1392 (wake_time_ns - start_delay_ns) + vsync_period_ns;
1393 frame_history.OnFrameStart(next_frame_vsync, next_frame_scheduled_finish);
1394 if (out_next_frame_schedule) {
1395 out_next_frame_schedule->vsync_count = next_frame_vsync;
1396 out_next_frame_schedule->scheduled_frame_finish_ns =
1397 next_frame_scheduled_finish;
1398 }
1399 } else {
1400 frame_history.OnFrameStart(UINT32_MAX, -1);
1401 }
1402
1403 return (fetch_schedule_result == 0 && sleep_result) ? 0 : -1;
1404}
1405
1406extern "C" void dvrGraphicsPostEarly(DvrGraphicsContext* graphics_context) {
1407 ATRACE_NAME("dvrGraphicsPostEarly");
1408 ALOGI_IF(TRACE, "dvrGraphicsPostEarly");
1409
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001410 LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001411
1412 // Note that this function can be called before or after
1413 // dvrBeginRenderFrame.
1414 if (!graphics_context->buffer_already_posted) {
1415 graphics_context->buffer_already_posted = true;
1416
1417 if (!graphics_context->current_buffer) {
1418 graphics_context->current_buffer =
1419 graphics_context->buffer_queue->Dequeue();
1420 }
1421
1422 auto buffer = graphics_context->current_buffer->buffer().get();
1423 ATRACE_ASYNC_BEGIN("BufferPost", buffer->id());
1424 int result = buffer->Post<uint64_t>(LocalHandle(), 0);
1425 if (result < 0)
1426 ALOGE("Buffer post failed: %d (%s)", result, strerror(-result));
1427 }
1428}
1429
1430int dvrPresent(DvrGraphicsContext* graphics_context) {
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001431 LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001432
1433 std::array<char, 128> buf;
1434 snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|",
1435 graphics_context->frame_history.GetCurrentFrameVsync());
1436 ATRACE_NAME(buf.data());
1437
1438 if (!graphics_context->current_buffer) {
1439 ALOGE("Error: dvrPresent called without dvrBeginRenderFrame");
1440 return -EPERM;
1441 }
1442
1443 LocalHandle fence_fd =
1444 android::dvr::CreateGLSyncAndFlush(graphics_context->gl.egl_display);
1445
1446 ALOGI_IF(TRACE, "PostBuffer: buffer_id=%d, fence_fd=%d",
1447 graphics_context->current_buffer->buffer()->id(), fence_fd.Get());
1448 ALOGW_IF(!graphics_context->display_surface->visible(),
1449 "PostBuffer: Posting buffer on invisible surface!!!");
1450
1451 auto buffer = graphics_context->current_buffer->buffer().get();
1452 ATRACE_ASYNC_END("BufferDraw", buffer->id());
1453 if (!graphics_context->buffer_already_posted) {
1454 ATRACE_ASYNC_BEGIN("BufferPost", buffer->id());
1455 int result = buffer->Post<uint64_t>(fence_fd, 0);
1456 if (result < 0)
1457 ALOGE("Buffer post failed: %d (%s)", result, strerror(-result));
1458 }
1459
1460 graphics_context->frame_history.OnFrameSubmit(std::move(fence_fd));
1461 graphics_context->buffer_already_posted = false;
1462 graphics_context->current_buffer = nullptr;
1463 return 0;
1464}
1465
1466int dvrPresentVk(DvrGraphicsContext* graphics_context,
1467 VkSemaphore submit_semaphore, uint32_t swapchain_image_index) {
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001468 LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api !=
1469 DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001470
1471 std::array<char, 128> buf;
1472 snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|",
1473 graphics_context->frame_history.GetCurrentFrameVsync());
1474 ATRACE_NAME(buf.data());
1475
1476 if (!graphics_context->current_buffer) {
1477 ALOGE("Error: dvrPresentVk called without dvrBeginRenderFrameVk");
1478 return -EPERM;
1479 }
1480
1481 // Present the specified image. Internally, this gets a fence from the
1482 // Vulkan driver and passes it to DvrGraphicsContext::Post(),
1483 // which in turn passes it to buffer->Post() and adds it to frame_history.
1484 VkPresentInfoKHR present_info = {};
1485 present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
1486 present_info.swapchainCount = 1;
1487 present_info.pSwapchains = &graphics_context->vk.swapchain;
1488 present_info.pImageIndices = &swapchain_image_index;
1489 present_info.waitSemaphoreCount =
1490 (submit_semaphore != VK_NULL_HANDLE) ? 1 : 0;
1491 present_info.pWaitSemaphores = &submit_semaphore;
1492 VkResult result =
1493 vkQueuePresentKHR(graphics_context->vk.present_queue, &present_info);
1494 if (result != VK_SUCCESS) {
1495 return -EINVAL;
1496 }
1497
1498 return 0;
1499}
1500
1501extern "C" int dvrGetFrameScheduleResults(DvrGraphicsContext* context,
1502 DvrFrameScheduleResult* results,
1503 int in_result_count) {
1504 if (!context || !results)
1505 return -EINVAL;
1506
1507 return context->frame_history.GetPreviousFrameResults(results,
1508 in_result_count);
1509}
1510
1511extern "C" void dvrGraphicsSurfaceSetVisible(
1512 DvrGraphicsContext* graphics_context, int visible) {
1513 graphics_context->display_surface->SetVisible(visible);
1514}
1515
1516extern "C" int dvrGraphicsSurfaceGetVisible(
1517 DvrGraphicsContext* graphics_context) {
1518 return graphics_context->display_surface->visible() ? 1 : 0;
1519}
1520
1521extern "C" void dvrGraphicsSurfaceSetZOrder(
1522 DvrGraphicsContext* graphics_context, int z_order) {
1523 graphics_context->display_surface->SetZOrder(z_order);
1524}
1525
1526extern "C" int dvrGraphicsSurfaceGetZOrder(
1527 DvrGraphicsContext* graphics_context) {
1528 return graphics_context->display_surface->z_order();
1529}
1530