blob: d0557a98ebf9f2b588d0d5aa8969c0b899c2415a [file] [log] [blame]
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001#include <dvr/graphics.h>
2
Alex Vakulenko4fe60582017-02-02 11:35:59 -08003#include <inttypes.h>
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08004#include <sys/timerfd.h>
5#include <array>
6#include <vector>
7
Alex Vakulenko4fe60582017-02-02 11:35:59 -08008#include <log/log.h>
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08009#include <utils/Trace.h>
10
11#ifndef VK_USE_PLATFORM_ANDROID_KHR
12#define VK_USE_PLATFORM_ANDROID_KHR 1
13#endif
14#include <vulkan/vulkan.h>
15
16#include <pdx/file_handle.h>
17#include <private/dvr/clock_ns.h>
18#include <private/dvr/debug.h>
19#include <private/dvr/display_types.h>
20#include <private/dvr/frame_history.h>
21#include <private/dvr/gl_fenced_flush.h>
22#include <private/dvr/graphics/vr_gl_extensions.h>
23#include <private/dvr/graphics_private.h>
24#include <private/dvr/late_latch.h>
25#include <private/dvr/native_buffer_queue.h>
26#include <private/dvr/sensor_constants.h>
27#include <private/dvr/video_mesh_surface_client.h>
28#include <private/dvr/vsync_client.h>
29
30#include <android/native_window.h>
31
32#ifndef EGL_CONTEXT_MAJOR_VERSION
33#define EGL_CONTEXT_MAJOR_VERSION 0x3098
34#define EGL_CONTEXT_MINOR_VERSION 0x30FB
35#endif
36
37using android::pdx::LocalHandle;
38using android::pdx::LocalChannelHandle;
39
40using android::dvr::DisplaySurfaceAttributeEnum;
41using android::dvr::DisplaySurfaceAttributeValue;
42
namespace {

// Default gralloc usage for display surface buffers: rendered to by the GPU
// and sampled as a texture (see CreateDisplaySurfaceClient below).
constexpr int kDefaultDisplaySurfaceUsage =
    GRALLOC_USAGE_HW_RENDER | GRALLOC_USAGE_HW_TEXTURE;
// Default pixel format when DVR_SURFACE_PARAMETER_FORMAT_IN is not supplied.
constexpr int kDefaultDisplaySurfaceFormat = HAL_PIXEL_FORMAT_RGBA_8888;
// TODO(alexst): revisit this count when HW encode is available for casting.
constexpr int kDefaultBufferCount = 4;

// Use with dvrBeginRenderFrame to disable EDS for the current frame.
// NOTE(review): the sentinel appears to be encoded in the first lane (10.0f);
// confirm against the consumer of these pose vectors.
constexpr float32x4_t DVR_POSE_NO_EDS = {10.0f, 0.0f, 0.0f, 0.0f};

// Use with dvrBeginRenderFrame to indicate that GPU late-latching is being used
// for determining the render pose.
constexpr float32x4_t DVR_POSE_LATE_LATCH = {20.0f, 0.0f, 0.0f, 0.0f};
57
58#ifndef NDEBUG
59
60static const char* GetGlCallbackType(GLenum type) {
61 switch (type) {
62 case GL_DEBUG_TYPE_ERROR_KHR:
63 return "ERROR";
64 case GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR_KHR:
65 return "DEPRECATED_BEHAVIOR";
66 case GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR_KHR:
67 return "UNDEFINED_BEHAVIOR";
68 case GL_DEBUG_TYPE_PORTABILITY_KHR:
69 return "PORTABILITY";
70 case GL_DEBUG_TYPE_PERFORMANCE_KHR:
71 return "PERFORMANCE";
72 case GL_DEBUG_TYPE_OTHER_KHR:
73 return "OTHER";
74 default:
75 return "UNKNOWN";
76 }
77}
78
79static void on_gl_error(GLenum /*source*/, GLenum type, GLuint /*id*/,
80 GLenum severity, GLsizei /*length*/,
81 const char* message, const void* /*user_param*/) {
82 char msg[400];
83 snprintf(msg, sizeof(msg), "[" __FILE__ ":%u] GL %s: %s", __LINE__,
84 GetGlCallbackType(type), message);
85 switch (severity) {
86 case GL_DEBUG_SEVERITY_LOW_KHR:
87 ALOGI("%s", msg);
88 break;
89 case GL_DEBUG_SEVERITY_MEDIUM_KHR:
90 ALOGW("%s", msg);
91 break;
92 case GL_DEBUG_SEVERITY_HIGH_KHR:
93 ALOGE("%s", msg);
94 break;
95 }
96 fprintf(stderr, "%s\n", msg);
97}
98
99#endif
100
101int DvrToHalSurfaceFormat(int dvr_surface_format) {
102 switch (dvr_surface_format) {
103 case DVR_SURFACE_FORMAT_RGBA_8888:
104 return HAL_PIXEL_FORMAT_RGBA_8888;
105 case DVR_SURFACE_FORMAT_RGB_565:
106 return HAL_PIXEL_FORMAT_RGB_565;
107 default:
108 return HAL_PIXEL_FORMAT_RGBA_8888;
109 }
110}
111
112int SelectEGLConfig(EGLDisplay dpy, EGLint* attr, unsigned format,
113 EGLConfig* config) {
114 std::array<EGLint, 4> desired_rgba;
115 switch (format) {
116 case HAL_PIXEL_FORMAT_RGBA_8888:
117 case HAL_PIXEL_FORMAT_BGRA_8888:
118 desired_rgba = {{8, 8, 8, 8}};
119 break;
120 case HAL_PIXEL_FORMAT_RGB_565:
121 desired_rgba = {{5, 6, 5, 0}};
122 break;
123 default:
124 ALOGE("Unsupported framebuffer pixel format %d", format);
125 return -1;
126 }
127
128 EGLint max_configs = 0;
129 if (eglGetConfigs(dpy, NULL, 0, &max_configs) == EGL_FALSE) {
130 ALOGE("No EGL configurations available?!");
131 return -1;
132 }
133
134 std::vector<EGLConfig> configs(max_configs);
135
136 EGLint num_configs;
137 if (eglChooseConfig(dpy, attr, &configs[0], max_configs, &num_configs) ==
138 EGL_FALSE) {
139 ALOGE("eglChooseConfig failed");
140 return -1;
141 }
142
143 std::array<EGLint, 4> config_rgba;
144 for (int i = 0; i < num_configs; i++) {
145 eglGetConfigAttrib(dpy, configs[i], EGL_RED_SIZE, &config_rgba[0]);
146 eglGetConfigAttrib(dpy, configs[i], EGL_GREEN_SIZE, &config_rgba[1]);
147 eglGetConfigAttrib(dpy, configs[i], EGL_BLUE_SIZE, &config_rgba[2]);
148 eglGetConfigAttrib(dpy, configs[i], EGL_ALPHA_SIZE, &config_rgba[3]);
149 if (config_rgba == desired_rgba) {
150 *config = configs[i];
151 return 0;
152 }
153 }
154
155 ALOGE("Cannot find a matching EGL config");
156 return -1;
157}
158
159void DestroyEglContext(EGLDisplay egl_display, EGLContext* egl_context) {
160 if (*egl_context != EGL_NO_CONTEXT) {
161 eglDestroyContext(egl_display, *egl_context);
162 *egl_context = EGL_NO_CONTEXT;
163 }
164}
165
166// Perform internal initialization. A GL context must be bound to the current
167// thread.
168// @param internally_created_context True if we created and own the GL context,
169// false if it was supplied by the application.
170// @return 0 if init was successful, or a negative error code on failure.
171int InitGl(bool internally_created_context) {
172 EGLDisplay egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
173 if (egl_display == EGL_NO_DISPLAY) {
174 ALOGE("eglGetDisplay failed");
175 return -EINVAL;
176 }
177
178 EGLContext egl_context = eglGetCurrentContext();
179 if (egl_context == EGL_NO_CONTEXT) {
180 ALOGE("No GL context bound");
181 return -EINVAL;
182 }
183
184 glGetError(); // Clear the error state
185 GLint major_version, minor_version;
186 glGetIntegerv(GL_MAJOR_VERSION, &major_version);
187 glGetIntegerv(GL_MINOR_VERSION, &minor_version);
188 if (glGetError() != GL_NO_ERROR) {
189 // GL_MAJOR_VERSION and GL_MINOR_VERSION were added in GLES 3. If we get an
190 // error querying them it's almost certainly because it's GLES 1 or 2.
191 ALOGE("Error getting GL version. Must be GLES 3.2 or greater.");
192 return -EINVAL;
193 }
194
195 if (major_version < 3 || (major_version == 3 && minor_version < 2)) {
196 ALOGE("Invalid GL version: %d.%d. Must be GLES 3.2 or greater.",
197 major_version, minor_version);
198 return -EINVAL;
199 }
200
201#ifndef NDEBUG
202 if (internally_created_context) {
203 // Enable verbose GL debug output.
204 glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS_KHR);
205 glDebugMessageCallbackKHR(on_gl_error, NULL);
206 GLuint unused_ids = 0;
207 glDebugMessageControlKHR(GL_DONT_CARE, GL_DONT_CARE, GL_DONT_CARE, 0,
208 &unused_ids, GL_TRUE);
209 }
210#else
211 (void)internally_created_context;
212#endif
213
214 load_gl_extensions();
215 return 0;
216}
217
218int CreateEglContext(EGLDisplay egl_display, DvrSurfaceParameter* parameters,
219 EGLContext* egl_context) {
220 *egl_context = EGL_NO_CONTEXT;
221
222 EGLint major, minor;
223 if (!eglInitialize(egl_display, &major, &minor)) {
224 ALOGE("Failed to initialize EGL");
225 return -ENXIO;
226 }
227
228 ALOGI("EGL version: %d.%d\n", major, minor);
229
230 int buffer_format = kDefaultDisplaySurfaceFormat;
231
232 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
233 switch (p->key) {
234 case DVR_SURFACE_PARAMETER_FORMAT_IN:
235 buffer_format = DvrToHalSurfaceFormat(p->value);
236 break;
237 }
238 }
239
240 EGLint config_attrs[] = {EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
241 EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_NONE};
242 EGLConfig config = {0};
243
244 int ret = SelectEGLConfig(egl_display, config_attrs, buffer_format, &config);
245 if (ret < 0)
246 return ret;
247
248 ALOGI("EGL SelectEGLConfig ok.\n");
249
250 EGLint context_attrs[] = {EGL_CONTEXT_MAJOR_VERSION,
251 3,
252 EGL_CONTEXT_MINOR_VERSION,
253 2,
254#ifndef NDEBUG
255 EGL_CONTEXT_FLAGS_KHR,
256 EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR,
257#endif
258 EGL_NONE};
259
260 *egl_context =
261 eglCreateContext(egl_display, config, EGL_NO_CONTEXT, context_attrs);
262 if (*egl_context == EGL_NO_CONTEXT) {
263 ALOGE("eglCreateContext failed");
264 return -ENXIO;
265 }
266
267 ALOGI("eglCreateContext ok.\n");
268
269 if (!eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE,
270 *egl_context)) {
271 ALOGE("eglMakeCurrent failed");
272 DestroyEglContext(egl_display, egl_context);
273 return -EINVAL;
274 }
275
276 return 0;
277}
278
279} // anonymous namespace
280
// TODO(hendrikw): When we remove the calls to this in native_window.cpp, move
// this back into the anonymous namespace
// Creates and configures a DisplaySurfaceClient from the
// DVR_SURFACE_PARAMETER_NONE-terminated |parameters| list, and fills |metrics|
// with the system display metrics. *_IN parameters are consumed before
// creation; *_OUT parameters are written after the surface exists.
// Returns nullptr if the display client or its metrics cannot be obtained, or
// if an unknown parameter key is encountered.
std::shared_ptr<android::dvr::DisplaySurfaceClient> CreateDisplaySurfaceClient(
    struct DvrSurfaceParameter* parameters,
    /*out*/ android::dvr::SystemDisplayMetrics* metrics) {
  auto client = android::dvr::DisplayClient::Create();
  if (!client) {
    ALOGE("Failed to create display client!");
    return nullptr;
  }

  const int ret = client->GetDisplayMetrics(metrics);
  if (ret < 0) {
    ALOGE("Failed to get display metrics: %s", strerror(-ret));
    return nullptr;
  }

  // Parameters that may be modified by the parameters array. Some of these are
  // here for future expansion.
  int request_width = -1;   // -1 means "derive from display metrics" below.
  int request_height = -1;  // Ditto.
  int request_flags = 0;
  bool disable_distortion = false;
  bool disable_stabilization = false;
  bool disable_cac = false;
  bool request_visible = true;
  bool vertical_flip = false;
  int request_z_order = 0;
  bool request_exclude_from_blur = false;
  bool request_blur_behind = true;
  int request_format = kDefaultDisplaySurfaceFormat;
  int request_usage = kDefaultDisplaySurfaceUsage;
  int geometry_type = DVR_SURFACE_GEOMETRY_SINGLE;

  // Handle parameter inputs.
  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_WIDTH_IN:
        request_width = p->value;
        break;
      case DVR_SURFACE_PARAMETER_HEIGHT_IN:
        request_height = p->value;
        break;
      case DVR_SURFACE_PARAMETER_DISABLE_DISTORTION_IN:
        disable_distortion = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_DISABLE_STABILIZATION_IN:
        disable_stabilization = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_DISABLE_CAC_IN:
        disable_cac = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_VISIBLE_IN:
        request_visible = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_Z_ORDER_IN:
        request_z_order = p->value;
        break;
      case DVR_SURFACE_PARAMETER_EXCLUDE_FROM_BLUR_IN:
        request_exclude_from_blur = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_BLUR_BEHIND_IN:
        request_blur_behind = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_VERTICAL_FLIP_IN:
        vertical_flip = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_GEOMETRY_IN:
        geometry_type = p->value;
        break;
      case DVR_SURFACE_PARAMETER_FORMAT_IN:
        request_format = DvrToHalSurfaceFormat(p->value);
        break;
      // Keys below are valid but handled elsewhere (or are *_OUT keys written
      // after the surface is created); accept them silently here.
      case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
      case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
      case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
      case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
      case DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT:
      case DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT:
      case DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT:
      case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
      case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
      case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
      case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
      case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
      case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
      case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
      case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
      case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
        break;
      default:
        ALOGE("Invalid display surface parameter: key=%d value=%" PRId64,
              p->key, p->value);
        return nullptr;
    }
  }

  // Fold the boolean options into the surface flag bits.
  request_flags |= disable_distortion
                       ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_DISTORTION
                       : 0;
  request_flags |=
      disable_stabilization ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_EDS : 0;
  request_flags |=
      disable_cac ? DVR_DISPLAY_SURFACE_FLAGS_DISABLE_SYSTEM_CAC : 0;
  request_flags |= vertical_flip ? DVR_DISPLAY_SURFACE_FLAGS_VERTICAL_FLIP : 0;
  request_flags |= (geometry_type == DVR_SURFACE_GEOMETRY_SEPARATE_2)
                       ? DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2
                       : 0;

  // Derive default dimensions from the display metrics when the caller did
  // not request explicit ones.
  if (request_width == -1) {
    request_width = disable_distortion ? metrics->display_native_width
                                       : metrics->distorted_width;
    if (!disable_distortion &&
        geometry_type == DVR_SURFACE_GEOMETRY_SEPARATE_2) {
      // The metrics always return the single wide buffer resolution.
      // When split between eyes, we need to halve the width of the surface.
      request_width /= 2;
    }
  }
  if (request_height == -1) {
    request_height = disable_distortion ? metrics->display_native_height
                                        : metrics->distorted_height;
  }

  // NOTE(review): |surface| is not null-checked before SetAttributes below —
  // confirm CreateDisplaySurface cannot return nullptr on failure.
  std::shared_ptr<android::dvr::DisplaySurfaceClient> surface =
      client->CreateDisplaySurface(request_width, request_height,
                                   request_format, request_usage,
                                   request_flags);
  surface->SetAttributes(
      {{DisplaySurfaceAttributeEnum::Visible,
        DisplaySurfaceAttributeValue{request_visible}},
       {DisplaySurfaceAttributeEnum::ZOrder,
        DisplaySurfaceAttributeValue{request_z_order}},
       {DisplaySurfaceAttributeEnum::ExcludeFromBlur,
        DisplaySurfaceAttributeValue{request_exclude_from_blur}},
       {DisplaySurfaceAttributeEnum::BlurBehind,
        DisplaySurfaceAttributeValue{request_blur_behind}}});

  // Handle parameter output requests down here so we can return surface info.
  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
        *static_cast<int32_t*>(p->value_out) = metrics->display_native_width;
        break;
      case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
        *static_cast<int32_t*>(p->value_out) = metrics->display_native_height;
        break;
      case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
        *static_cast<int32_t*>(p->value_out) = surface->width();
        break;
      case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
        *static_cast<int32_t*>(p->value_out) = surface->height();
        break;
      case DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT:
        *static_cast<float*>(p->value_out) = metrics->inter_lens_distance_m;
        break;
      case DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT:
        for (int i = 0; i < 4; ++i) {
          float* float_values_out = static_cast<float*>(p->value_out);
          float_values_out[i] = metrics->left_fov_lrbt[i];
        }
        break;
      case DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT:
        for (int i = 0; i < 4; ++i) {
          float* float_values_out = static_cast<float*>(p->value_out);
          float_values_out[i] = metrics->right_fov_lrbt[i];
        }
        break;
      case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
        *static_cast<uint64_t*>(p->value_out) = metrics->vsync_period_ns;
        break;
      default:
        break;
    }
  }

  return surface;
}
463
464extern "C" int dvrGetNativeDisplayDimensions(int* native_width,
465 int* native_height) {
466 int error = 0;
467 auto client = android::dvr::DisplayClient::Create(&error);
468 if (!client) {
469 ALOGE("Failed to create display client!");
470 return error;
471 }
472
473 android::dvr::SystemDisplayMetrics metrics;
474 const int ret = client->GetDisplayMetrics(&metrics);
475
476 if (ret != 0) {
477 ALOGE("Failed to get display metrics!");
478 return ret;
479 }
480
481 *native_width = static_cast<int>(metrics.display_native_width);
482 *native_height = static_cast<int>(metrics.display_native_height);
483 return 0;
484}
485
486extern "C" int dvrGetDisplaySurfaceInfo(EGLNativeWindowType win, int* width,
487 int* height, int* format) {
488 ANativeWindow* nwin = reinterpret_cast<ANativeWindow*>(win);
489 int w, h, f;
490
491 nwin->query(nwin, NATIVE_WINDOW_DEFAULT_WIDTH, &w);
492 nwin->query(nwin, NATIVE_WINDOW_DEFAULT_HEIGHT, &h);
493 nwin->query(nwin, NATIVE_WINDOW_FORMAT, &f);
494
495 if (width)
496 *width = w;
497 if (height)
498 *height = h;
499 if (format)
500 *format = f;
501
502 return 0;
503}
504
// Reference-counted graphics context that doubles as an ANativeWindow (via
// ANativeObjectBase), backing either a GLES or a Vulkan swapchain over a DVR
// display surface. Exactly one of |gl| or |vk| is active, selected by
// |graphics_api|.
struct DvrGraphicsContext : public android::ANativeObjectBase<
                                ANativeWindow, DvrGraphicsContext,
                                android::LightRefBase<DvrGraphicsContext>> {
 public:
  DvrGraphicsContext();
  ~DvrGraphicsContext();

  int graphics_api;  // DVR_GRAPHICS_API_* (GLES or Vulkan)

  // GL specific members.
  struct {
    EGLDisplay egl_display;
    EGLContext egl_context;
    // True when this context created (and must destroy) egl_context; false
    // when the application supplied it.
    bool owns_egl_context;
    GLuint texture_id[kSurfaceViewMaxCount];
    int texture_count;
    GLenum texture_target_type;
  } gl;

  // VK specific members
  struct {
    // These objects are passed in by the application, and are NOT owned
    // by the context.
    VkInstance instance;
    VkPhysicalDevice physical_device;
    VkDevice device;
    VkQueue present_queue;
    uint32_t present_queue_family;
    const VkAllocationCallbacks* allocation_callbacks;
    // These objects are owned by the context.
    ANativeWindow* window;
    VkSurfaceKHR surface;
    VkSwapchainKHR swapchain;
    std::vector<VkImage> swapchain_images;
    std::vector<VkImageView> swapchain_image_views;
  } vk;

  // Display surface, metrics, and buffer management members.
  std::shared_ptr<android::dvr::DisplaySurfaceClient> display_surface;
  android::dvr::SystemDisplayMetrics display_metrics;
  std::unique_ptr<android::dvr::NativeBufferQueue> buffer_queue;
  android::dvr::NativeBufferProducer* current_buffer;
  bool buffer_already_posted;

  // Synchronization members.
  std::unique_ptr<android::dvr::VSyncClient> vsync_client;
  LocalHandle timerfd;

  android::dvr::FrameHistory frame_history;

  // Mapped surface metadata (ie: for pose delivery with presented frames).
  volatile android::dvr::DisplaySurfaceMetadata* surface_metadata;

  // LateLatch support.
  std::unique_ptr<android::dvr::LateLatch> late_latch;

  // Video mesh support.
  std::vector<std::shared_ptr<android::dvr::VideoMeshSurfaceClient>>
      video_mesh_surfaces;

 private:
  // ANativeWindow function implementations
  std::mutex lock_;
  int Post(android::dvr::NativeBufferProducer* buffer, int fence_fd);
  static int SetSwapInterval(ANativeWindow* window, int interval);
  static int DequeueBuffer(ANativeWindow* window, ANativeWindowBuffer** buffer,
                           int* fence_fd);
  static int QueueBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                         int fence_fd);
  static int CancelBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                          int fence_fd);
  static int Query(const ANativeWindow* window, int what, int* value);
  static int Perform(ANativeWindow* window, int operation, ...);
  static int DequeueBuffer_DEPRECATED(ANativeWindow* window,
                                      ANativeWindowBuffer** buffer);
  static int CancelBuffer_DEPRECATED(ANativeWindow* window,
                                     ANativeWindowBuffer* buffer);
  static int QueueBuffer_DEPRECATED(ANativeWindow* window,
                                    ANativeWindowBuffer* buffer);
  static int LockBuffer_DEPRECATED(ANativeWindow* window,
                                   ANativeWindowBuffer* buffer);

  // Non-copyable: owns EGL/Vulkan handles and a mutex.
  DvrGraphicsContext(const DvrGraphicsContext&) = delete;
  void operator=(const DvrGraphicsContext&) = delete;
};
590
// Default-constructs the context with GLES selected and empty API state, then
// wires the ANativeWindow function table to this class's static trampolines so
// the context can be handed out as a native window.
DvrGraphicsContext::DvrGraphicsContext()
    : graphics_api(DVR_GRAPHICS_API_GLES),
      gl{},
      vk{},
      current_buffer(nullptr),
      buffer_already_posted(false),
      surface_metadata(nullptr) {
  // Zero-init above clears the structs; these members need non-zero defaults.
  gl.egl_display = EGL_NO_DISPLAY;
  gl.egl_context = EGL_NO_CONTEXT;
  gl.owns_egl_context = true;
  gl.texture_target_type = GL_TEXTURE_2D;

  // Install the current ANativeWindow entry points.
  ANativeWindow::setSwapInterval = SetSwapInterval;
  ANativeWindow::dequeueBuffer = DequeueBuffer;
  ANativeWindow::cancelBuffer = CancelBuffer;
  ANativeWindow::queueBuffer = QueueBuffer;
  ANativeWindow::query = Query;
  ANativeWindow::perform = Perform;

  // Legacy entry points kept for older callers.
  ANativeWindow::dequeueBuffer_DEPRECATED = DequeueBuffer_DEPRECATED;
  ANativeWindow::cancelBuffer_DEPRECATED = CancelBuffer_DEPRECATED;
  ANativeWindow::lockBuffer_DEPRECATED = LockBuffer_DEPRECATED;
  ANativeWindow::queueBuffer_DEPRECATED = QueueBuffer_DEPRECATED;
}
615
616DvrGraphicsContext::~DvrGraphicsContext() {
617 if (graphics_api == DVR_GRAPHICS_API_GLES) {
618 glDeleteTextures(gl.texture_count, gl.texture_id);
619 if (gl.owns_egl_context)
620 DestroyEglContext(gl.egl_display, &gl.egl_context);
621 } else if (graphics_api == DVR_GRAPHICS_API_VULKAN) {
622 if (vk.swapchain != VK_NULL_HANDLE) {
623 for (auto view : vk.swapchain_image_views) {
624 vkDestroyImageView(vk.device, view, vk.allocation_callbacks);
625 }
626 vkDestroySwapchainKHR(vk.device, vk.swapchain, vk.allocation_callbacks);
627 vkDestroySurfaceKHR(vk.instance, vk.surface, vk.allocation_callbacks);
628 delete vk.window;
629 }
630 }
631}
632
633int dvrGraphicsContextCreate(struct DvrSurfaceParameter* parameters,
634 DvrGraphicsContext** return_graphics_context) {
635 std::unique_ptr<DvrGraphicsContext> context(new DvrGraphicsContext);
636
637 // See whether we're using GL or Vulkan
638 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
639 switch (p->key) {
640 case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
641 context->graphics_api = p->value;
642 break;
643 }
644 }
645
646 if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
647 context->gl.egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
648 if (context->gl.egl_display == EGL_NO_DISPLAY) {
649 ALOGE("eglGetDisplay failed");
650 return -ENXIO;
651 }
652
653 // See if we should create a GL context
654 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
655 switch (p->key) {
656 case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
657 context->gl.owns_egl_context = p->value != 0;
658 break;
659 }
660 }
661
662 if (context->gl.owns_egl_context) {
663 int ret = CreateEglContext(context->gl.egl_display, parameters,
664 &context->gl.egl_context);
665 if (ret < 0)
666 return ret;
667 } else {
668 context->gl.egl_context = eglGetCurrentContext();
669 }
670
671 int ret = InitGl(context->gl.owns_egl_context);
672 if (ret < 0)
673 return ret;
674 } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
675 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
676 switch (p->key) {
677 case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
678 context->vk.instance = reinterpret_cast<VkInstance>(p->value);
679 break;
680 case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
681 context->vk.physical_device =
682 reinterpret_cast<VkPhysicalDevice>(p->value);
683 break;
684 case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
685 context->vk.device = reinterpret_cast<VkDevice>(p->value);
686 break;
687 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
688 context->vk.present_queue = reinterpret_cast<VkQueue>(p->value);
689 break;
690 case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
691 context->vk.present_queue_family = static_cast<uint32_t>(p->value);
692 break;
693 }
694 }
695 } else {
696 ALOGE("Error: invalid graphics API type");
697 return -EINVAL;
698 }
699
700 context->display_surface =
701 CreateDisplaySurfaceClient(parameters, &context->display_metrics);
702 if (!context->display_surface) {
703 ALOGE("Error: failed to create display surface client");
704 return -ECOMM;
705 }
706
707 context->buffer_queue.reset(new android::dvr::NativeBufferQueue(
708 context->gl.egl_display, context->display_surface, kDefaultBufferCount));
709
710 // The way the call sequence works we need 1 more than the buffer queue
711 // capacity to store data for all pending frames
712 context->frame_history.Reset(context->buffer_queue->GetQueueCapacity() + 1);
713
714 context->vsync_client = android::dvr::VSyncClient::Create();
715 if (!context->vsync_client) {
716 ALOGE("Error: failed to create vsync client");
717 return -ECOMM;
718 }
719
720 context->timerfd.Reset(timerfd_create(CLOCK_MONOTONIC, 0));
721 if (!context->timerfd) {
722 ALOGE("Error: timerfd_create failed because: %s", strerror(errno));
723 return -EPERM;
724 }
725
726 context->surface_metadata = context->display_surface->GetMetadataBufferPtr();
727 if (!context->surface_metadata) {
728 ALOGE("Error: surface metadata allocation failed");
729 return -ENOMEM;
730 }
731
732 ALOGI("buffer: %d x %d\n", context->display_surface->width(),
733 context->display_surface->height());
734
735 if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
736 context->gl.texture_count = (context->display_surface->flags() &
737 DVR_DISPLAY_SURFACE_FLAGS_GEOMETRY_SEPARATE_2)
738 ? 2
739 : 1;
740
741 // Create the GL textures.
742 glGenTextures(context->gl.texture_count, context->gl.texture_id);
743
744 // We must make sure that we have at least one buffer allocated at this time
745 // so that anyone who tries to bind an FBO to context->texture_id
746 // will not get an incomplete buffer.
747 context->current_buffer = context->buffer_queue->Dequeue();
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800748 LOG_ALWAYS_FATAL_IF(context->gl.texture_count !=
749 context->current_buffer->buffer()->slice_count());
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800750 for (int i = 0; i < context->gl.texture_count; ++i) {
751 glBindTexture(context->gl.texture_target_type, context->gl.texture_id[i]);
752 glEGLImageTargetTexture2DOES(context->gl.texture_target_type,
753 context->current_buffer->image_khr(i));
754 }
755 glBindTexture(context->gl.texture_target_type, 0);
756 CHECK_GL();
757
758 bool is_late_latch = false;
759
760 // Pass back the texture target type and id.
761 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
762 switch (p->key) {
763 case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
764 is_late_latch = !!p->value;
765 break;
766 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
767 *static_cast<GLenum*>(p->value_out) = context->gl.texture_target_type;
768 break;
769 case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
770 for (int i = 0; i < context->gl.texture_count; ++i) {
771 *(static_cast<GLuint*>(p->value_out) + i) =
772 context->gl.texture_id[i];
773 }
774 break;
775 }
776 }
777
778 // Initialize late latch.
779 if (is_late_latch) {
780 LocalHandle fd;
781 int ret = context->display_surface->GetMetadataBufferFd(&fd);
782 if (ret == 0) {
783 context->late_latch.reset(
784 new android::dvr::LateLatch(true, std::move(fd)));
785 } else {
786 ALOGE("Error: failed to get surface metadata buffer fd for late latch");
787 }
788 }
789 } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
790 VkResult result = VK_SUCCESS;
791 // Create a VkSurfaceKHR from the ANativeWindow.
792 VkAndroidSurfaceCreateInfoKHR android_surface_ci = {};
793 android_surface_ci.sType =
794 VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
795 android_surface_ci.window = context.get();
796 result = vkCreateAndroidSurfaceKHR(
797 context->vk.instance, &android_surface_ci,
798 context->vk.allocation_callbacks, &context->vk.surface);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800799 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800800 VkBool32 surface_supports_present = VK_FALSE;
801 result = vkGetPhysicalDeviceSurfaceSupportKHR(
802 context->vk.physical_device, context->vk.present_queue_family,
803 context->vk.surface, &surface_supports_present);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800804 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800805 if (!surface_supports_present) {
806 ALOGE("Error: provided queue family (%u) does not support presentation",
807 context->vk.present_queue_family);
808 return -EPERM;
809 }
810 VkSurfaceCapabilitiesKHR surface_capabilities = {};
811 result = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
812 context->vk.physical_device, context->vk.surface,
813 &surface_capabilities);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800814 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800815 // Determine the swapchain image format.
816 uint32_t device_surface_format_count = 0;
817 result = vkGetPhysicalDeviceSurfaceFormatsKHR(
818 context->vk.physical_device, context->vk.surface,
819 &device_surface_format_count, nullptr);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800820 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800821 std::vector<VkSurfaceFormatKHR> device_surface_formats(
822 device_surface_format_count);
823 result = vkGetPhysicalDeviceSurfaceFormatsKHR(
824 context->vk.physical_device, context->vk.surface,
825 &device_surface_format_count, device_surface_formats.data());
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800826 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
827 LOG_ALWAYS_FATAL_IF(device_surface_format_count == 0U);
828 LOG_ALWAYS_FATAL_IF(device_surface_formats[0].format ==
829 VK_FORMAT_UNDEFINED);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800830 VkSurfaceFormatKHR present_surface_format = device_surface_formats[0];
831 // Determine the swapchain present mode.
832 // TODO(cort): query device_present_modes to make sure MAILBOX is supported.
833 // But according to libvulkan, it is.
834 uint32_t device_present_mode_count = 0;
835 result = vkGetPhysicalDeviceSurfacePresentModesKHR(
836 context->vk.physical_device, context->vk.surface,
837 &device_present_mode_count, nullptr);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800838 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800839 std::vector<VkPresentModeKHR> device_present_modes(
840 device_present_mode_count);
841 result = vkGetPhysicalDeviceSurfacePresentModesKHR(
842 context->vk.physical_device, context->vk.surface,
843 &device_present_mode_count, device_present_modes.data());
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800844 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800845 VkPresentModeKHR present_mode = VK_PRESENT_MODE_MAILBOX_KHR;
846 // Extract presentation surface extents, image count, transform, usages,
847 // etc.
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800848 LOG_ALWAYS_FATAL_IF(
849 static_cast<int>(surface_capabilities.currentExtent.width) == -1 ||
850 static_cast<int>(surface_capabilities.currentExtent.height) == -1);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800851 VkExtent2D swapchain_extent = surface_capabilities.currentExtent;
852
853 uint32_t desired_image_count = surface_capabilities.minImageCount;
854 if (surface_capabilities.maxImageCount > 0 &&
855 desired_image_count > surface_capabilities.maxImageCount) {
856 desired_image_count = surface_capabilities.maxImageCount;
857 }
858 VkSurfaceTransformFlagBitsKHR surface_transform =
859 surface_capabilities.currentTransform;
860 VkImageUsageFlags image_usage_flags =
861 surface_capabilities.supportedUsageFlags;
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800862 LOG_ALWAYS_FATAL_IF(surface_capabilities.supportedCompositeAlpha ==
863 static_cast<VkFlags>(0));
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800864 VkCompositeAlphaFlagBitsKHR composite_alpha =
865 VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
866 if (!(surface_capabilities.supportedCompositeAlpha &
867 VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR)) {
868 composite_alpha = VkCompositeAlphaFlagBitsKHR(
869 static_cast<int>(surface_capabilities.supportedCompositeAlpha) &
870 -static_cast<int>(surface_capabilities.supportedCompositeAlpha));
871 }
872 // Create VkSwapchainKHR
873 VkSwapchainCreateInfoKHR swapchain_ci = {};
874 swapchain_ci.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
875 swapchain_ci.pNext = nullptr;
876 swapchain_ci.surface = context->vk.surface;
877 swapchain_ci.minImageCount = desired_image_count;
878 swapchain_ci.imageFormat = present_surface_format.format;
879 swapchain_ci.imageColorSpace = present_surface_format.colorSpace;
880 swapchain_ci.imageExtent.width = swapchain_extent.width;
881 swapchain_ci.imageExtent.height = swapchain_extent.height;
882 swapchain_ci.imageUsage = image_usage_flags;
883 swapchain_ci.preTransform = surface_transform;
884 swapchain_ci.compositeAlpha = composite_alpha;
885 swapchain_ci.imageArrayLayers = 1;
886 swapchain_ci.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
887 swapchain_ci.queueFamilyIndexCount = 0;
888 swapchain_ci.pQueueFamilyIndices = nullptr;
889 swapchain_ci.presentMode = present_mode;
890 swapchain_ci.clipped = VK_TRUE;
891 swapchain_ci.oldSwapchain = VK_NULL_HANDLE;
892 result = vkCreateSwapchainKHR(context->vk.device, &swapchain_ci,
893 context->vk.allocation_callbacks,
894 &context->vk.swapchain);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800895 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800896 // Create swapchain image views
897 uint32_t image_count = 0;
898 result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
899 &image_count, nullptr);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800900 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
901 LOG_ALWAYS_FATAL_IF(image_count == 0U);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800902 context->vk.swapchain_images.resize(image_count);
903 result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
904 &image_count,
905 context->vk.swapchain_images.data());
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800906 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800907 context->vk.swapchain_image_views.resize(image_count);
908 VkImageViewCreateInfo image_view_ci = {};
909 image_view_ci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
910 image_view_ci.pNext = nullptr;
911 image_view_ci.flags = 0;
912 image_view_ci.format = swapchain_ci.imageFormat;
913 image_view_ci.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
914 image_view_ci.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
915 image_view_ci.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
916 image_view_ci.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
917 image_view_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
918 image_view_ci.subresourceRange.baseMipLevel = 0;
919 image_view_ci.subresourceRange.levelCount = 1;
920 image_view_ci.subresourceRange.baseArrayLayer = 0;
921 image_view_ci.subresourceRange.layerCount = 1;
922 image_view_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
923 image_view_ci.image = VK_NULL_HANDLE; // filled in below
924 for (uint32_t i = 0; i < image_count; ++i) {
925 image_view_ci.image = context->vk.swapchain_images[i];
926 result = vkCreateImageView(context->vk.device, &image_view_ci,
927 context->vk.allocation_callbacks,
928 &context->vk.swapchain_image_views[i]);
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800929 LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800930 }
931 // Fill in any requested output parameters.
932 for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
933 switch (p->key) {
934 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
935 *static_cast<uint32_t*>(p->value_out) = image_count;
936 break;
937 case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
938 *static_cast<VkFormat*>(p->value_out) = swapchain_ci.imageFormat;
939 break;
940 }
941 }
942 }
943
944 *return_graphics_context = context.release();
945 return 0;
946}
947
// Destroys a graphics context previously returned through
// dvrGraphicsContextCreate's out-parameter. All teardown is delegated to the
// DvrGraphicsContext destructor; passing nullptr is a safe no-op (delete on a
// null pointer is well-defined).
void dvrGraphicsContextDestroy(DvrGraphicsContext* graphics_context) {
  delete graphics_context;
}
951
952// ANativeWindow function implementations. These should only be used
953// by the Vulkan path.
// Posts |buffer| to the display service with |fence_fd| as its acquire fence.
// Vulkan path only: aborts if this context is not DVR_GRAPHICS_API_VULKAN.
// Ownership of |fence_fd| transfers to NativeBufferProducer::Post(), which
// closes it; a duplicate is recorded in frame_history beforehand.
int DvrGraphicsContext::Post(android::dvr::NativeBufferProducer* buffer,
                             int fence_fd) {
  LOG_ALWAYS_FATAL_IF(graphics_api != DVR_GRAPHICS_API_VULKAN);
  ATRACE_NAME(__PRETTY_FUNCTION__);
  ALOGI_IF(TRACE, "DvrGraphicsContext::Post: buffer_id=%d, fence_fd=%d",
           buffer->buffer()->id(), fence_fd);
  // Posting to an invisible surface is permitted but almost certainly
  // unintended, so warn loudly.
  ALOGW_IF(!display_surface->visible(),
           "DvrGraphicsContext::Post: Posting buffer on invisible surface!!!");
  // The NativeBufferProducer closes the fence fd, so dup it for tracking in the
  // frame history.
  frame_history.OnFrameSubmit(LocalHandle::AsDuplicate(fence_fd));
  int result = buffer->Post(fence_fd, 0);
  return result;
}
968
969int DvrGraphicsContext::SetSwapInterval(ANativeWindow* window, int interval) {
970 ALOGI_IF(TRACE, "SetSwapInterval: window=%p interval=%d", window, interval);
971 DvrGraphicsContext* self = getSelf(window);
972 (void)self;
Alex Vakulenko4fe60582017-02-02 11:35:59 -0800973 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -0800974 return android::NO_ERROR;
975}
976
// ANativeWindow::dequeueBuffer hook (Vulkan path only). Hands out the
// context's current buffer, dequeuing one from the buffer queue only when no
// buffer is already outstanding. The caller receives ownership of the
// buffer's release fence fd.
int DvrGraphicsContext::DequeueBuffer(ANativeWindow* window,
                                      ANativeWindowBuffer** buffer,
                                      int* fence_fd) {
  ATRACE_NAME(__PRETTY_FUNCTION__);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  // Reuse a previously dequeued buffer if one is still outstanding; otherwise
  // pull a fresh one from the queue (may block in Dequeue()).
  if (!self->current_buffer) {
    self->current_buffer = self->buffer_queue.get()->Dequeue();
  }
  ATRACE_ASYNC_BEGIN("BufferDraw", self->current_buffer->buffer()->id());
  // Release() relinquishes ownership of the fd to the caller.
  *fence_fd = self->current_buffer->ClaimReleaseFence().Release();
  *buffer = self->current_buffer;

  ALOGI_IF(TRACE, "DvrGraphicsContext::DequeueBuffer: fence_fd=%d", *fence_fd);
  return android::NO_ERROR;
}
996
// ANativeWindow::queueBuffer hook (Vulkan path only). Posts |buffer| to the
// display unless it was already posted early (buffer_already_posted), in
// which case the fence is simply closed. Always clears the current-buffer
// and already-posted state before returning.
int DvrGraphicsContext::QueueBuffer(ANativeWindow* window,
                                    ANativeWindowBuffer* buffer, int fence_fd) {
  ATRACE_NAME("NativeWindow::QueueBuffer");
  ALOGI_IF(TRACE, "NativeWindow::QueueBuffer: fence_fd=%d", fence_fd);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  android::dvr::NativeBufferProducer* native_buffer =
      static_cast<android::dvr::NativeBufferProducer*>(buffer);
  ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id());
  bool do_post = true;
  if (self->buffer_already_posted) {
    // Check that the buffer is the one we expect, but handle it if this happens
    // in production by allowing this buffer to post on top of the previous one.
    LOG_FATAL_IF(native_buffer != self->current_buffer);
    if (native_buffer == self->current_buffer) {
      do_post = false;
      // The early post already consumed a fence; this one is unused.
      if (fence_fd >= 0)
        close(fence_fd);
    }
  }
  if (do_post) {
    ATRACE_ASYNC_BEGIN("BufferPost", native_buffer->buffer()->id());
    // Post() takes ownership of fence_fd (see DvrGraphicsContext::Post).
    self->Post(native_buffer, fence_fd);
  }
  self->buffer_already_posted = false;
  self->current_buffer = nullptr;

  return android::NO_ERROR;
}
1029
// ANativeWindow::cancelBuffer hook (Vulkan path only). Returns |buffer| to
// the buffer queue unless it was already posted early (in which case it is
// already owned by the display and must not be re-enqueued). The fence fd is
// always closed here; cancellation never forwards it.
int DvrGraphicsContext::CancelBuffer(ANativeWindow* window,
                                     ANativeWindowBuffer* buffer,
                                     int fence_fd) {
  ATRACE_NAME("DvrGraphicsContext::CancelBuffer");
  ALOGI_IF(TRACE, "DvrGraphicsContext::CancelBuffer: fence_fd: %d", fence_fd);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

  android::dvr::NativeBufferProducer* native_buffer =
      static_cast<android::dvr::NativeBufferProducer*>(buffer);
  ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id());
  ATRACE_INT("CancelBuffer", native_buffer->buffer()->id());
  bool do_enqueue = true;
  if (self->buffer_already_posted) {
    // Check that the buffer is the one we expect, but handle it if this happens
    // in production by returning this buffer to the buffer queue.
    LOG_FATAL_IF(native_buffer != self->current_buffer);
    if (native_buffer == self->current_buffer) {
      do_enqueue = false;
    }
  }
  if (do_enqueue) {
    self->buffer_queue.get()->Enqueue(native_buffer);
  }
  if (fence_fd >= 0)
    close(fence_fd);
  self->buffer_already_posted = false;
  self->current_buffer = nullptr;

  return android::NO_ERROR;
}
1063
1064int DvrGraphicsContext::Query(const ANativeWindow* window, int what,
1065 int* value) {
1066 DvrGraphicsContext* self = getSelf(const_cast<ANativeWindow*>(window));
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001067 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001068 std::lock_guard<std::mutex> autolock(self->lock_);
1069
1070 switch (what) {
1071 case NATIVE_WINDOW_WIDTH:
1072 *value = self->display_surface->width();
1073 return android::NO_ERROR;
1074 case NATIVE_WINDOW_HEIGHT:
1075 *value = self->display_surface->height();
1076 return android::NO_ERROR;
1077 case NATIVE_WINDOW_FORMAT:
1078 *value = self->display_surface->format();
1079 return android::NO_ERROR;
1080 case NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS:
1081 *value = 1;
1082 return android::NO_ERROR;
1083 case NATIVE_WINDOW_CONCRETE_TYPE:
1084 *value = NATIVE_WINDOW_SURFACE;
1085 return android::NO_ERROR;
1086 case NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER:
1087 *value = 1;
1088 return android::NO_ERROR;
1089 case NATIVE_WINDOW_DEFAULT_WIDTH:
1090 *value = self->display_surface->width();
1091 return android::NO_ERROR;
1092 case NATIVE_WINDOW_DEFAULT_HEIGHT:
1093 *value = self->display_surface->height();
1094 return android::NO_ERROR;
1095 case NATIVE_WINDOW_TRANSFORM_HINT:
1096 *value = 0;
1097 return android::NO_ERROR;
1098 }
1099
1100 *value = 0;
1101 return android::BAD_VALUE;
1102}
1103
1104int DvrGraphicsContext::Perform(ANativeWindow* window, int operation, ...) {
1105 DvrGraphicsContext* self = getSelf(window);
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001106 LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001107 std::lock_guard<std::mutex> autolock(self->lock_);
1108
1109 va_list args;
1110 va_start(args, operation);
1111
1112 // TODO(eieio): The following operations are not used at this time. They are
1113 // included here to help document which operations may be useful and what
1114 // parameters they take.
1115 switch (operation) {
1116 case NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: {
1117 int w = va_arg(args, int);
1118 int h = va_arg(args, int);
1119 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: w=%d h=%d", w, h);
1120 return android::NO_ERROR;
1121 }
1122
1123 case NATIVE_WINDOW_SET_BUFFERS_FORMAT: {
1124 int format = va_arg(args, int);
1125 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_FORMAT: format=%d", format);
1126 return android::NO_ERROR;
1127 }
1128
1129 case NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: {
1130 int transform = va_arg(args, int);
1131 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: transform=%d",
1132 transform);
1133 return android::NO_ERROR;
1134 }
1135
1136 case NATIVE_WINDOW_SET_USAGE: {
1137 int usage = va_arg(args, int);
1138 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_USAGE: usage=%d", usage);
1139 return android::NO_ERROR;
1140 }
1141
1142 case NATIVE_WINDOW_CONNECT:
1143 case NATIVE_WINDOW_DISCONNECT:
1144 case NATIVE_WINDOW_SET_BUFFERS_GEOMETRY:
1145 case NATIVE_WINDOW_API_CONNECT:
1146 case NATIVE_WINDOW_API_DISCONNECT:
1147 // TODO(eieio): we should implement these
1148 return android::NO_ERROR;
1149
1150 case NATIVE_WINDOW_SET_BUFFER_COUNT: {
1151 int buffer_count = va_arg(args, int);
1152 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFER_COUNT: bufferCount=%d",
1153 buffer_count);
1154 return android::NO_ERROR;
1155 }
1156 case NATIVE_WINDOW_SET_BUFFERS_DATASPACE: {
1157 android_dataspace_t data_space =
1158 static_cast<android_dataspace_t>(va_arg(args, int));
1159 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DATASPACE: dataSpace=%d",
1160 data_space);
1161 return android::NO_ERROR;
1162 }
1163 case NATIVE_WINDOW_SET_SCALING_MODE: {
1164 int mode = va_arg(args, int);
1165 ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_SCALING_MODE: mode=%d", mode);
1166 return android::NO_ERROR;
1167 }
1168
1169 case NATIVE_WINDOW_LOCK:
1170 case NATIVE_WINDOW_UNLOCK_AND_POST:
1171 case NATIVE_WINDOW_SET_CROP:
1172 case NATIVE_WINDOW_SET_BUFFERS_TIMESTAMP:
1173 return android::INVALID_OPERATION;
1174 }
1175
1176 return android::NAME_NOT_FOUND;
1177}
1178
// Legacy fence-less dequeue hook: forwards to DequeueBuffer() and disposes of
// the fence fd the caller cannot accept.
int DvrGraphicsContext::DequeueBuffer_DEPRECATED(ANativeWindow* window,
                                                 ANativeWindowBuffer** buffer) {
  int fence_fd = -1;
  int ret = DequeueBuffer(window, buffer, &fence_fd);

  // wait for fence
  // NOTE(review): despite the comment above, the fence is closed without
  // being waited on (no sync_wait call) — the GPU may still be using the
  // buffer when this returns. TODO: confirm whether waiting is required on
  // this path.
  if (ret == android::NO_ERROR && fence_fd != -1)
    close(fence_fd);

  return ret;
}
1190
// Legacy fence-less cancel hook: forwards to CancelBuffer() with no fence.
int DvrGraphicsContext::CancelBuffer_DEPRECATED(ANativeWindow* window,
                                                ANativeWindowBuffer* buffer) {
  return CancelBuffer(window, buffer, -1);
}
1195
// Legacy fence-less queue hook: forwards to QueueBuffer() with no fence.
int DvrGraphicsContext::QueueBuffer_DEPRECATED(ANativeWindow* window,
                                               ANativeWindowBuffer* buffer) {
  return QueueBuffer(window, buffer, -1);
}
1200
// Legacy lock hook: intentionally a no-op that reports success (CPU locking
// is not supported on this window).
int DvrGraphicsContext::LockBuffer_DEPRECATED(ANativeWindow* /*window*/,
                                              ANativeWindowBuffer* /*buffer*/) {
  return android::NO_ERROR;
}
1205// End ANativeWindow implementation
1206
// Writes the render pose for the current frame's buffer into the shared
// display-surface metadata so the compositor can apply EDS. Must be called
// after dvrBeginRenderFrame* has established current_buffer; returns -EPERM
// otherwise. Returns 0 on success.
int dvrSetEdsPose(DvrGraphicsContext* graphics_context,
                  float32x4_t render_pose_orientation,
                  float32x4_t render_pose_translation) {
  ATRACE_NAME("dvrSetEdsPose");
  if (!graphics_context->current_buffer) {
    ALOGE("dvrBeginRenderFrame must be called before dvrSetEdsPose");
    return -EPERM;
  }

  // When late-latching is enabled, the pose buffer is written by the GPU, so
  // we don't touch it here.
  // Only lane 0 is compared against the DVR_POSE_LATE_LATCH sentinel; that
  // lane is assumed sufficient to distinguish it from real orientations.
  float32x4_t is_late_latch = DVR_POSE_LATE_LATCH;
  if (render_pose_orientation[0] != is_late_latch[0]) {
    // The metadata buffer is shared with the display service, hence the
    // volatile-qualified pointer: every store below must actually hit memory.
    volatile android::dvr::DisplaySurfaceMetadata* data =
        graphics_context->surface_metadata;
    uint32_t buffer_index =
        graphics_context->current_buffer->surface_buffer_index();
    ALOGE_IF(TRACE, "write pose index %d %f %f", buffer_index,
             render_pose_orientation[0], render_pose_orientation[1]);
    data->orientation[buffer_index] = render_pose_orientation;
    data->translation[buffer_index] = render_pose_translation;
  }

  return 0;
}
1232
// Begins a GL render frame: dequeues a buffer (if none is outstanding),
// records its EDS pose, and rebinds the context's textures to the new
// buffer's EGL images. GL path only (aborts on non-GLES contexts). Returns 0
// on success or a negative errno from dvrSetEdsPose.
int dvrBeginRenderFrameEds(DvrGraphicsContext* graphics_context,
                           float32x4_t render_pose_orientation,
                           float32x4_t render_pose_translation) {
  ATRACE_NAME("dvrBeginRenderFrameEds");
  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);
  CHECK_GL();
  // Grab a buffer from the queue and set its pose.
  // A buffer may already be outstanding if dvrGraphicsPostEarly ran first.
  if (!graphics_context->current_buffer) {
    graphics_context->current_buffer =
        graphics_context->buffer_queue->Dequeue();
  }

  int ret = dvrSetEdsPose(graphics_context, render_pose_orientation,
                          render_pose_translation);
  if (ret < 0)
    return ret;

  ATRACE_ASYNC_BEGIN("BufferDraw",
                     graphics_context->current_buffer->buffer()->id());

  {
    ATRACE_NAME("glEGLImageTargetTexture2DOES");
    // Bind the texture to the latest buffer in the queue.
    for (int i = 0; i < graphics_context->gl.texture_count; ++i) {
      glBindTexture(graphics_context->gl.texture_target_type,
                    graphics_context->gl.texture_id[i]);
      glEGLImageTargetTexture2DOES(
          graphics_context->gl.texture_target_type,
          graphics_context->current_buffer->image_khr(i));
    }
    // Leave no texture bound so callers start from a clean binding state.
    glBindTexture(graphics_context->gl.texture_target_type, 0);
  }
  CHECK_GL();
  return 0;
}
1268int dvrBeginRenderFrameEdsVk(DvrGraphicsContext* graphics_context,
1269 float32x4_t render_pose_orientation,
1270 float32x4_t render_pose_translation,
1271 VkSemaphore acquire_semaphore,
1272 VkFence acquire_fence,
1273 uint32_t* swapchain_image_index,
1274 VkImageView* swapchain_image_view) {
1275 ATRACE_NAME("dvrBeginRenderFrameEds");
Alex Vakulenko4fe60582017-02-02 11:35:59 -08001276 LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api !=
1277 DVR_GRAPHICS_API_VULKAN);
Alex Vakulenkoe4eec202017-01-27 14:41:04 -08001278
1279 // Acquire a swapchain image. This calls Dequeue() internally.
1280 VkResult result = vkAcquireNextImageKHR(
1281 graphics_context->vk.device, graphics_context->vk.swapchain, UINT64_MAX,
1282 acquire_semaphore, acquire_fence, swapchain_image_index);
1283 if (result != VK_SUCCESS)
1284 return -EINVAL;
1285
1286 // Set the pose pose.
1287 int ret = dvrSetEdsPose(graphics_context, render_pose_orientation,
1288 render_pose_translation);
1289 if (ret < 0)
1290 return ret;
1291 *swapchain_image_view =
1292 graphics_context->vk.swapchain_image_views[*swapchain_image_index];
1293 return 0;
1294}
1295
// Convenience wrapper: begins a GL render frame with the DVR_POSE_NO_EDS
// sentinel in place of a real pose, disabling EDS for this frame.
int dvrBeginRenderFrame(DvrGraphicsContext* graphics_context) {
  return dvrBeginRenderFrameEds(graphics_context, DVR_POSE_NO_EDS,
                                DVR_POSE_NO_EDS);
}
// Convenience wrapper: begins a Vulkan render frame with the DVR_POSE_NO_EDS
// sentinel, disabling EDS for this frame.
int dvrBeginRenderFrameVk(DvrGraphicsContext* graphics_context,
                          VkSemaphore acquire_semaphore, VkFence acquire_fence,
                          uint32_t* swapchain_image_index,
                          VkImageView* swapchain_image_view) {
  return dvrBeginRenderFrameEdsVk(
      graphics_context, DVR_POSE_NO_EDS, DVR_POSE_NO_EDS, acquire_semaphore,
      acquire_fence, swapchain_image_index, swapchain_image_view);
}
1308
// Begins a GL render frame using GPU late-latching: the head pose is sampled
// on the GPU instead of being supplied here. Copies the per-view projection,
// eye-from-head, and pose-offset matrices into a LateLatchInput and enqueues
// it; returns the late-latch output buffer id through
// |out_late_latch_buffer_id|. Returns -EPERM if late latching was not enabled
// at context creation, -EINVAL if num_views is too large, 0 on success.
int dvrBeginRenderFrameLateLatch(DvrGraphicsContext* graphics_context,
                                 uint32_t /*flags*/,
                                 uint32_t target_vsync_count, int num_views,
                                 const float** projection_matrices,
                                 const float** eye_from_head_matrices,
                                 const float** pose_offset_matrices,
                                 uint32_t* out_late_latch_buffer_id) {
  if (!graphics_context->late_latch) {
    return -EPERM;
  }
  if (num_views > DVR_GRAPHICS_SURFACE_MAX_VIEWS) {
    ALOGE("dvrBeginRenderFrameLateLatch called with too many views.");
    return -EINVAL;
  }
  // DVR_POSE_LATE_LATCH tells dvrSetEdsPose (via dvrBeginRenderFrameEds) to
  // leave the CPU-side pose metadata untouched.
  dvrBeginRenderFrameEds(graphics_context, DVR_POSE_LATE_LATCH,
                         DVR_POSE_LATE_LATCH);
  auto& ll = graphics_context->late_latch;
  // TODO(jbates) Need to change this shader so that it dumps the single
  // captured pose for both eyes into the display surface metadata buffer at
  // the right index.
  android::dvr::LateLatchInput input;
  memset(&input, 0, sizeof(input));
  // Each matrix is a 4x4 of floats (16 values) copied per view.
  for (int i = 0; i < num_views; ++i) {
    memcpy(input.proj_mat + i, *(projection_matrices + i), 16 * sizeof(float));
    memcpy(input.eye_from_head_mat + i, *(eye_from_head_matrices + i),
           16 * sizeof(float));
    memcpy(input.pose_offset + i, *(pose_offset_matrices + i),
           16 * sizeof(float));
  }
  // The pose ring buffer is indexed by vsync count modulo its size (mask).
  input.pose_index =
      target_vsync_count & android::dvr::kPoseAsyncBufferIndexMask;
  input.render_pose_index =
      graphics_context->current_buffer->surface_buffer_index();
  ll->AddLateLatch(input);
  *out_late_latch_buffer_id = ll->output_buffer_id();
  return 0;
}
1346
// Sleeps until the next frame should begin rendering, |start_delay_ns| after
// the currently scheduled frame-finish time, then records the new frame in
// the frame history and (optionally) reports its schedule. Uses the context's
// timerfd for an absolute one-shot sleep. Returns 0 on success, -1 if either
// the schedule fetch or the sleep failed.
extern "C" int dvrGraphicsWaitNextFrame(
    DvrGraphicsContext* graphics_context, int64_t start_delay_ns,
    DvrFrameSchedule* out_next_frame_schedule) {
  start_delay_ns = std::max(start_delay_ns, static_cast<int64_t>(0));

  // We only do one-shot timers:
  int64_t wake_time_ns = 0;

  uint32_t current_frame_vsync;
  int64_t current_frame_scheduled_finish_ns;
  int64_t vsync_period_ns;

  int fetch_schedule_result = graphics_context->vsync_client->GetSchedInfo(
      &vsync_period_ns, &current_frame_scheduled_finish_ns,
      &current_frame_vsync);
  if (fetch_schedule_result == 0) {
    wake_time_ns = current_frame_scheduled_finish_ns + start_delay_ns;
    // If the last wakeup time is still in the future, use it instead to avoid
    // major schedule jumps when applications call WaitNextFrame with
    // aggressive offsets.
    int64_t now = android::dvr::GetSystemClockNs();
    if (android::dvr::TimestampGT(wake_time_ns - vsync_period_ns, now)) {
      wake_time_ns -= vsync_period_ns;
      --current_frame_vsync;
    }
    // If the next wakeup time is in the past, add a vsync period to keep the
    // application on schedule.
    if (android::dvr::TimestampLT(wake_time_ns, now)) {
      wake_time_ns += vsync_period_ns;
      ++current_frame_vsync;
    }
  } else {
    ALOGE("Error getting frame schedule because: %s",
          strerror(-fetch_schedule_result));
    // Sleep for a vsync period to avoid cascading failure.
    wake_time_ns = android::dvr::GetSystemClockNs() +
                   graphics_context->display_metrics.vsync_period_ns;
  }

  // Adjust nsec to [0..999,999,999].
  struct itimerspec wake_time;
  wake_time.it_interval.tv_sec = 0;   // one-shot: no repeat interval
  wake_time.it_interval.tv_nsec = 0;
  wake_time.it_value = android::dvr::NsToTimespec(wake_time_ns);
  bool sleep_result =
      timerfd_settime(graphics_context->timerfd.Get(), TFD_TIMER_ABSTIME,
                      &wake_time, nullptr) == 0;
  if (sleep_result) {
    ATRACE_NAME("sleep");
    // Blocking read on the timerfd is the actual sleep; it returns the
    // expiration count once the absolute deadline passes.
    uint64_t expirations = 0;
    sleep_result = read(graphics_context->timerfd.Get(), &expirations,
                        sizeof(uint64_t)) == sizeof(uint64_t);
    if (!sleep_result) {
      ALOGE("Error: timerfd read failed");
    }
  } else {
    ALOGE("Error: timerfd_settime failed because: %s", strerror(errno));
  }

  auto& frame_history = graphics_context->frame_history;
  frame_history.CheckForFinishedFrames();
  if (fetch_schedule_result == 0) {
    // Predict which vsync the new frame will land on based on recent history.
    uint32_t next_frame_vsync =
        current_frame_vsync +
        frame_history.PredictNextFrameVsyncInterval(vsync_period_ns);
    int64_t next_frame_scheduled_finish =
        (wake_time_ns - start_delay_ns) + vsync_period_ns;
    frame_history.OnFrameStart(next_frame_vsync, next_frame_scheduled_finish);
    if (out_next_frame_schedule) {
      out_next_frame_schedule->vsync_count = next_frame_vsync;
      out_next_frame_schedule->scheduled_frame_finish_ns =
          next_frame_scheduled_finish;
    }
  } else {
    // No schedule info: record a sentinel frame so history stays consistent.
    frame_history.OnFrameStart(UINT32_MAX, -1);
  }

  return (fetch_schedule_result == 0 && sleep_result) ? 0 : -1;
}
1426
// Posts the current frame's buffer to the display before rendering finishes
// (no fence is attached), allowing the compositor to latch it early. GL path
// only (aborts otherwise). Safe to call before or after dvrBeginRenderFrame;
// at most one early post happens per frame.
extern "C" void dvrGraphicsPostEarly(DvrGraphicsContext* graphics_context) {
  ATRACE_NAME("dvrGraphicsPostEarly");
  ALOGI_IF(TRACE, "dvrGraphicsPostEarly");

  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);

  // Note that this function can be called before or after
  // dvrBeginRenderFrame.
  if (!graphics_context->buffer_already_posted) {
    graphics_context->buffer_already_posted = true;

    if (!graphics_context->current_buffer) {
      graphics_context->current_buffer =
          graphics_context->buffer_queue->Dequeue();
    }

    auto buffer = graphics_context->current_buffer->buffer().get();
    ATRACE_ASYNC_BEGIN("BufferPost", buffer->id());
    // Empty LocalHandle == no acquire fence; the consumer does not wait.
    int result = buffer->Post<uint64_t>(LocalHandle(), 0);
    if (result < 0)
      ALOGE("Buffer post failed: %d (%s)", result, strerror(-result));
  }
}
1450
// Finishes the current GL frame: flushes GL, creates a sync fence, posts the
// buffer (unless it was already posted early), and records the fence in the
// frame history. GL path only (aborts otherwise). Returns 0 on success or
// -EPERM if no frame was begun.
int dvrPresent(DvrGraphicsContext* graphics_context) {
  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES);

  std::array<char, 128> buf;
  snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|",
           graphics_context->frame_history.GetCurrentFrameVsync());
  ATRACE_NAME(buf.data());

  if (!graphics_context->current_buffer) {
    ALOGE("Error: dvrPresent called without dvrBeginRenderFrame");
    return -EPERM;
  }

  // Flush pending GL work and obtain a native fence that signals when the
  // frame's rendering completes.
  LocalHandle fence_fd =
      android::dvr::CreateGLSyncAndFlush(graphics_context->gl.egl_display);

  ALOGI_IF(TRACE, "PostBuffer: buffer_id=%d, fence_fd=%d",
           graphics_context->current_buffer->buffer()->id(), fence_fd.Get());
  ALOGW_IF(!graphics_context->display_surface->visible(),
           "PostBuffer: Posting buffer on invisible surface!!!");

  auto buffer = graphics_context->current_buffer->buffer().get();
  ATRACE_ASYNC_END("BufferDraw", buffer->id());
  if (!graphics_context->buffer_already_posted) {
    ATRACE_ASYNC_BEGIN("BufferPost", buffer->id());
    // NOTE(review): fence_fd is passed here and moved into OnFrameSubmit
    // below; this assumes Post() borrows the handle rather than consuming
    // it — confirm against BufferProducer::Post's signature.
    int result = buffer->Post<uint64_t>(fence_fd, 0);
    if (result < 0)
      ALOGE("Buffer post failed: %d (%s)", result, strerror(-result));
  }

  graphics_context->frame_history.OnFrameSubmit(std::move(fence_fd));
  graphics_context->buffer_already_posted = false;
  graphics_context->current_buffer = nullptr;
  return 0;
}
1486
// Finishes the current Vulkan frame by presenting |swapchain_image_index| on
// the context's present queue, optionally waiting on |submit_semaphore|.
// Vulkan path only (aborts otherwise). Returns 0 on success, -EPERM if no
// frame was begun, -EINVAL if vkQueuePresentKHR fails.
int dvrPresentVk(DvrGraphicsContext* graphics_context,
                 VkSemaphore submit_semaphore, uint32_t swapchain_image_index) {
  LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api !=
                      DVR_GRAPHICS_API_VULKAN);

  std::array<char, 128> buf;
  snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|",
           graphics_context->frame_history.GetCurrentFrameVsync());
  ATRACE_NAME(buf.data());

  if (!graphics_context->current_buffer) {
    ALOGE("Error: dvrPresentVk called without dvrBeginRenderFrameVk");
    return -EPERM;
  }

  // Present the specified image. Internally, this gets a fence from the
  // Vulkan driver and passes it to DvrGraphicsContext::Post(),
  // which in turn passes it to buffer->Post() and adds it to frame_history.
  VkPresentInfoKHR present_info = {};
  present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
  present_info.swapchainCount = 1;
  present_info.pSwapchains = &graphics_context->vk.swapchain;
  present_info.pImageIndices = &swapchain_image_index;
  // With waitSemaphoreCount == 0 the pWaitSemaphores pointer is ignored.
  present_info.waitSemaphoreCount =
      (submit_semaphore != VK_NULL_HANDLE) ? 1 : 0;
  present_info.pWaitSemaphores = &submit_semaphore;
  VkResult result =
      vkQueuePresentKHR(graphics_context->vk.present_queue, &present_info);
  if (result != VK_SUCCESS) {
    return -EINVAL;
  }

  return 0;
}
1521
1522extern "C" int dvrGetFrameScheduleResults(DvrGraphicsContext* context,
1523 DvrFrameScheduleResult* results,
1524 int in_result_count) {
1525 if (!context || !results)
1526 return -EINVAL;
1527
1528 return context->frame_history.GetPreviousFrameResults(results,
1529 in_result_count);
1530}
1531
// Shows or hides the context's display surface (nonzero |visible| = shown).
extern "C" void dvrGraphicsSurfaceSetVisible(
    DvrGraphicsContext* graphics_context, int visible) {
  graphics_context->display_surface->SetVisible(visible);
}
1536
// Returns 1 if the context's display surface is currently visible, else 0.
extern "C" int dvrGraphicsSurfaceGetVisible(
    DvrGraphicsContext* graphics_context) {
  return graphics_context->display_surface->visible() ? 1 : 0;
}
1541
// Sets the compositor z-order of the context's display surface.
extern "C" void dvrGraphicsSurfaceSetZOrder(
    DvrGraphicsContext* graphics_context, int z_order) {
  graphics_context->display_surface->SetZOrder(z_order);
}
1546
// Returns the current compositor z-order of the context's display surface.
extern "C" int dvrGraphicsSurfaceGetZOrder(
    DvrGraphicsContext* graphics_context) {
  return graphics_context->display_surface->z_order();
}
1551
// Creates a video-mesh surface attached to the context's display surface.
// Returns a heap-allocated DvrVideoMeshSurface the caller must release with
// dvrGraphicsVideoMeshSurfaceDestroy, or nullptr if the display service did
// not hand back a valid channel.
extern "C" DvrVideoMeshSurface* dvrGraphicsVideoMeshSurfaceCreate(
    DvrGraphicsContext* graphics_context) {
  auto display_surface = graphics_context->display_surface;
  // A DisplaySurface must be created prior to the creation of a
  // VideoMeshSurface.
  LOG_ALWAYS_FATAL_IF(display_surface == nullptr);

  LocalChannelHandle surface_handle = display_surface->CreateVideoMeshSurface();
  if (!surface_handle.valid()) {
    return nullptr;
  }

  std::unique_ptr<DvrVideoMeshSurface> surface(new DvrVideoMeshSurface);
  surface->client =
      android::dvr::VideoMeshSurfaceClient::Import(std::move(surface_handle));

  // TODO(jwcai) The next line is not needed...
  // NOTE(review): the returned queue is unused here; the call is kept in case
  // GetProducerQueue() has a required side effect (e.g. lazy queue creation)
  // — confirm before removing.
  auto producer_queue = surface->client->GetProducerQueue();
  return surface.release();
}
1572
// Destroys a surface returned by dvrGraphicsVideoMeshSurfaceCreate; cleanup
// is delegated to the DvrVideoMeshSurface destructor. nullptr is a no-op.
extern "C" void dvrGraphicsVideoMeshSurfaceDestroy(
    DvrVideoMeshSurface* surface) {
  delete surface;
}
1577
// Publishes a 4x4 transform for |eye| into the video-mesh surface's shared
// metadata, slotted by the current graphics buffer index.
// NOTE(review): elements {i, i+4, i+8, i+12} of |transform| are packed into
// val[i] — i.e. the input is read in transposed (presumably column-major)
// order; confirm the expected matrix layout with callers.
// NOTE(review): graphics_context->current_buffer is dereferenced without a
// null check, so this must only be called between dvrBeginRenderFrame* and
// present — TODO confirm and/or guard like dvrSetEdsPose does.
extern "C" void dvrGraphicsVideoMeshSurfacePresent(
    DvrGraphicsContext* graphics_context, DvrVideoMeshSurface* surface,
    const int eye, const float* transform) {
  // Shared with the display service, hence the volatile-qualified pointer.
  volatile android::dvr::VideoMeshSurfaceMetadata* metadata =
      surface->client->GetMetadataBufferPtr();

  const uint32_t graphics_buffer_index =
      graphics_context->current_buffer->surface_buffer_index();

  for (int i = 0; i < 4; ++i) {
    metadata->transform[graphics_buffer_index][eye].val[i] = {
        transform[i + 0], transform[i + 4], transform[i + 8], transform[i + 12],
    };
  }
}