#include <dvr/graphics.h>

#include <inttypes.h>
#include <sys/timerfd.h>
#include <array>
#include <vector>

#include <log/log.h>
#include <utils/Trace.h>

#ifndef VK_USE_PLATFORM_ANDROID_KHR
#define VK_USE_PLATFORM_ANDROID_KHR 1
#endif
#include <vulkan/vulkan.h>

#include <dvr/dvr_display_types.h>
#include <pdx/file_handle.h>
#include <private/dvr/clock_ns.h>
#include <private/dvr/debug.h>
#include <private/dvr/frame_history.h>
#include <private/dvr/gl_fenced_flush.h>
#include <private/dvr/graphics/vr_gl_extensions.h>
#include <private/dvr/graphics_private.h>
#include <private/dvr/late_latch.h>
#include <private/dvr/native_buffer_queue.h>
#include <private/dvr/platform_defines.h>
#include <private/dvr/sensor_constants.h>
#include <private/dvr/vsync_client.h>

#include <system/window.h>

#ifndef EGL_CONTEXT_MAJOR_VERSION
#define EGL_CONTEXT_MAJOR_VERSION 0x3098
#define EGL_CONTEXT_MINOR_VERSION 0x30FB
#endif

using android::pdx::ErrorStatus;
using android::pdx::LocalHandle;
using android::pdx::LocalChannelHandle;
using android::pdx::Status;

using android::dvr::display::DisplayClient;
using android::dvr::display::Metrics;
using android::dvr::display::NativeBufferQueue;
using android::dvr::display::Surface;
using android::dvr::display::SurfaceAttribute;
using android::dvr::display::SurfaceAttributes;
using android::dvr::display::SurfaceAttributeValue;
using android::dvr::VSyncClient;

namespace {

// TODO(urbanus): revisit once we have per-platform usage config in place.
constexpr uint64_t kDefaultDisplaySurfaceUsage =
    GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET |
    GRALLOC1_PRODUCER_USAGE_PRIVATE_1 | GRALLOC1_CONSUMER_USAGE_CLIENT_TARGET |
    GRALLOC1_CONSUMER_USAGE_GPU_TEXTURE;
constexpr uint32_t kDefaultDisplaySurfaceFormat = HAL_PIXEL_FORMAT_RGBA_8888;
// TODO(alexst): revisit this count when HW encode is available for casting.
constexpr size_t kDefaultBufferCount = 4;

// Use with dvrBeginRenderFrame to disable EDS for the current frame.
constexpr float32x4_t DVR_POSE_NO_EDS = {10.0f, 0.0f, 0.0f, 0.0f};

// Use with dvrBeginRenderFrame to indicate that GPU late-latching is being used
// for determining the render pose.
constexpr float32x4_t DVR_POSE_LATE_LATCH = {20.0f, 0.0f, 0.0f, 0.0f};
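// (A valid orientation quaternion has components in [-1, 1], so these sentinel
// values cannot be confused with a real render pose.)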

#ifndef NDEBUG

static const char* GetGlCallbackType(GLenum type) {
  switch (type) {
    case GL_DEBUG_TYPE_ERROR_KHR:
      return "ERROR";
    case GL_DEBUG_TYPE_DEPRECATED_BEHAVIOR_KHR:
      return "DEPRECATED_BEHAVIOR";
    case GL_DEBUG_TYPE_UNDEFINED_BEHAVIOR_KHR:
      return "UNDEFINED_BEHAVIOR";
    case GL_DEBUG_TYPE_PORTABILITY_KHR:
      return "PORTABILITY";
    case GL_DEBUG_TYPE_PERFORMANCE_KHR:
      return "PERFORMANCE";
    case GL_DEBUG_TYPE_OTHER_KHR:
      return "OTHER";
    default:
      return "UNKNOWN";
  }
}

static void on_gl_error(GLenum /*source*/, GLenum type, GLuint /*id*/,
                        GLenum severity, GLsizei /*length*/,
                        const char* message, const void* /*user_param*/) {
  char msg[400];
  snprintf(msg, sizeof(msg), "[" __FILE__ ":%u] GL %s: %s", __LINE__,
           GetGlCallbackType(type), message);
  switch (severity) {
    case GL_DEBUG_SEVERITY_LOW_KHR:
      ALOGI("%s", msg);
      break;
    case GL_DEBUG_SEVERITY_MEDIUM_KHR:
      ALOGW("%s", msg);
      break;
    case GL_DEBUG_SEVERITY_HIGH_KHR:
      ALOGE("%s", msg);
      break;
  }
  fprintf(stderr, "%s\n", msg);
}

#endif

int DvrToHalSurfaceFormat(int dvr_surface_format) {
  switch (dvr_surface_format) {
    case DVR_SURFACE_FORMAT_RGBA_8888:
      return HAL_PIXEL_FORMAT_RGBA_8888;
    case DVR_SURFACE_FORMAT_RGB_565:
      return HAL_PIXEL_FORMAT_RGB_565;
    default:
      return HAL_PIXEL_FORMAT_RGBA_8888;
  }
}

int SelectEGLConfig(EGLDisplay dpy, EGLint* attr, unsigned format,
                    EGLConfig* config) {
  std::array<EGLint, 4> desired_rgba;
  switch (format) {
    case HAL_PIXEL_FORMAT_RGBA_8888:
    case HAL_PIXEL_FORMAT_BGRA_8888:
      desired_rgba = {{8, 8, 8, 8}};
      break;
    case HAL_PIXEL_FORMAT_RGB_565:
      desired_rgba = {{5, 6, 5, 0}};
      break;
    default:
      ALOGE("Unsupported framebuffer pixel format %d", format);
      return -1;
  }

  EGLint max_configs = 0;
  if (eglGetConfigs(dpy, NULL, 0, &max_configs) == EGL_FALSE) {
    ALOGE("No EGL configurations available?!");
    return -1;
  }

  std::vector<EGLConfig> configs(max_configs);

  EGLint num_configs;
  if (eglChooseConfig(dpy, attr, &configs[0], max_configs, &num_configs) ==
      EGL_FALSE) {
    ALOGE("eglChooseConfig failed");
    return -1;
  }

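  // eglChooseConfig() does not let us require exact channel sizes, so scan the
  // returned configs for an exact RGBA match for the requested buffer format.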
  std::array<EGLint, 4> config_rgba;
  for (int i = 0; i < num_configs; i++) {
    eglGetConfigAttrib(dpy, configs[i], EGL_RED_SIZE, &config_rgba[0]);
    eglGetConfigAttrib(dpy, configs[i], EGL_GREEN_SIZE, &config_rgba[1]);
    eglGetConfigAttrib(dpy, configs[i], EGL_BLUE_SIZE, &config_rgba[2]);
    eglGetConfigAttrib(dpy, configs[i], EGL_ALPHA_SIZE, &config_rgba[3]);
    if (config_rgba == desired_rgba) {
      *config = configs[i];
      return 0;
    }
  }

  ALOGE("Cannot find a matching EGL config");
  return -1;
}

void DestroyEglContext(EGLDisplay egl_display, EGLContext* egl_context) {
  if (*egl_context != EGL_NO_CONTEXT) {
    eglDestroyContext(egl_display, *egl_context);
    *egl_context = EGL_NO_CONTEXT;
  }
}

// Perform internal initialization. A GL context must be bound to the current
// thread.
// @param internally_created_context True if we created and own the GL context,
// false if it was supplied by the application.
// @return 0 if init was successful, or a negative error code on failure.
int InitGl(bool internally_created_context) {
  EGLDisplay egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
  if (egl_display == EGL_NO_DISPLAY) {
    ALOGE("eglGetDisplay failed");
    return -EINVAL;
  }

  EGLContext egl_context = eglGetCurrentContext();
  if (egl_context == EGL_NO_CONTEXT) {
    ALOGE("No GL context bound");
    return -EINVAL;
  }

  glGetError();  // Clear the error state
  GLint major_version, minor_version;
  glGetIntegerv(GL_MAJOR_VERSION, &major_version);
  glGetIntegerv(GL_MINOR_VERSION, &minor_version);
  if (glGetError() != GL_NO_ERROR) {
    // GL_MAJOR_VERSION and GL_MINOR_VERSION were added in GLES 3. If we get an
    // error querying them it's almost certainly because it's GLES 1 or 2.
    ALOGE("Error getting GL version. Must be GLES 3.2 or greater.");
    return -EINVAL;
  }

  if (major_version < 3 || (major_version == 3 && minor_version < 2)) {
    ALOGE("Invalid GL version: %d.%d. Must be GLES 3.2 or greater.",
          major_version, minor_version);
    return -EINVAL;
  }

#ifndef NDEBUG
  if (internally_created_context) {
    // Enable verbose GL debug output.
    glEnable(GL_DEBUG_OUTPUT_SYNCHRONOUS_KHR);
    glDebugMessageCallbackKHR(on_gl_error, NULL);
    GLuint unused_ids = 0;
    glDebugMessageControlKHR(GL_DONT_CARE, GL_DONT_CARE, GL_DONT_CARE, 0,
                             &unused_ids, GL_TRUE);
  }
#else
  (void)internally_created_context;
#endif

  load_gl_extensions();
  return 0;
}

int CreateEglContext(EGLDisplay egl_display, DvrSurfaceParameter* parameters,
                     EGLContext* egl_context) {
  *egl_context = EGL_NO_CONTEXT;

  EGLint major, minor;
  if (!eglInitialize(egl_display, &major, &minor)) {
    ALOGE("Failed to initialize EGL");
    return -ENXIO;
  }

  ALOGI("EGL version: %d.%d\n", major, minor);

  int buffer_format = kDefaultDisplaySurfaceFormat;

  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_FORMAT_IN:
        buffer_format = DvrToHalSurfaceFormat(p->value);
        break;
    }
  }

  EGLint config_attrs[] = {EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
                           EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT, EGL_NONE};
  EGLConfig config = {0};

  int ret = SelectEGLConfig(egl_display, config_attrs, buffer_format, &config);
  if (ret < 0)
    return ret;

  ALOGI("EGL SelectEGLConfig ok.\n");

  EGLint context_attrs[] = {EGL_CONTEXT_MAJOR_VERSION,
                            3,
                            EGL_CONTEXT_MINOR_VERSION,
                            2,
#ifndef NDEBUG
                            EGL_CONTEXT_FLAGS_KHR,
                            EGL_CONTEXT_OPENGL_DEBUG_BIT_KHR,
#endif
                            EGL_NONE};

  *egl_context =
      eglCreateContext(egl_display, config, EGL_NO_CONTEXT, context_attrs);
  if (*egl_context == EGL_NO_CONTEXT) {
    ALOGE("eglCreateContext failed");
    return -ENXIO;
  }

  ALOGI("eglCreateContext ok.\n");

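  // Bind the context with no draw/read surface; render targets are attached
  // later via EGLImage-backed textures (this assumes surfaceless-context
  // support in the EGL driver).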
  if (!eglMakeCurrent(egl_display, EGL_NO_SURFACE, EGL_NO_SURFACE,
                      *egl_context)) {
    ALOGE("eglMakeCurrent failed");
    DestroyEglContext(egl_display, egl_context);
    return -EINVAL;
  }

  return 0;
}

// Utility structure to hold info related to creating a surface.
struct SurfaceResult {
  std::shared_ptr<Surface> surface;
  Metrics metrics;
  uint32_t width;
  uint32_t height;
  uint32_t format;
  uint64_t usage;
  size_t capacity;
  int geometry;
  bool direct_surface;
};

Status<std::tuple<std::shared_ptr<android::dvr::ProducerQueue>,
                  std::shared_ptr<android::dvr::BufferProducer>,
                  volatile DisplaySurfaceMetadata*>>
CreateMetadataBuffer(const std::shared_ptr<Surface>& surface,
                     bool direct_surface) {
  std::shared_ptr<android::dvr::ProducerQueue> queue;
  std::shared_ptr<android::dvr::BufferProducer> buffer;

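  // Two allocation paths: a normal surface gets a single-buffer producer queue
  // so the compositor can import the metadata block through the consumer end,
  // while a direct surface uses a stand-alone uncached blob buffer. In both
  // cases the buffer is a gralloc BLOB sized to DisplaySurfaceMetadata.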
  if (!direct_surface) {
    auto queue_status = surface->CreateQueue(
        sizeof(DisplaySurfaceMetadata), 1, 1, HAL_PIXEL_FORMAT_BLOB,
        GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET |
            GRALLOC1_PRODUCER_USAGE_CPU_WRITE_OFTEN |
            GRALLOC1_CONSUMER_USAGE_GPU_DATA_BUFFER,
        1);
    if (!queue_status) {
      ALOGE("CreateMetadataBuffer: Failed to create queue: %s",
            queue_status.GetErrorMessage().c_str());
      return queue_status.error_status();
    }

    queue = queue_status.take();
    LocalHandle fence;
    size_t slot;
    auto buffer_status = queue->Dequeue(-1, &slot, &fence);
    if (!buffer_status) {
      ALOGE("CreateMetadataBuffer: Failed to dequeue buffer: %s",
            buffer_status.GetErrorMessage().c_str());
      return buffer_status.error_status();
    }
    buffer = buffer_status.take();
  } else {
    buffer = android::dvr::BufferProducer::CreateUncachedBlob(
        sizeof(DisplaySurfaceMetadata));
    if (!buffer) {
      ALOGE("CreateMetadataBuffer: Failed to create stand-in buffer!");
      return ErrorStatus(ENOMEM);
    }
  }

  void* address = nullptr;
  int ret =
      buffer->GetBlobReadWritePointer(sizeof(DisplaySurfaceMetadata), &address);

  if (ret < 0) {
    ALOGE("CreateMetadataBuffer: Failed to map buffer: %s", strerror(-ret));
    return ErrorStatus(-ret);
  }

  // Post the buffer so that the compositor can retrieve it from the consumer
  // queue.
  ret = buffer->Post<void>(LocalHandle{});
  if (ret < 0) {
    ALOGE("CreateMetadataBuffer: Failed to post buffer: %s", strerror(-ret));
    return ErrorStatus(-ret);
  }

  ALOGD_IF(TRACE, "CreateMetadataBuffer: queue_id=%d buffer_id=%d address=%p",
           queue ? queue->id() : -1, buffer->id(), address);
  return {{std::move(queue), std::move(buffer),
           static_cast<DisplaySurfaceMetadata*>(address)}};
}

}  // anonymous namespace

Status<SurfaceResult> CreateSurface(struct DvrSurfaceParameter* parameters) {
  int error;
  auto client = DisplayClient::Create(&error);
  if (!client) {
    ALOGE("CreateApplicationSurface: Failed to create display client!");
    return ErrorStatus(error);
  }

  auto metrics_status = client->GetDisplayMetrics();
  if (!metrics_status) {
    ALOGE("CreateApplicationSurface: Failed to get display metrics: %s",
          metrics_status.GetErrorMessage().c_str());
    return metrics_status.error_status();
  }

  // Parameters that may be modified by the parameters array. Some of these are
  // here for future expansion.

  uint32_t request_width = metrics_status.get().display_width;
  uint32_t request_height = metrics_status.get().display_height;
  uint32_t request_format = kDefaultDisplaySurfaceFormat;
  uint64_t request_usage = kDefaultDisplaySurfaceUsage;
  size_t request_capacity = kDefaultBufferCount;
  int request_geometry = DVR_SURFACE_GEOMETRY_SINGLE;
  bool disable_distortion = false;
  bool disable_stabilization = false;
  bool disable_cac = false;
  bool request_visible = false;
  bool vertical_flip = false;
  bool direct_surface = false;
  int request_z_order = 0;

  // Handle parameter inputs.
  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_DISABLE_DISTORTION_IN:
        disable_distortion = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_DISABLE_STABILIZATION_IN:
        disable_stabilization = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_DISABLE_CAC_IN:
        disable_cac = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_VISIBLE_IN:
        request_visible = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_Z_ORDER_IN:
        request_z_order = p->value;
        break;
      case DVR_SURFACE_PARAMETER_VERTICAL_FLIP_IN:
        vertical_flip = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_DIRECT_IN:
        direct_surface = !!p->value;
        break;
      case DVR_SURFACE_PARAMETER_WIDTH_IN:
        request_width = p->value;
        break;
      case DVR_SURFACE_PARAMETER_HEIGHT_IN:
        request_height = p->value;
        break;
      case DVR_SURFACE_PARAMETER_FORMAT_IN:
        request_format = p->value;
        break;
      case DVR_SURFACE_PARAMETER_GEOMETRY_IN:
        request_geometry = p->value;
        break;
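      // The remaining keys are consumed elsewhere (during graphics context
      // creation or in the output pass of CreateApplicationSurface), so they
      // are accepted here without action.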
      case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
      case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
      case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
      case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
      case DVR_SURFACE_PARAMETER_INTER_LENS_METERS_OUT:
      case DVR_SURFACE_PARAMETER_LEFT_FOV_LRBT_OUT:
      case DVR_SURFACE_PARAMETER_RIGHT_FOV_LRBT_OUT:
      case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
      case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
      case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
      case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
      case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
      case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
      case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
      case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
      case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
      case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
        break;
      default:
        ALOGE(
            "CreateSurface: Invalid display surface parameter: key=%d "
            "value=%" PRId64,
            p->key, p->value);
        return ErrorStatus(EINVAL);
    }
  }

  // TODO(eieio): Setup a "surface flags" attribute based on the surface
  // parameters gathered above.
  SurfaceAttributes surface_attributes;

  surface_attributes[SurfaceAttribute::Direct] = direct_surface;
  surface_attributes[SurfaceAttribute::Visible] = request_visible;
  surface_attributes[SurfaceAttribute::ZOrder] = request_z_order;

  auto surface_status = Surface::CreateSurface(surface_attributes);
  if (!surface_status) {
    ALOGE("CreateSurface: Failed to create surface: %s",
          surface_status.GetErrorMessage().c_str());
    return surface_status.error_status();
  }

  return {{surface_status.take(), metrics_status.get(), request_width,
           request_height, request_format, request_usage, request_capacity,
           request_geometry, direct_surface}};
}

// TODO(hendrikw): When we remove the calls to this in native_window.cpp, move
// this back into the anonymous namespace
Status<SurfaceResult> CreateApplicationSurface(
    struct DvrSurfaceParameter* parameters) {
  auto surface_status = CreateSurface(parameters);
  if (!surface_status)
    return surface_status;

  // Handle parameter output requests down here so we can return surface info.
  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_DISPLAY_WIDTH_OUT:
        *static_cast<int32_t*>(p->value_out) =
            surface_status.get().metrics.display_width;
        break;
      case DVR_SURFACE_PARAMETER_DISPLAY_HEIGHT_OUT:
        *static_cast<int32_t*>(p->value_out) =
            surface_status.get().metrics.display_height;
        break;
      case DVR_SURFACE_PARAMETER_VSYNC_PERIOD_OUT:
        *static_cast<uint64_t*>(p->value_out) =
            surface_status.get().metrics.vsync_period_ns;
        break;
      case DVR_SURFACE_PARAMETER_SURFACE_WIDTH_OUT:
        *static_cast<uint32_t*>(p->value_out) = surface_status.get().width;
        break;
      case DVR_SURFACE_PARAMETER_SURFACE_HEIGHT_OUT:
        *static_cast<uint32_t*>(p->value_out) = surface_status.get().height;
        break;

      default:
        break;
    }
  }

  return surface_status;
}

extern "C" int dvrGetNativeDisplayDimensions(int* display_width,
                                             int* display_height) {
  int error = 0;
  auto client = DisplayClient::Create(&error);
  if (!client) {
    ALOGE("dvrGetNativeDisplayDimensions: Failed to create display client!");
    return -error;
  }

  auto metrics_status = client->GetDisplayMetrics();
  if (!metrics_status) {
    ALOGE("dvrGetNativeDisplayDimensions: Failed to get display metrics: %s",
          metrics_status.GetErrorMessage().c_str());
    return -metrics_status.error();
  }

  *display_width = static_cast<int>(metrics_status.get().display_width);
  *display_height = static_cast<int>(metrics_status.get().display_height);
  return 0;
}

struct DvrGraphicsContext : public android::ANativeObjectBase<
                                ANativeWindow, DvrGraphicsContext,
                                android::LightRefBase<DvrGraphicsContext>> {
 public:
  DvrGraphicsContext();
  ~DvrGraphicsContext();

  int graphics_api;  // DVR_SURFACE_GRAPHICS_API_*

  // GL specific members.
  struct {
    EGLDisplay egl_display;
    EGLContext egl_context;
    bool owns_egl_context;
    GLuint texture_id[kSurfaceViewMaxCount];
    int texture_count;
    GLenum texture_target_type;
  } gl;

  // VK specific members
  struct {
    // These objects are passed in by the application, and are NOT owned
    // by the context.
    VkInstance instance;
    VkPhysicalDevice physical_device;
    VkDevice device;
    VkQueue present_queue;
    uint32_t present_queue_family;
    const VkAllocationCallbacks* allocation_callbacks;
    // These objects are owned by the context.
    ANativeWindow* window;
    VkSurfaceKHR surface;
    VkSwapchainKHR swapchain;
    std::vector<VkImage> swapchain_images;
    std::vector<VkImageView> swapchain_image_views;
  } vk;

  // Display surface, metrics, and buffer management members.
  std::shared_ptr<Surface> display_surface;
  uint32_t width;
  uint32_t height;
  uint32_t format;
  Metrics display_metrics;
  std::unique_ptr<NativeBufferQueue> buffer_queue;
  android::dvr::NativeBufferProducer* current_buffer;
  bool buffer_already_posted;

  // Synchronization members.
  std::unique_ptr<android::dvr::VSyncClient> vsync_client;
  LocalHandle timerfd;

  android::dvr::FrameHistory frame_history;

  // Metadata queue and buffer.
  // TODO(eieio): Remove the queue once one-off buffers are supported as a
  // surface primitive element.
  std::shared_ptr<android::dvr::ProducerQueue> metadata_queue;
  std::shared_ptr<android::dvr::BufferProducer> metadata_buffer;
  // Mapped surface metadata (ie: for pose delivery with presented frames).
  volatile DisplaySurfaceMetadata* surface_metadata;

  // LateLatch support.
  std::unique_ptr<android::dvr::LateLatch> late_latch;

 private:
  // ANativeWindow function implementations
  std::mutex lock_;
  int Post(android::dvr::NativeBufferProducer* buffer, int fence_fd);
  static int SetSwapInterval(ANativeWindow* window, int interval);
  static int DequeueBuffer(ANativeWindow* window, ANativeWindowBuffer** buffer,
                           int* fence_fd);
  static int QueueBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                         int fence_fd);
  static int CancelBuffer(ANativeWindow* window, ANativeWindowBuffer* buffer,
                          int fence_fd);
  static int Query(const ANativeWindow* window, int what, int* value);
  static int Perform(ANativeWindow* window, int operation, ...);
  static int DequeueBuffer_DEPRECATED(ANativeWindow* window,
                                      ANativeWindowBuffer** buffer);
  static int CancelBuffer_DEPRECATED(ANativeWindow* window,
                                     ANativeWindowBuffer* buffer);
  static int QueueBuffer_DEPRECATED(ANativeWindow* window,
                                    ANativeWindowBuffer* buffer);
  static int LockBuffer_DEPRECATED(ANativeWindow* window,
                                   ANativeWindowBuffer* buffer);

  DvrGraphicsContext(const DvrGraphicsContext&) = delete;
  void operator=(const DvrGraphicsContext&) = delete;
};

DvrGraphicsContext::DvrGraphicsContext()
    : graphics_api(DVR_GRAPHICS_API_GLES),
      gl{},
      vk{},
      current_buffer(nullptr),
      buffer_already_posted(false),
      surface_metadata(nullptr) {
  gl.egl_display = EGL_NO_DISPLAY;
  gl.egl_context = EGL_NO_CONTEXT;
  gl.owns_egl_context = true;
  gl.texture_target_type = GL_TEXTURE_2D;

  ANativeWindow::setSwapInterval = SetSwapInterval;
  ANativeWindow::dequeueBuffer = DequeueBuffer;
  ANativeWindow::cancelBuffer = CancelBuffer;
  ANativeWindow::queueBuffer = QueueBuffer;
  ANativeWindow::query = Query;
  ANativeWindow::perform = Perform;

  ANativeWindow::dequeueBuffer_DEPRECATED = DequeueBuffer_DEPRECATED;
  ANativeWindow::cancelBuffer_DEPRECATED = CancelBuffer_DEPRECATED;
  ANativeWindow::lockBuffer_DEPRECATED = LockBuffer_DEPRECATED;
  ANativeWindow::queueBuffer_DEPRECATED = QueueBuffer_DEPRECATED;
}

DvrGraphicsContext::~DvrGraphicsContext() {
  if (graphics_api == DVR_GRAPHICS_API_GLES) {
    glDeleteTextures(gl.texture_count, gl.texture_id);
    if (gl.owns_egl_context)
      DestroyEglContext(gl.egl_display, &gl.egl_context);
  } else if (graphics_api == DVR_GRAPHICS_API_VULKAN) {
    if (vk.swapchain != VK_NULL_HANDLE) {
      for (auto view : vk.swapchain_image_views) {
        vkDestroyImageView(vk.device, view, vk.allocation_callbacks);
      }
      vkDestroySwapchainKHR(vk.device, vk.swapchain, vk.allocation_callbacks);
      vkDestroySurfaceKHR(vk.instance, vk.surface, vk.allocation_callbacks);
      delete vk.window;
    }
  }
}

int dvrGraphicsContextCreate(struct DvrSurfaceParameter* parameters,
                             DvrGraphicsContext** return_graphics_context) {
  auto context = std::make_unique<DvrGraphicsContext>();

  // See whether we're using GL or Vulkan
  for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
    switch (p->key) {
      case DVR_SURFACE_PARAMETER_GRAPHICS_API_IN:
        context->graphics_api = p->value;
        break;
    }
  }

  if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
    context->gl.egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (context->gl.egl_display == EGL_NO_DISPLAY) {
      ALOGE("eglGetDisplay failed");
      return -ENXIO;
    }

    // See if we should create a GL context
    for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
      switch (p->key) {
        case DVR_SURFACE_PARAMETER_CREATE_GL_CONTEXT_IN:
          context->gl.owns_egl_context = p->value != 0;
          break;
      }
    }

    if (context->gl.owns_egl_context) {
      int ret = CreateEglContext(context->gl.egl_display, parameters,
                                 &context->gl.egl_context);
      if (ret < 0)
        return ret;
    } else {
      context->gl.egl_context = eglGetCurrentContext();
    }

    int ret = InitGl(context->gl.owns_egl_context);
    if (ret < 0)
      return ret;
  } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
    for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
      switch (p->key) {
        case DVR_SURFACE_PARAMETER_VK_INSTANCE_IN:
          context->vk.instance = reinterpret_cast<VkInstance>(p->value);
          break;
        case DVR_SURFACE_PARAMETER_VK_PHYSICAL_DEVICE_IN:
          context->vk.physical_device =
              reinterpret_cast<VkPhysicalDevice>(p->value);
          break;
        case DVR_SURFACE_PARAMETER_VK_DEVICE_IN:
          context->vk.device = reinterpret_cast<VkDevice>(p->value);
          break;
        case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_IN:
          context->vk.present_queue = reinterpret_cast<VkQueue>(p->value);
          break;
        case DVR_SURFACE_PARAMETER_VK_PRESENT_QUEUE_FAMILY_IN:
          context->vk.present_queue_family = static_cast<uint32_t>(p->value);
          break;
      }
    }
  } else {
    ALOGE("Error: invalid graphics API type");
    return -EINVAL;
  }

  auto surface_status = CreateApplicationSurface(parameters);
  if (!surface_status) {
    ALOGE("dvrGraphicsContextCreate: Failed to create surface: %s",
          surface_status.GetErrorMessage().c_str());
    return -surface_status.error();
  }

  auto surface_result = surface_status.take();

  context->display_surface = surface_result.surface;
  context->display_metrics = surface_result.metrics;
  context->width = surface_result.width;
  context->height = surface_result.height;
  context->format = surface_result.format;

  // Create an empty queue. NativeBufferQueue allocates the buffers for this
  // queue.
  auto queue_status = context->display_surface->CreateQueue();
  if (!queue_status) {
    ALOGE("dvrGraphicsContextCreate: Failed to create queue: %s",
          queue_status.GetErrorMessage().c_str());
    return -queue_status.error();
  }

  context->buffer_queue.reset(new NativeBufferQueue(
      context->gl.egl_display, queue_status.take(), surface_result.width,
      surface_result.height, surface_result.format, surface_result.usage,
      surface_result.capacity));

  // Create the metadata buffer.
  auto metadata_status = CreateMetadataBuffer(context->display_surface,
                                              surface_result.direct_surface);
  if (!metadata_status) {
    ALOGE("dvrGraphicsContextCreate: Failed to create metadata buffer: %s",
          metadata_status.GetErrorMessage().c_str());
    return -metadata_status.error();
  }
  std::tie(context->metadata_queue, context->metadata_buffer,
           context->surface_metadata) = metadata_status.take();

  // The way the call sequence works we need 1 more than the buffer queue
  // capacity to store data for all pending frames
  context->frame_history.Reset(context->buffer_queue->capacity() + 1);

  context->vsync_client = VSyncClient::Create();
  if (!context->vsync_client) {
    ALOGE("dvrGraphicsContextCreate: failed to create vsync client");
    return -ECOMM;
  }

  context->timerfd.Reset(timerfd_create(CLOCK_MONOTONIC, 0));
  if (!context->timerfd) {
    ALOGE("dvrGraphicsContextCreate: timerfd_create failed because: %s",
          strerror(errno));
    return -EPERM;
  }

  if (context->graphics_api == DVR_GRAPHICS_API_GLES) {
    context->gl.texture_count =
        (surface_result.geometry == DVR_SURFACE_GEOMETRY_SEPARATE_2) ? 2 : 1;

    // Create the GL textures.
    glGenTextures(context->gl.texture_count, context->gl.texture_id);

    // We must make sure that we have at least one buffer allocated at this time
    // so that anyone who tries to bind an FBO to context->texture_id
    // will not get an incomplete buffer.
    context->current_buffer = context->buffer_queue->Dequeue();
    LOG_ALWAYS_FATAL_IF(context->gl.texture_count != 1);
    for (int i = 0; i < context->gl.texture_count; ++i) {
      glBindTexture(context->gl.texture_target_type, context->gl.texture_id[i]);
      glEGLImageTargetTexture2DOES(context->gl.texture_target_type,
                                   context->current_buffer->image_khr(i));
    }
    glBindTexture(context->gl.texture_target_type, 0);
    CHECK_GL();

    bool is_late_latch = false;

    // Pass back the texture target type and id.
    for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
      switch (p->key) {
        case DVR_SURFACE_PARAMETER_ENABLE_LATE_LATCH_IN:
          is_late_latch = !!p->value;
          break;
        case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_TYPE_OUT:
          *static_cast<GLenum*>(p->value_out) = context->gl.texture_target_type;
          break;
        case DVR_SURFACE_PARAMETER_SURFACE_TEXTURE_TARGET_ID_OUT:
          for (int i = 0; i < context->gl.texture_count; ++i) {
            *(static_cast<GLuint*>(p->value_out) + i) =
                context->gl.texture_id[i];
          }
          break;
      }
    }

    // Initialize late latch.
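    // The LateLatch helper maps the metadata blob by fd so the GPU can pick up
    // the freshest head pose just before rendering, rather than the pose
    // sampled when the frame began (see private/dvr/late_latch.h).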
    if (is_late_latch) {
      LocalHandle fd = context->metadata_buffer->GetBlobFd();
      context->late_latch.reset(
          new android::dvr::LateLatch(true, std::move(fd)));
    }
  } else if (context->graphics_api == DVR_GRAPHICS_API_VULKAN) {
    VkResult result = VK_SUCCESS;
    // Create a VkSurfaceKHR from the ANativeWindow.
    VkAndroidSurfaceCreateInfoKHR android_surface_ci = {};
    android_surface_ci.sType =
        VK_STRUCTURE_TYPE_ANDROID_SURFACE_CREATE_INFO_KHR;
    android_surface_ci.window = context.get();
    result = vkCreateAndroidSurfaceKHR(
        context->vk.instance, &android_surface_ci,
        context->vk.allocation_callbacks, &context->vk.surface);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    VkBool32 surface_supports_present = VK_FALSE;
    result = vkGetPhysicalDeviceSurfaceSupportKHR(
        context->vk.physical_device, context->vk.present_queue_family,
        context->vk.surface, &surface_supports_present);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    if (!surface_supports_present) {
      ALOGE("Error: provided queue family (%u) does not support presentation",
            context->vk.present_queue_family);
      return -EPERM;
    }
    VkSurfaceCapabilitiesKHR surface_capabilities = {};
    result = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
        context->vk.physical_device, context->vk.surface,
        &surface_capabilities);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    // Determine the swapchain image format.
    uint32_t device_surface_format_count = 0;
    result = vkGetPhysicalDeviceSurfaceFormatsKHR(
        context->vk.physical_device, context->vk.surface,
        &device_surface_format_count, nullptr);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    std::vector<VkSurfaceFormatKHR> device_surface_formats(
        device_surface_format_count);
    result = vkGetPhysicalDeviceSurfaceFormatsKHR(
        context->vk.physical_device, context->vk.surface,
        &device_surface_format_count, device_surface_formats.data());
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    LOG_ALWAYS_FATAL_IF(device_surface_format_count == 0U);
    LOG_ALWAYS_FATAL_IF(device_surface_formats[0].format ==
                        VK_FORMAT_UNDEFINED);
    VkSurfaceFormatKHR present_surface_format = device_surface_formats[0];
    // Determine the swapchain present mode.
    // TODO(cort): query device_present_modes to make sure MAILBOX is supported.
    // But according to libvulkan, it is.
    uint32_t device_present_mode_count = 0;
    result = vkGetPhysicalDeviceSurfacePresentModesKHR(
        context->vk.physical_device, context->vk.surface,
        &device_present_mode_count, nullptr);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    std::vector<VkPresentModeKHR> device_present_modes(
        device_present_mode_count);
    result = vkGetPhysicalDeviceSurfacePresentModesKHR(
        context->vk.physical_device, context->vk.surface,
        &device_present_mode_count, device_present_modes.data());
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    VkPresentModeKHR present_mode = VK_PRESENT_MODE_MAILBOX_KHR;
    // Extract presentation surface extents, image count, transform, usages,
    // etc.
    LOG_ALWAYS_FATAL_IF(
        static_cast<int>(surface_capabilities.currentExtent.width) == -1 ||
        static_cast<int>(surface_capabilities.currentExtent.height) == -1);
    VkExtent2D swapchain_extent = surface_capabilities.currentExtent;

    uint32_t desired_image_count = surface_capabilities.minImageCount;
    if (surface_capabilities.maxImageCount > 0 &&
        desired_image_count > surface_capabilities.maxImageCount) {
      desired_image_count = surface_capabilities.maxImageCount;
    }
    VkSurfaceTransformFlagBitsKHR surface_transform =
        surface_capabilities.currentTransform;
    VkImageUsageFlags image_usage_flags =
        surface_capabilities.supportedUsageFlags;
    LOG_ALWAYS_FATAL_IF(surface_capabilities.supportedCompositeAlpha ==
                        static_cast<VkFlags>(0));
    VkCompositeAlphaFlagBitsKHR composite_alpha =
        VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
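    // If opaque composition is not supported, fall back to the lowest
    // supported composite-alpha mode; `flags & -flags` isolates the least
    // significant set bit.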
    if (!(surface_capabilities.supportedCompositeAlpha &
          VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR)) {
      composite_alpha = VkCompositeAlphaFlagBitsKHR(
          static_cast<int>(surface_capabilities.supportedCompositeAlpha) &
          -static_cast<int>(surface_capabilities.supportedCompositeAlpha));
    }
    // Create VkSwapchainKHR
    VkSwapchainCreateInfoKHR swapchain_ci = {};
    swapchain_ci.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
    swapchain_ci.pNext = nullptr;
    swapchain_ci.surface = context->vk.surface;
    swapchain_ci.minImageCount = desired_image_count;
    swapchain_ci.imageFormat = present_surface_format.format;
    swapchain_ci.imageColorSpace = present_surface_format.colorSpace;
    swapchain_ci.imageExtent.width = swapchain_extent.width;
    swapchain_ci.imageExtent.height = swapchain_extent.height;
    swapchain_ci.imageUsage = image_usage_flags;
    swapchain_ci.preTransform = surface_transform;
    swapchain_ci.compositeAlpha = composite_alpha;
    swapchain_ci.imageArrayLayers = 1;
    swapchain_ci.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
    swapchain_ci.queueFamilyIndexCount = 0;
    swapchain_ci.pQueueFamilyIndices = nullptr;
    swapchain_ci.presentMode = present_mode;
    swapchain_ci.clipped = VK_TRUE;
    swapchain_ci.oldSwapchain = VK_NULL_HANDLE;
    result = vkCreateSwapchainKHR(context->vk.device, &swapchain_ci,
                                  context->vk.allocation_callbacks,
                                  &context->vk.swapchain);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    // Create swapchain image views
    uint32_t image_count = 0;
    result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
                                     &image_count, nullptr);
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    LOG_ALWAYS_FATAL_IF(image_count == 0U);
    context->vk.swapchain_images.resize(image_count);
    result = vkGetSwapchainImagesKHR(context->vk.device, context->vk.swapchain,
                                     &image_count,
                                     context->vk.swapchain_images.data());
    LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    context->vk.swapchain_image_views.resize(image_count);
    VkImageViewCreateInfo image_view_ci = {};
    image_view_ci.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
    image_view_ci.pNext = nullptr;
    image_view_ci.flags = 0;
    image_view_ci.format = swapchain_ci.imageFormat;
    image_view_ci.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
    image_view_ci.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
    image_view_ci.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
    image_view_ci.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
    image_view_ci.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    image_view_ci.subresourceRange.baseMipLevel = 0;
    image_view_ci.subresourceRange.levelCount = 1;
    image_view_ci.subresourceRange.baseArrayLayer = 0;
    image_view_ci.subresourceRange.layerCount = 1;
    image_view_ci.viewType = VK_IMAGE_VIEW_TYPE_2D;
    image_view_ci.image = VK_NULL_HANDLE;  // filled in below
    for (uint32_t i = 0; i < image_count; ++i) {
      image_view_ci.image = context->vk.swapchain_images[i];
      result = vkCreateImageView(context->vk.device, &image_view_ci,
                                 context->vk.allocation_callbacks,
                                 &context->vk.swapchain_image_views[i]);
      LOG_ALWAYS_FATAL_IF(result != VK_SUCCESS);
    }
    // Fill in any requested output parameters.
    for (auto p = parameters; p && p->key != DVR_SURFACE_PARAMETER_NONE; ++p) {
      switch (p->key) {
        case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_COUNT_OUT:
          *static_cast<uint32_t*>(p->value_out) = image_count;
          break;
        case DVR_SURFACE_PARAMETER_VK_SWAPCHAIN_IMAGE_FORMAT_OUT:
          *static_cast<VkFormat*>(p->value_out) = swapchain_ci.imageFormat;
          break;
      }
    }
  }

  *return_graphics_context = context.release();
  return 0;
}

void dvrGraphicsContextDestroy(DvrGraphicsContext* graphics_context) {
  delete graphics_context;
}
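
// Typical call sequence (illustrative sketch only): the caller builds a
// DvrSurfaceParameter array terminated by DVR_SURFACE_PARAMETER_NONE, creates
// the context, renders, and then destroys it:
//
//   DvrGraphicsContext* context = nullptr;
//   int ret = dvrGraphicsContextCreate(params, &context);
//   if (ret == 0) {
//     // ... render frames ...
//     dvrGraphicsContextDestroy(context);
//   }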

// ANativeWindow function implementations. These should only be used
// by the Vulkan path.
int DvrGraphicsContext::Post(android::dvr::NativeBufferProducer* buffer,
                             int fence_fd) {
  LOG_ALWAYS_FATAL_IF(graphics_api != DVR_GRAPHICS_API_VULKAN);
  ATRACE_NAME(__PRETTY_FUNCTION__);
  ALOGI_IF(TRACE, "DvrGraphicsContext::Post: buffer_id=%d, fence_fd=%d",
           buffer->buffer()->id(), fence_fd);
  ALOGW_IF(!display_surface->visible(),
           "DvrGraphicsContext::Post: Posting buffer on invisible surface!!!");
  // The NativeBufferProducer closes the fence fd, so dup it for tracking in the
  // frame history.
  frame_history.OnFrameSubmit(LocalHandle::AsDuplicate(fence_fd));
  int result = buffer->Post(fence_fd, 0);
  return result;
}

int DvrGraphicsContext::SetSwapInterval(ANativeWindow* window, int interval) {
  ALOGI_IF(TRACE, "SetSwapInterval: window=%p interval=%d", window, interval);
  DvrGraphicsContext* self = getSelf(window);
  (void)self;
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  return android::NO_ERROR;
}

int DvrGraphicsContext::DequeueBuffer(ANativeWindow* window,
                                      ANativeWindowBuffer** buffer,
                                      int* fence_fd) {
  ATRACE_NAME(__PRETTY_FUNCTION__);

  DvrGraphicsContext* self = getSelf(window);
  LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN);
  std::lock_guard<std::mutex> autolock(self->lock_);

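  // Reuse the currently dequeued buffer if one is already pending; otherwise
  // pull a new one from the buffer queue.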
| 1044 | if (!self->current_buffer) { |
Corey Tabaka | 2251d82 | 2017-04-20 16:04:07 -0700 | [diff] [blame] | 1045 | self->current_buffer = self->buffer_queue->Dequeue(); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1046 | } |
| 1047 | ATRACE_ASYNC_BEGIN("BufferDraw", self->current_buffer->buffer()->id()); |
| 1048 | *fence_fd = self->current_buffer->ClaimReleaseFence().Release(); |
| 1049 | *buffer = self->current_buffer; |
| 1050 | |
| 1051 | ALOGI_IF(TRACE, "DvrGraphicsContext::DequeueBuffer: fence_fd=%d", *fence_fd); |
| 1052 | return android::NO_ERROR; |
| 1053 | } |
| 1054 | |
| 1055 | int DvrGraphicsContext::QueueBuffer(ANativeWindow* window, |
| 1056 | ANativeWindowBuffer* buffer, int fence_fd) { |
| 1057 | ATRACE_NAME("NativeWindow::QueueBuffer"); |
| 1058 | ALOGI_IF(TRACE, "NativeWindow::QueueBuffer: fence_fd=%d", fence_fd); |
| 1059 | |
| 1060 | DvrGraphicsContext* self = getSelf(window); |
Alex Vakulenko | 4fe6058 | 2017-02-02 11:35:59 -0800 | [diff] [blame] | 1061 | LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1062 | std::lock_guard<std::mutex> autolock(self->lock_); |
| 1063 | |
| 1064 | android::dvr::NativeBufferProducer* native_buffer = |
| 1065 | static_cast<android::dvr::NativeBufferProducer*>(buffer); |
| 1066 | ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id()); |
| 1067 | bool do_post = true; |
| 1068 | if (self->buffer_already_posted) { |
| 1069 | // Check that the buffer is the one we expect, but handle it if this happens |
| 1070 | // in production by allowing this buffer to post on top of the previous one. |
Alex Vakulenko | 4fe6058 | 2017-02-02 11:35:59 -0800 | [diff] [blame] | 1071 | LOG_FATAL_IF(native_buffer != self->current_buffer); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1072 | if (native_buffer == self->current_buffer) { |
| 1073 | do_post = false; |
| 1074 | if (fence_fd >= 0) |
| 1075 | close(fence_fd); |
| 1076 | } |
| 1077 | } |
| 1078 | if (do_post) { |
| 1079 | ATRACE_ASYNC_BEGIN("BufferPost", native_buffer->buffer()->id()); |
| 1080 | self->Post(native_buffer, fence_fd); |
| 1081 | } |
| 1082 | self->buffer_already_posted = false; |
| 1083 | self->current_buffer = nullptr; |
| 1084 | |
| 1085 | return android::NO_ERROR; |
| 1086 | } |
| 1087 | |
| 1088 | int DvrGraphicsContext::CancelBuffer(ANativeWindow* window, |
| 1089 | ANativeWindowBuffer* buffer, |
| 1090 | int fence_fd) { |
| 1091 | ATRACE_NAME("DvrGraphicsContext::CancelBuffer"); |
| 1092 | ALOGI_IF(TRACE, "DvrGraphicsContext::CancelBuffer: fence_fd: %d", fence_fd); |
| 1093 | |
| 1094 | DvrGraphicsContext* self = getSelf(window); |
Alex Vakulenko | 4fe6058 | 2017-02-02 11:35:59 -0800 | [diff] [blame] | 1095 | LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1096 | std::lock_guard<std::mutex> autolock(self->lock_); |
| 1097 | |
| 1098 | android::dvr::NativeBufferProducer* native_buffer = |
| 1099 | static_cast<android::dvr::NativeBufferProducer*>(buffer); |
| 1100 | ATRACE_ASYNC_END("BufferDraw", native_buffer->buffer()->id()); |
| 1101 | ATRACE_INT("CancelBuffer", native_buffer->buffer()->id()); |
| 1102 | bool do_enqueue = true; |
| 1103 | if (self->buffer_already_posted) { |
| 1104 | // Verify this is the buffer we expect. If an unexpected buffer arrives in a 
| 1105 | // production build, recover by returning it to the buffer queue. 
Alex Vakulenko | 4fe6058 | 2017-02-02 11:35:59 -0800 | [diff] [blame] | 1106 | LOG_FATAL_IF(native_buffer != self->current_buffer); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1107 | if (native_buffer == self->current_buffer) { |
| 1108 | do_enqueue = false; |
| 1109 | } |
| 1110 | } |
| 1111 | if (do_enqueue) { |
Corey Tabaka | 2251d82 | 2017-04-20 16:04:07 -0700 | [diff] [blame] | 1112 | self->buffer_queue->Enqueue(native_buffer); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1113 | } |
| 1114 | if (fence_fd >= 0) |
| 1115 | close(fence_fd); |
| 1116 | self->buffer_already_posted = false; |
| 1117 | self->current_buffer = nullptr; |
| 1118 | |
| 1119 | return android::NO_ERROR; |
| 1120 | } |
| 1121 | |
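// ANativeWindow query hook. Reports the context's cached width, height and
// format; unrecognized queries set *value to 0 and return BAD_VALUE.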
| 1122 | int DvrGraphicsContext::Query(const ANativeWindow* window, int what, |
| 1123 | int* value) { |
| 1124 | DvrGraphicsContext* self = getSelf(const_cast<ANativeWindow*>(window)); |
Alex Vakulenko | 4fe6058 | 2017-02-02 11:35:59 -0800 | [diff] [blame] | 1125 | LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1126 | std::lock_guard<std::mutex> autolock(self->lock_); |
| 1127 | |
| 1128 | switch (what) { |
| 1129 | case NATIVE_WINDOW_WIDTH: |
Corey Tabaka | 2251d82 | 2017-04-20 16:04:07 -0700 | [diff] [blame] | 1130 | *value = self->width; |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1131 | return android::NO_ERROR; |
| 1132 | case NATIVE_WINDOW_HEIGHT: |
Corey Tabaka | 2251d82 | 2017-04-20 16:04:07 -0700 | [diff] [blame] | 1133 | *value = self->height; |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1134 | return android::NO_ERROR; |
| 1135 | case NATIVE_WINDOW_FORMAT: |
Corey Tabaka | 2251d82 | 2017-04-20 16:04:07 -0700 | [diff] [blame] | 1136 | *value = self->format; |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1137 | return android::NO_ERROR; |
| 1138 | case NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS: |
| 1139 | *value = 1; |
| 1140 | return android::NO_ERROR; |
| 1141 | case NATIVE_WINDOW_CONCRETE_TYPE: |
| 1142 | *value = NATIVE_WINDOW_SURFACE; |
| 1143 | return android::NO_ERROR; |
| 1144 | case NATIVE_WINDOW_QUEUES_TO_WINDOW_COMPOSER: |
| 1145 | *value = 1; |
| 1146 | return android::NO_ERROR; |
| 1147 | case NATIVE_WINDOW_DEFAULT_WIDTH: |
Corey Tabaka | 2251d82 | 2017-04-20 16:04:07 -0700 | [diff] [blame] | 1148 | *value = self->width; |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1149 | return android::NO_ERROR; |
| 1150 | case NATIVE_WINDOW_DEFAULT_HEIGHT: |
Corey Tabaka | 2251d82 | 2017-04-20 16:04:07 -0700 | [diff] [blame] | 1151 | *value = self->height; |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1152 | return android::NO_ERROR; |
| 1153 | case NATIVE_WINDOW_TRANSFORM_HINT: |
| 1154 | *value = 0; |
| 1155 | return android::NO_ERROR; |
| 1156 | } |
| 1157 | |
| 1158 | *value = 0; |
| 1159 | return android::BAD_VALUE; |
| 1160 | } |
| 1161 | |
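// ANativeWindow perform() hook. Most operations are either accepted as no-ops
// (their parameters, if any, are consumed with va_arg and logged) or rejected
// with INVALID_OPERATION; unknown operations return NAME_NOT_FOUND.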
| 1162 | int DvrGraphicsContext::Perform(ANativeWindow* window, int operation, ...) { |
| 1163 | DvrGraphicsContext* self = getSelf(window); |
Alex Vakulenko | 4fe6058 | 2017-02-02 11:35:59 -0800 | [diff] [blame] | 1164 | LOG_ALWAYS_FATAL_IF(self->graphics_api != DVR_GRAPHICS_API_VULKAN); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1165 | std::lock_guard<std::mutex> autolock(self->lock_); |
| 1166 | |
| 1167 | va_list args; |
| 1168 | va_start(args, operation); |
| 1169 | |
| 1170 | // TODO(eieio): The following operations are not used at this time. They are |
| 1171 | // included here to help document which operations may be useful and what |
| 1172 | // parameters they take. |
| 1173 | switch (operation) { |
| 1174 | case NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: { |
| 1175 | int w = va_arg(args, int); |
| 1176 | int h = va_arg(args, int); |
| 1177 | ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS: w=%d h=%d", w, h); |
| 1178 | return android::NO_ERROR; |
| 1179 | } |
| 1180 | |
| 1181 | case NATIVE_WINDOW_SET_BUFFERS_FORMAT: { |
| 1182 | int format = va_arg(args, int); |
| 1183 | ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_FORMAT: format=%d", format); |
| 1184 | return android::NO_ERROR; |
| 1185 | } |
| 1186 | |
| 1187 | case NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: { |
| 1188 | int transform = va_arg(args, int); |
| 1189 | ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_TRANSFORM: transform=%d", |
| 1190 | transform); |
| 1191 | return android::NO_ERROR; |
| 1192 | } |
| 1193 | |
| 1194 | case NATIVE_WINDOW_SET_USAGE: { |
| 1195 | int usage = va_arg(args, int); |
| 1196 | ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_USAGE: usage=%d", usage); |
| 1197 | return android::NO_ERROR; |
| 1198 | } |
| 1199 | |
| 1200 | case NATIVE_WINDOW_CONNECT: |
| 1201 | case NATIVE_WINDOW_DISCONNECT: |
| 1202 | case NATIVE_WINDOW_SET_BUFFERS_GEOMETRY: |
| 1203 | case NATIVE_WINDOW_API_CONNECT: |
| 1204 | case NATIVE_WINDOW_API_DISCONNECT: |
| 1205 | // TODO(eieio): we should implement these |
| 1206 | return android::NO_ERROR; |
| 1207 | |
| 1208 | case NATIVE_WINDOW_SET_BUFFER_COUNT: { |
| 1209 | int buffer_count = va_arg(args, int); |
| 1210 | ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFER_COUNT: bufferCount=%d", |
| 1211 | buffer_count); |
| 1212 | return android::NO_ERROR; |
| 1213 | } |
| 1214 | case NATIVE_WINDOW_SET_BUFFERS_DATASPACE: { |
| 1215 | android_dataspace_t data_space = |
| 1216 | static_cast<android_dataspace_t>(va_arg(args, int)); |
| 1217 | ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_BUFFERS_DATASPACE: dataSpace=%d", |
| 1218 | data_space); |
| 1219 | return android::NO_ERROR; |
| 1220 | } |
| 1221 | case NATIVE_WINDOW_SET_SCALING_MODE: { |
| 1222 | int mode = va_arg(args, int); |
| 1223 | ALOGD_IF(TRACE, "NATIVE_WINDOW_SET_SCALING_MODE: mode=%d", mode); |
| 1224 | return android::NO_ERROR; |
| 1225 | } |
| 1226 | |
| 1227 | case NATIVE_WINDOW_LOCK: |
| 1228 | case NATIVE_WINDOW_UNLOCK_AND_POST: |
| 1229 | case NATIVE_WINDOW_SET_CROP: |
| 1230 | case NATIVE_WINDOW_SET_BUFFERS_TIMESTAMP: |
| 1231 | return android::INVALID_OPERATION; |
| 1232 | } |
| 1233 | |
| 1234 | return android::NAME_NOT_FOUND; |
| 1235 | } |
| 1236 | |
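// Deprecated ANativeWindow entry points that predate explicit fence passing:
// the dequeue variant closes the returned fence, the queue/cancel variants
// forward a fence of -1, and LockBuffer is a no-op.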
| 1237 | int DvrGraphicsContext::DequeueBuffer_DEPRECATED(ANativeWindow* window, |
| 1238 | ANativeWindowBuffer** buffer) { |
| 1239 | int fence_fd = -1; |
| 1240 | int ret = DequeueBuffer(window, buffer, &fence_fd); |
| 1241 | |
| 1242 | // The deprecated interface cannot return the fence; close it here (no wait). 
| 1243 | if (ret == android::NO_ERROR && fence_fd != -1) |
| 1244 | close(fence_fd); |
| 1245 | |
| 1246 | return ret; |
| 1247 | } |
| 1248 | |
| 1249 | int DvrGraphicsContext::CancelBuffer_DEPRECATED(ANativeWindow* window, |
| 1250 | ANativeWindowBuffer* buffer) { |
| 1251 | return CancelBuffer(window, buffer, -1); |
| 1252 | } |
| 1253 | |
| 1254 | int DvrGraphicsContext::QueueBuffer_DEPRECATED(ANativeWindow* window, |
| 1255 | ANativeWindowBuffer* buffer) { |
| 1256 | return QueueBuffer(window, buffer, -1); |
| 1257 | } |
| 1258 | |
| 1259 | int DvrGraphicsContext::LockBuffer_DEPRECATED(ANativeWindow* /*window*/, |
| 1260 | ANativeWindowBuffer* /*buffer*/) { |
| 1261 | return android::NO_ERROR; |
| 1262 | } |
| 1263 | // End ANativeWindow implementation |
| 1264 | |
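// Writes the frame's render pose into slot 0 of the display surface metadata
// buffer so the display service can use it for EDS (electronic display
// stabilization). A buffer must already have been dequeued via
// dvrBeginRenderFrame*; when the orientation carries the DVR_POSE_LATE_LATCH
// marker the write is skipped because the GPU late-latch path owns the pose.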
| 1265 | int dvrSetEdsPose(DvrGraphicsContext* graphics_context, |
| 1266 | float32x4_t render_pose_orientation, |
| 1267 | float32x4_t render_pose_translation) { |
| 1268 | ATRACE_NAME("dvrSetEdsPose"); |
| 1269 | if (!graphics_context->current_buffer) { |
| 1270 | ALOGE("dvrBeginRenderFrame must be called before dvrSetEdsPose"); |
| 1271 | return -EPERM; |
| 1272 | } |
| 1273 | |
| 1274 | // When late-latching is enabled, the pose buffer is written by the GPU, so |
| 1275 | // we don't touch it here. |
| 1276 | float32x4_t is_late_latch = DVR_POSE_LATE_LATCH; |
| 1277 | if (render_pose_orientation[0] != is_late_latch[0]) { |
Corey Tabaka | 2251d82 | 2017-04-20 16:04:07 -0700 | [diff] [blame] | 1278 | volatile DisplaySurfaceMetadata* data = graphics_context->surface_metadata; |
Hendrik Wagenaar | 4d3590f | 2017-05-06 22:36:04 -0700 | [diff] [blame] | 1279 | uint32_t buffer_index = 0; |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1280 | ALOGE_IF(TRACE, "write pose index %u %f %f", buffer_index, 
| 1281 | render_pose_orientation[0], render_pose_orientation[1]); |
| 1282 | data->orientation[buffer_index] = render_pose_orientation; |
| 1283 | data->translation[buffer_index] = render_pose_translation; |
| 1284 | } |
| 1285 | |
| 1286 | return 0; |
| 1287 | } |
| 1288 | |
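// GLES entry point for starting a frame. Dequeues the next buffer (if one is
// not already pending), records the EDS pose, and rebinds the context's
// textures to the buffer's EGL images so subsequent GL rendering targets the
// new buffer.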
| 1289 | int dvrBeginRenderFrameEds(DvrGraphicsContext* graphics_context, |
| 1290 | float32x4_t render_pose_orientation, |
| 1291 | float32x4_t render_pose_translation) { |
| 1292 | ATRACE_NAME("dvrBeginRenderFrameEds"); |
Alex Vakulenko | 4fe6058 | 2017-02-02 11:35:59 -0800 | [diff] [blame] | 1293 | LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1294 | CHECK_GL(); |
| 1295 | // Grab a buffer from the queue and set its pose. |
| 1296 | if (!graphics_context->current_buffer) { |
| 1297 | graphics_context->current_buffer = |
| 1298 | graphics_context->buffer_queue->Dequeue(); |
| 1299 | } |
| 1300 | |
| 1301 | int ret = dvrSetEdsPose(graphics_context, render_pose_orientation, |
| 1302 | render_pose_translation); |
| 1303 | if (ret < 0) |
| 1304 | return ret; |
| 1305 | |
| 1306 | ATRACE_ASYNC_BEGIN("BufferDraw", |
| 1307 | graphics_context->current_buffer->buffer()->id()); |
| 1308 | |
| 1309 | { |
| 1310 | ATRACE_NAME("glEGLImageTargetTexture2DOES"); |
| 1311 | // Bind the context's textures to the images of the newly dequeued buffer. 
| 1312 | for (int i = 0; i < graphics_context->gl.texture_count; ++i) { |
| 1313 | glBindTexture(graphics_context->gl.texture_target_type, |
| 1314 | graphics_context->gl.texture_id[i]); |
| 1315 | glEGLImageTargetTexture2DOES( |
| 1316 | graphics_context->gl.texture_target_type, |
| 1317 | graphics_context->current_buffer->image_khr(i)); |
| 1318 | } |
| 1319 | glBindTexture(graphics_context->gl.texture_target_type, 0); |
| 1320 | } |
| 1321 | CHECK_GL(); |
| 1322 | return 0; |
| 1323 | } |
Corey Tabaka | 2251d82 | 2017-04-20 16:04:07 -0700 | [diff] [blame] | 1324 | |
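// Vulkan entry point for starting a frame. Acquires the next swapchain image
// (which dequeues the underlying buffer), records the EDS pose, and returns
// the image index together with the matching entry from the context's
// swapchain image views.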
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1325 | int dvrBeginRenderFrameEdsVk(DvrGraphicsContext* graphics_context, |
| 1326 | float32x4_t render_pose_orientation, |
| 1327 | float32x4_t render_pose_translation, |
| 1328 | VkSemaphore acquire_semaphore, |
| 1329 | VkFence acquire_fence, |
| 1330 | uint32_t* swapchain_image_index, |
| 1331 | VkImageView* swapchain_image_view) { |
| 1332 | ATRACE_NAME("dvrBeginRenderFrameEdsVk"); 
Alex Vakulenko | 4fe6058 | 2017-02-02 11:35:59 -0800 | [diff] [blame] | 1333 | LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != |
| 1334 | DVR_GRAPHICS_API_VULKAN); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1335 | |
| 1336 | // Acquire a swapchain image. This calls Dequeue() internally. |
| 1337 | VkResult result = vkAcquireNextImageKHR( |
| 1338 | graphics_context->vk.device, graphics_context->vk.swapchain, UINT64_MAX, |
| 1339 | acquire_semaphore, acquire_fence, swapchain_image_index); |
| 1340 | if (result != VK_SUCCESS) |
| 1341 | return -EINVAL; |
| 1342 | |
| 1343 | // Set the EDS pose. 
| 1344 | int ret = dvrSetEdsPose(graphics_context, render_pose_orientation, |
| 1345 | render_pose_translation); |
| 1346 | if (ret < 0) |
| 1347 | return ret; |
| 1348 | *swapchain_image_view = |
| 1349 | graphics_context->vk.swapchain_image_views[*swapchain_image_index]; |
| 1350 | return 0; |
| 1351 | } |
| 1352 | |
| 1353 | int dvrBeginRenderFrame(DvrGraphicsContext* graphics_context) { |
| 1354 | return dvrBeginRenderFrameEds(graphics_context, DVR_POSE_NO_EDS, |
| 1355 | DVR_POSE_NO_EDS); |
| 1356 | } 

| 1357 | int dvrBeginRenderFrameVk(DvrGraphicsContext* graphics_context, |
| 1358 | VkSemaphore acquire_semaphore, VkFence acquire_fence, |
| 1359 | uint32_t* swapchain_image_index, |
| 1360 | VkImageView* swapchain_image_view) { |
| 1361 | return dvrBeginRenderFrameEdsVk( |
| 1362 | graphics_context, DVR_POSE_NO_EDS, DVR_POSE_NO_EDS, acquire_semaphore, |
| 1363 | acquire_fence, swapchain_image_index, swapchain_image_view); |
| 1364 | } |
| 1365 | |
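// Starts a frame whose pose is late-latched on the GPU: the per-view
// projection, eye-from-head and pose-offset matrices are packaged into a
// LateLatchInput and queued with the context's late latch object, and the id
// of its output buffer is returned so the application can consume the latched
// pose. Fails with -EPERM if the context was not created with late latch
// enabled.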
| 1366 | int dvrBeginRenderFrameLateLatch(DvrGraphicsContext* graphics_context, |
| 1367 | uint32_t /*flags*/, |
| 1368 | uint32_t target_vsync_count, int num_views, |
| 1369 | const float** projection_matrices, |
| 1370 | const float** eye_from_head_matrices, |
| 1371 | const float** pose_offset_matrices, |
| 1372 | uint32_t* out_late_latch_buffer_id) { |
| 1373 | if (!graphics_context->late_latch) { |
| 1374 | return -EPERM; |
| 1375 | } |
| 1376 | if (num_views > DVR_GRAPHICS_SURFACE_MAX_VIEWS) { |
Alex Vakulenko | 4fe6058 | 2017-02-02 11:35:59 -0800 | [diff] [blame] | 1377 | ALOGE("dvrBeginRenderFrameLateLatch called with too many views."); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1378 | return -EINVAL; |
| 1379 | } |
| 1380 | dvrBeginRenderFrameEds(graphics_context, DVR_POSE_LATE_LATCH, |
| 1381 | DVR_POSE_LATE_LATCH); |
| 1382 | auto& ll = graphics_context->late_latch; |
| 1383 | // TODO(jbates) Need to change this shader so that it dumps the single |
| 1384 | // captured pose for both eyes into the display surface metadata buffer at |
| 1385 | // the right index. |
| 1386 | android::dvr::LateLatchInput input; |
| 1387 | memset(&input, 0, sizeof(input)); |
| 1388 | for (int i = 0; i < num_views; ++i) { |
| 1389 | memcpy(input.proj_mat + i, *(projection_matrices + i), 16 * sizeof(float)); |
| 1390 | memcpy(input.eye_from_head_mat + i, *(eye_from_head_matrices + i), |
| 1391 | 16 * sizeof(float)); |
| 1392 | memcpy(input.pose_offset + i, *(pose_offset_matrices + i), |
| 1393 | 16 * sizeof(float)); |
| 1394 | } |
| 1395 | input.pose_index = |
| 1396 | target_vsync_count & android::dvr::kPoseAsyncBufferIndexMask; |
| 1397 | input.render_pose_index = |
| 1398 | graphics_context->current_buffer->surface_buffer_index(); |
| 1399 | ll->AddLateLatch(input); |
| 1400 | *out_late_latch_buffer_id = ll->output_buffer_id(); |
| 1401 | return 0; |
| 1402 | } |
| 1403 | |
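// Blocks until the application should start rendering its next frame. The
// wake time is derived from the vsync client's schedule plus start_delay_ns
// and adjusted by whole vsync periods so the schedule neither jumps ahead nor
// lands in the past; the sleep itself uses an absolute one-shot timerfd. If
// the schedule cannot be fetched, the call falls back to sleeping one vsync
// period. On return the frame history has been updated so the next frame's
// vsync interval can be predicted.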
| 1404 | extern "C" int dvrGraphicsWaitNextFrame( |
| 1405 | DvrGraphicsContext* graphics_context, int64_t start_delay_ns, |
| 1406 | DvrFrameSchedule* out_next_frame_schedule) { |
| 1407 | start_delay_ns = std::max(start_delay_ns, static_cast<int64_t>(0)); |
| 1408 | |
| 1409 | // We only do one-shot timers: |
| 1410 | int64_t wake_time_ns = 0; |
| 1411 | |
| 1412 | uint32_t current_frame_vsync; |
| 1413 | int64_t current_frame_scheduled_finish_ns; |
| 1414 | int64_t vsync_period_ns; |
| 1415 | |
| 1416 | int fetch_schedule_result = graphics_context->vsync_client->GetSchedInfo( |
| 1417 | &vsync_period_ns, ¤t_frame_scheduled_finish_ns, |
| 1418 | ¤t_frame_vsync); |
| 1419 | if (fetch_schedule_result == 0) { |
| 1420 | wake_time_ns = current_frame_scheduled_finish_ns + start_delay_ns; |
| 1421 | // If the last wakeup time is still in the future, use it instead to avoid |
| 1422 | // major schedule jumps when applications call WaitNextFrame with |
| 1423 | // aggressive offsets. |
| 1424 | int64_t now = android::dvr::GetSystemClockNs(); |
| 1425 | if (android::dvr::TimestampGT(wake_time_ns - vsync_period_ns, now)) { |
| 1426 | wake_time_ns -= vsync_period_ns; |
| 1427 | --current_frame_vsync; |
| 1428 | } |
| 1429 | // If the next wakeup time is in the past, add a vsync period to keep the |
| 1430 | // application on schedule. |
| 1431 | if (android::dvr::TimestampLT(wake_time_ns, now)) { |
| 1432 | wake_time_ns += vsync_period_ns; |
| 1433 | ++current_frame_vsync; |
| 1434 | } |
| 1435 | } else { |
| 1436 | ALOGE("Error getting frame schedule because: %s", |
| 1437 | strerror(-fetch_schedule_result)); |
| 1438 | // Sleep for a vsync period to avoid cascading failure. |
| 1439 | wake_time_ns = android::dvr::GetSystemClockNs() + |
| 1440 | graphics_context->display_metrics.vsync_period_ns; |
| 1441 | } |
| 1442 | |
| 1443 | // Convert the wake time to a timespec and arm a one-shot absolute timer. 
| 1444 | struct itimerspec wake_time; |
| 1445 | wake_time.it_interval.tv_sec = 0; |
| 1446 | wake_time.it_interval.tv_nsec = 0; |
| 1447 | wake_time.it_value = android::dvr::NsToTimespec(wake_time_ns); |
| 1448 | bool sleep_result = |
| 1449 | timerfd_settime(graphics_context->timerfd.Get(), TFD_TIMER_ABSTIME, |
| 1450 | &wake_time, nullptr) == 0; |
| 1451 | if (sleep_result) { |
| 1452 | ATRACE_NAME("sleep"); |
| 1453 | uint64_t expirations = 0; |
| 1454 | sleep_result = read(graphics_context->timerfd.Get(), &expirations, |
| 1455 | sizeof(uint64_t)) == sizeof(uint64_t); |
| 1456 | if (!sleep_result) { |
| 1457 | ALOGE("Error: timerfd read failed"); |
| 1458 | } |
| 1459 | } else { |
| 1460 | ALOGE("Error: timerfd_settime failed because: %s", strerror(errno)); |
| 1461 | } |
| 1462 | |
| 1463 | auto& frame_history = graphics_context->frame_history; |
| 1464 | frame_history.CheckForFinishedFrames(); |
| 1465 | if (fetch_schedule_result == 0) { |
| 1466 | uint32_t next_frame_vsync = |
| 1467 | current_frame_vsync + |
| 1468 | frame_history.PredictNextFrameVsyncInterval(vsync_period_ns); |
| 1469 | int64_t next_frame_scheduled_finish = |
| 1470 | (wake_time_ns - start_delay_ns) + vsync_period_ns; |
| 1471 | frame_history.OnFrameStart(next_frame_vsync, next_frame_scheduled_finish); |
| 1472 | if (out_next_frame_schedule) { |
| 1473 | out_next_frame_schedule->vsync_count = next_frame_vsync; |
| 1474 | out_next_frame_schedule->scheduled_frame_finish_ns = |
| 1475 | next_frame_scheduled_finish; |
| 1476 | } |
| 1477 | } else { |
| 1478 | frame_history.OnFrameStart(UINT32_MAX, -1); |
| 1479 | } |
| 1480 | |
| 1481 | return (fetch_schedule_result == 0 && sleep_result) ? 0 : -1; |
| 1482 | } |
| 1483 | |
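// Posts the current buffer to the display service ahead of dvrPresent (GLES
// only), dequeuing one first if necessary. The post carries no fence, and the
// buffer_already_posted flag makes the later dvrPresent skip the duplicate
// post.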
| 1484 | extern "C" void dvrGraphicsPostEarly(DvrGraphicsContext* graphics_context) { |
| 1485 | ATRACE_NAME("dvrGraphicsPostEarly"); |
| 1486 | ALOGI_IF(TRACE, "dvrGraphicsPostEarly"); |
| 1487 | |
Alex Vakulenko | 4fe6058 | 2017-02-02 11:35:59 -0800 | [diff] [blame] | 1488 | LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1489 | |
| 1490 | // Note that this function can be called before or after |
| 1491 | // dvrBeginRenderFrame. |
| 1492 | if (!graphics_context->buffer_already_posted) { |
| 1493 | graphics_context->buffer_already_posted = true; |
| 1494 | |
| 1495 | if (!graphics_context->current_buffer) { |
| 1496 | graphics_context->current_buffer = |
| 1497 | graphics_context->buffer_queue->Dequeue(); |
| 1498 | } |
| 1499 | |
| 1500 | auto buffer = graphics_context->current_buffer->buffer().get(); |
| 1501 | ATRACE_ASYNC_BEGIN("BufferPost", buffer->id()); |
Corey Tabaka | 2251d82 | 2017-04-20 16:04:07 -0700 | [diff] [blame] | 1502 | int result = buffer->Post<void>(LocalHandle()); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1503 | if (result < 0) |
| 1504 | ALOGE("Buffer post failed: %d (%s)", result, strerror(-result)); |
| 1505 | } |
| 1506 | } |
| 1507 | |
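// Finishes a GLES frame: flushes GL and creates a native fence for the
// submitted work, posts the current buffer to the display service (unless it
// was already posted early), and records the fence in the frame history for
// schedule prediction.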
| 1508 | int dvrPresent(DvrGraphicsContext* graphics_context) { |
Alex Vakulenko | 4fe6058 | 2017-02-02 11:35:59 -0800 | [diff] [blame] | 1509 | LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != DVR_GRAPHICS_API_GLES); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1510 | |
| 1511 | std::array<char, 128> buf; |
| 1512 | snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|", |
| 1513 | graphics_context->frame_history.GetCurrentFrameVsync()); |
| 1514 | ATRACE_NAME(buf.data()); |
| 1515 | |
| 1516 | if (!graphics_context->current_buffer) { |
| 1517 | ALOGE("Error: dvrPresent called without dvrBeginRenderFrame"); |
| 1518 | return -EPERM; |
| 1519 | } |
| 1520 | |
| 1521 | LocalHandle fence_fd = |
| 1522 | android::dvr::CreateGLSyncAndFlush(graphics_context->gl.egl_display); |
| 1523 | |
| 1524 | ALOGI_IF(TRACE, "PostBuffer: buffer_id=%d, fence_fd=%d", |
| 1525 | graphics_context->current_buffer->buffer()->id(), fence_fd.Get()); |
| 1526 | ALOGW_IF(!graphics_context->display_surface->visible(), |
| 1527 | "PostBuffer: Posting buffer on invisible surface!!!"); |
| 1528 | |
| 1529 | auto buffer = graphics_context->current_buffer->buffer().get(); |
| 1530 | ATRACE_ASYNC_END("BufferDraw", buffer->id()); |
| 1531 | if (!graphics_context->buffer_already_posted) { |
| 1532 | ATRACE_ASYNC_BEGIN("BufferPost", buffer->id()); |
Corey Tabaka | 2251d82 | 2017-04-20 16:04:07 -0700 | [diff] [blame] | 1533 | int result = buffer->Post<void>(fence_fd); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1534 | if (result < 0) |
| 1535 | ALOGE("Buffer post failed: %d (%s)", result, strerror(-result)); |
| 1536 | } |
| 1537 | |
| 1538 | graphics_context->frame_history.OnFrameSubmit(std::move(fence_fd)); |
| 1539 | graphics_context->buffer_already_posted = false; |
| 1540 | graphics_context->current_buffer = nullptr; |
| 1541 | return 0; |
| 1542 | } |
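
// Illustrative sketch only (not part of this file): a minimal GLES frame loop
// using the functions above, assuming `context` is a DvrGraphicsContext* that
// was created elsewhere, `running` is the application's loop condition, and
// `DrawScene()` is the application's own rendering:
//
//   DvrFrameSchedule schedule;
//   while (running) {
//     dvrGraphicsWaitNextFrame(context, 0, &schedule);
//     dvrBeginRenderFrame(context);
//     DrawScene();
//     dvrPresent(context);
//   }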
| 1543 | |
| 1544 | int dvrPresentVk(DvrGraphicsContext* graphics_context, |
| 1545 | VkSemaphore submit_semaphore, uint32_t swapchain_image_index) { |
Alex Vakulenko | 4fe6058 | 2017-02-02 11:35:59 -0800 | [diff] [blame] | 1546 | LOG_ALWAYS_FATAL_IF(graphics_context->graphics_api != |
| 1547 | DVR_GRAPHICS_API_VULKAN); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1548 | |
| 1549 | std::array<char, 128> buf; |
| 1550 | snprintf(buf.data(), buf.size(), "dvrPresent|vsync=%d|", |
| 1551 | graphics_context->frame_history.GetCurrentFrameVsync()); |
| 1552 | ATRACE_NAME(buf.data()); |
| 1553 | |
| 1554 | if (!graphics_context->current_buffer) { |
| 1555 | ALOGE("Error: dvrPresentVk called without dvrBeginRenderFrameVk"); |
| 1556 | return -EPERM; |
| 1557 | } |
| 1558 | |
| 1559 | // Present the specified image. Internally, this gets a fence from the |
| 1560 | // Vulkan driver and passes it to DvrGraphicsContext::Post(), |
| 1561 | // which in turn passes it to buffer->Post() and adds it to frame_history. |
| 1562 | VkPresentInfoKHR present_info = {}; |
| 1563 | present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR; |
| 1564 | present_info.swapchainCount = 1; |
| 1565 | present_info.pSwapchains = &graphics_context->vk.swapchain; |
| 1566 | present_info.pImageIndices = &swapchain_image_index; |
| 1567 | present_info.waitSemaphoreCount = |
| 1568 | (submit_semaphore != VK_NULL_HANDLE) ? 1 : 0; |
| 1569 | present_info.pWaitSemaphores = &submit_semaphore; |
| 1570 | VkResult result = |
| 1571 | vkQueuePresentKHR(graphics_context->vk.present_queue, &present_info); |
| 1572 | if (result != VK_SUCCESS) { |
| 1573 | return -EINVAL; |
| 1574 | } |
| 1575 | |
| 1576 | return 0; |
| 1577 | } |
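
// Illustrative sketch only (not part of this file): the matching Vulkan frame
// loop, assuming `context`, the acquire/submit semaphores, the acquire fence,
// and the queue submission are set up by the application:
//
//   uint32_t image_index = 0;
//   VkImageView image_view = VK_NULL_HANDLE;
//   dvrBeginRenderFrameVk(context, acquire_semaphore, acquire_fence,
//                         &image_index, &image_view);
//   // ... record and submit command buffers that signal submit_semaphore ...
//   dvrPresentVk(context, submit_semaphore, image_index);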
| 1578 | |
| 1579 | extern "C" int dvrGetFrameScheduleResults(DvrGraphicsContext* context, |
| 1580 | DvrFrameScheduleResult* results, |
| 1581 | int in_result_count) { |
| 1582 | if (!context || !results) |
| 1583 | return -EINVAL; |
| 1584 | |
| 1585 | return context->frame_history.GetPreviousFrameResults(results, |
| 1586 | in_result_count); |
| 1587 | } |
| 1588 | |
| 1589 | extern "C" void dvrGraphicsSurfaceSetVisible( |
| 1590 | DvrGraphicsContext* graphics_context, int visible) { |
| 1591 | graphics_context->display_surface->SetVisible(visible); |
| 1592 | } |
| 1593 | |
| 1594 | extern "C" int dvrGraphicsSurfaceGetVisible( |
| 1595 | DvrGraphicsContext* graphics_context) { |
Corey Tabaka | 2251d82 | 2017-04-20 16:04:07 -0700 | [diff] [blame] | 1596 | return !!graphics_context->display_surface->visible(); |
Alex Vakulenko | e4eec20 | 2017-01-27 14:41:04 -0800 | [diff] [blame] | 1597 | } |
| 1598 | |
| 1599 | extern "C" void dvrGraphicsSurfaceSetZOrder( |
| 1600 | DvrGraphicsContext* graphics_context, int z_order) { |
| 1601 | graphics_context->display_surface->SetZOrder(z_order); |
| 1602 | } |
| 1603 | |
| 1604 | extern "C" int dvrGraphicsSurfaceGetZOrder( |
| 1605 | DvrGraphicsContext* graphics_context) { |
| 1606 | return graphics_context->display_surface->z_order(); |
| 1607 | } |