/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "OpenGLPipeline.h"

#include "DeferredLayerUpdater.h"
#include "EglManager.h"
#include "Frame.h"
#include "GlLayer.h"
#include "ProfileRenderer.h"
#include "renderstate/RenderState.h"
#include "OpenGLReadback.h"

#include <cutils/properties.h>
#include <strings.h>

namespace android {
namespace uirenderer {
namespace renderthread {

OpenGLPipeline::OpenGLPipeline(RenderThread& thread)
        : mEglManager(thread.eglManager())
        , mRenderThread(thread) {
}

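// Makes this pipeline's EGL surface current on the render thread's context and
// resets the texture cache's "in use" marks for the new frame. The result
// distinguishes an already-current surface from a freshly made-current one or
// a failure reported by EglManager.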
MakeCurrentResult OpenGLPipeline::makeCurrent() {
    // TODO: Figure out why this workaround is needed, see b/13913604
    // In the meantime this matches the behavior of GLRenderer, so it is not a regression
    EGLint error = 0;
    bool haveNewSurface = mEglManager.makeCurrent(mEglSurface, &error);

    Caches::getInstance().textureCache.resetMarkInUse(this);
    if (!haveNewSurface) {
        return MakeCurrentResult::AlreadyCurrent;
    }
    return error ? MakeCurrentResult::Failed : MakeCurrentResult::Succeeded;
}

Frame OpenGLPipeline::getFrame() {
    LOG_ALWAYS_FATAL_IF(mEglSurface == EGL_NO_SURFACE,
            "drawRenderNode called on a context with no surface!");
    return mEglManager.beginFrame(mEglSurface);
}

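// Draws one frame: applies the damage rect to the EGL surface, defers pending
// layer updates and the render node scene into a FrameBuilder, replays the
// baked ops through a BakedOpRenderer, draws the profiler overlay, and trims
// the caches afterwards. Returns whether any content was actually drawn.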
bool OpenGLPipeline::draw(const Frame& frame, const SkRect& screenDirty, const SkRect& dirty,
        const FrameBuilder::LightGeometry& lightGeometry,
        LayerUpdateQueue* layerUpdateQueue,
        const Rect& contentDrawBounds, bool opaque,
        const BakedOpRenderer::LightInfo& lightInfo,
        const std::vector< sp<RenderNode> >& renderNodes,
        FrameInfoVisualizer* profiler) {

    mEglManager.damageFrame(frame, dirty);

    bool drew = false;

    auto& caches = Caches::getInstance();
    FrameBuilder frameBuilder(dirty, frame.width(), frame.height(), lightGeometry, caches);

    frameBuilder.deferLayers(*layerUpdateQueue);
    layerUpdateQueue->clear();

    frameBuilder.deferRenderNodeScene(renderNodes, contentDrawBounds);

    BakedOpRenderer renderer(caches, mRenderThread.renderState(),
            opaque, lightInfo);
    frameBuilder.replayBakedOps<BakedOpDispatcher>(renderer);
    ProfileRenderer profileRenderer(renderer);
    profiler->draw(profileRenderer);
    drew = renderer.didDraw();

    // post frame cleanup
    caches.clearGarbage();
    caches.pathCache.trim();
    caches.tessellationCache.trim();

#if DEBUG_MEMORY_USAGE
    caches.dumpMemoryUsage();
#else
    if (CC_UNLIKELY(Properties::debugLevel & kDebugMemory)) {
        caches.dumpMemoryUsage();
    }
#endif

    return drew;
}

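// Swaps the frame to the display when something was drawn or the accumulated
// damage requires a swap; *requireSwap reports that decision to the caller.
// Returns false only if a swap was required but eglSwapBuffers failed.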
bool OpenGLPipeline::swapBuffers(const Frame& frame, bool drew, const SkRect& screenDirty,
        FrameInfo* currentFrameInfo, bool* requireSwap) {

    GL_CHECKPOINT(LOW);

    // Even if we decided to cancel the frame, from the perspective of jank
    // metrics the frame was swapped at this point
    currentFrameInfo->markSwapBuffers();

    *requireSwap = drew || mEglManager.damageRequiresSwap();

    if (*requireSwap && (CC_UNLIKELY(!mEglManager.swapBuffers(frame, screenDirty)))) {
        return false;
    }

    return *requireSwap;
}

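// Copies the content of a texture-backed layer into a bitmap: the most recent
// buffer is latched into the layer's texture, then read back through
// OpenGLReadbackImpl.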
bool OpenGLPipeline::copyLayerInto(DeferredLayerUpdater* layer, SkBitmap* bitmap) {
    ATRACE_CALL();
    // acquire most recent buffer for drawing
    layer->updateTexImage();
    layer->apply();
    return OpenGLReadbackImpl::copyLayerInto(mRenderThread,
            static_cast<GlLayer&>(*layer->backingLayer()), bitmap);
}

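// Layer factory handed to DeferredLayerUpdater: builds a GlLayer and generates
// its backing texture on texture unit 0. createTextureLayer() makes sure EGL
// is initialized before wrapping the factory in a DeferredLayerUpdater.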
static Layer* createLayer(RenderState& renderState, uint32_t layerWidth, uint32_t layerHeight,
        SkColorFilter* colorFilter, int alpha, SkBlendMode mode, bool blend) {
    GlLayer* layer = new GlLayer(renderState, layerWidth, layerHeight, colorFilter, alpha,
            mode, blend);
    Caches::getInstance().textureState().activateTexture(0);
    layer->generateTexture();
    return layer;
}

DeferredLayerUpdater* OpenGLPipeline::createTextureLayer() {
    mEglManager.initialize();
    return new DeferredLayerUpdater(mRenderThread.renderState(), createLayer, Layer::Api::OpenGL);
}

void OpenGLPipeline::onStop() {
    if (mEglManager.isCurrent(mEglSurface)) {
        mEglManager.makeCurrent(EGL_NO_SURFACE);
    }
}

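// Replaces the pipeline's EGL surface with one created from the given window
// surface (or leaves it as EGL_NO_SURFACE when the window goes away) and
// applies the requested buffer-preservation behavior. Returns true if a usable
// surface exists afterwards.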
bool OpenGLPipeline::setSurface(Surface* surface, SwapBehavior swapBehavior) {

    if (mEglSurface != EGL_NO_SURFACE) {
        mEglManager.destroySurface(mEglSurface);
        mEglSurface = EGL_NO_SURFACE;
    }

    if (surface) {
        mEglSurface = mEglManager.createSurface(surface);
    }

    if (mEglSurface != EGL_NO_SURFACE) {
        const bool preserveBuffer = (swapBehavior != SwapBehavior::kSwap_discardBuffer);
        mBufferPreserved = mEglManager.setPreserveBuffer(mEglSurface, preserveBuffer);
        return true;
    }

    return false;
}

bool OpenGLPipeline::isSurfaceReady() {
    return CC_UNLIKELY(mEglSurface != EGL_NO_SURFACE);
}

bool OpenGLPipeline::isContextReady() {
    return CC_LIKELY(mEglManager.hasEglContext());
}

void OpenGLPipeline::onDestroyHardwareResources() {
    Caches& caches = Caches::getInstance();
    // Make sure to release all the textures we were owning as there won't
    // be another draw
    caches.textureCache.resetMarkInUse(this);
    mRenderThread.renderState().flush(Caches::FlushMode::Layers);
}

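// Renders queued layer updates only, without producing a frame on the main
// surface; hence the sanity check that the freshly constructed renderer
// reports nothing drawn before the baked ops are replayed.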
void OpenGLPipeline::renderLayers(const FrameBuilder::LightGeometry& lightGeometry,
        LayerUpdateQueue* layerUpdateQueue, bool opaque,
        const BakedOpRenderer::LightInfo& lightInfo) {
    static const std::vector< sp<RenderNode> > emptyNodeList;
    auto& caches = Caches::getInstance();
    FrameBuilder frameBuilder(*layerUpdateQueue, lightGeometry, caches);
    layerUpdateQueue->clear();
    BakedOpRenderer renderer(caches, mRenderThread.renderState(),
            opaque, lightInfo);
    LOG_ALWAYS_FATAL_IF(renderer.didDraw(), "shouldn't draw in buildlayer case");
    frameBuilder.replayBakedOps<BakedOpDispatcher>(renderer);
}

TaskManager* OpenGLPipeline::getTaskManager() {
    return &Caches::getInstance().tasks;
}

static bool layerMatchesWH(OffscreenBuffer* layer, int width, int height) {
    return layer->viewportWidth == (uint32_t)width && layer->viewportHeight == (uint32_t)height;
}

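// Ensures the node has an OffscreenBuffer matching its current size: a new
// buffer is pulled from the layer pool, an existing one is resized, or the
// layer is destroyed when the node no longer fits on a layer. Whenever the
// layer changed, its window transform is refreshed from the damage accumulator.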
bool OpenGLPipeline::createOrUpdateLayer(RenderNode* node,
        const DamageAccumulator& damageAccumulator) {
    RenderState& renderState = mRenderThread.renderState();
    OffscreenBufferPool& layerPool = renderState.layerPool();
    bool transformUpdateNeeded = false;
    if (node->getLayer() == nullptr) {
        node->setLayer(layerPool.get(renderState, node->getWidth(), node->getHeight()));
        transformUpdateNeeded = true;
    } else if (!layerMatchesWH(node->getLayer(), node->getWidth(), node->getHeight())) {
        // TODO: remove now irrelevant, currently enqueued damage (respecting damage ordering)
        // Or, ideally, maintain damage between frames on node/layer so ordering is always correct
        if (node->properties().fitsOnLayer()) {
            node->setLayer(layerPool.resize(node->getLayer(), node->getWidth(), node->getHeight()));
        } else {
            destroyLayer(node);
        }
        transformUpdateNeeded = true;
    }

    if (transformUpdateNeeded && node->getLayer()) {
        // update the transform in window of the layer to reset its origin wrt light source position
        Matrix4 windowTransform;
        damageAccumulator.computeCurrentTransform(&windowTransform);
        node->getLayer()->setWindowTransform(windowTransform);
    }

    return transformUpdateNeeded;
}

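// Prefetches the frame's bitmaps into the texture cache and marks them as in
// use so they are not evicted mid-frame; unpinImages() clears those marks.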
bool OpenGLPipeline::pinImages(LsaVector<sk_sp<Bitmap>>& images) {
    TextureCache& cache = Caches::getInstance().textureCache;
    bool prefetchSucceeded = true;
    for (auto& bitmapResource : images) {
        prefetchSucceeded &= cache.prefetchAndMarkInUse(this, bitmapResource.get());
    }
    return prefetchSucceeded;
}

void OpenGLPipeline::unpinImages() {
    Caches::getInstance().textureCache.resetMarkInUse(this);
}

void OpenGLPipeline::destroyLayer(RenderNode* node) {
    if (OffscreenBuffer* layer = node->getLayer()) {
        layer->renderState.layerPool().putOrDelete(layer);
        node->setLayer(nullptr);
    }
}

void OpenGLPipeline::prepareToDraw(const RenderThread& thread, Bitmap* bitmap) {
    if (Caches::hasInstance() && thread.eglManager().hasEglContext()) {
        ATRACE_NAME("Bitmap#prepareToDraw task");
        Caches::getInstance().textureCache.prefetch(bitmap);
    }
}

void OpenGLPipeline::invokeFunctor(const RenderThread& thread, Functor* functor) {
    DrawGlInfo::Mode mode = DrawGlInfo::kModeProcessNoContext;
    if (thread.eglManager().hasEglContext()) {
        mode = DrawGlInfo::kModeProcess;
    }
    thread.renderState().invokeFunctor(functor, mode, nullptr);
}

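// Helpers for uploading a bitmap into a GraphicBuffer below: a fence timeout
// (nanoseconds, as expected by eglClientWaitSyncKHR) and RAII wrappers that
// guarantee the EGL fence, the EGLImage and the temporary GL texture are
// destroyed on every exit path.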
#define FENCE_TIMEOUT 2000000000

class AutoEglFence {
public:
    AutoEglFence(EGLDisplay display)
            : mDisplay(display) {
        fence = eglCreateSyncKHR(mDisplay, EGL_SYNC_FENCE_KHR, NULL);
    }

    ~AutoEglFence() {
        if (fence != EGL_NO_SYNC_KHR) {
            eglDestroySyncKHR(mDisplay, fence);
        }
    }

    EGLSyncKHR fence = EGL_NO_SYNC_KHR;
private:
    EGLDisplay mDisplay = EGL_NO_DISPLAY;
};

class AutoEglImage {
public:
    AutoEglImage(EGLDisplay display, EGLClientBuffer clientBuffer)
            : mDisplay(display) {
        EGLint imageAttrs[] = { EGL_IMAGE_PRESERVED_KHR, EGL_TRUE, EGL_NONE };
        image = eglCreateImageKHR(display, EGL_NO_CONTEXT,
                EGL_NATIVE_BUFFER_ANDROID, clientBuffer, imageAttrs);
    }

    ~AutoEglImage() {
        if (image != EGL_NO_IMAGE_KHR) {
            eglDestroyImageKHR(mDisplay, image);
        }
    }

    EGLImageKHR image = EGL_NO_IMAGE_KHR;
private:
    EGLDisplay mDisplay = EGL_NO_DISPLAY;
};

class AutoGlTexture {
public:
    AutoGlTexture(uirenderer::Caches& caches)
            : mCaches(caches) {
        glGenTextures(1, &mTexture);
        caches.textureState().bindTexture(mTexture);
    }

    ~AutoGlTexture() {
        mCaches.textureState().deleteTexture(mTexture);
    }

private:
    uirenderer::Caches& mCaches;
    GLuint mTexture = 0;
};

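// Uploads the bitmap's pixels into the GraphicBuffer by wrapping the buffer in
// an EGLImage, binding that image to a temporary 2D texture, issuing
// glTexSubImage2D, and finally waiting on an EGL fence so the upload has
// completed before the buffer is consumed elsewhere.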
static bool uploadBitmapToGraphicBuffer(uirenderer::Caches& caches, SkBitmap& bitmap,
        GraphicBuffer& buffer, GLint format, GLint type) {
    EGLDisplay display = eglGetCurrentDisplay();
    LOG_ALWAYS_FATAL_IF(display == EGL_NO_DISPLAY,
            "Failed to get current EGL display! err=%s",
            uirenderer::renderthread::EglManager::eglErrorString());
    // We use an EGLImage to access the content of the GraphicBuffer
    // The EGL image is later bound to a 2D texture
    EGLClientBuffer clientBuffer = (EGLClientBuffer) buffer.getNativeBuffer();
    AutoEglImage autoImage(display, clientBuffer);
    if (autoImage.image == EGL_NO_IMAGE_KHR) {
        ALOGW("Could not create EGL image, err=%s",
                uirenderer::renderthread::EglManager::eglErrorString());
        return false;
    }
    AutoGlTexture glTexture(caches);
    glEGLImageTargetTexture2DOES(GL_TEXTURE_2D, autoImage.image);

    GL_CHECKPOINT(MODERATE);

    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, bitmap.width(), bitmap.height(),
            format, type, bitmap.getPixels());

    GL_CHECKPOINT(MODERATE);

    // The fence is used to wait for the texture upload to finish
    // properly. We cannot rely on glFlush() and glFinish() as
    // some drivers completely ignore these API calls
    AutoEglFence autoFence(display);
    if (autoFence.fence == EGL_NO_SYNC_KHR) {
        LOG_ALWAYS_FATAL("Could not create sync fence %#x", eglGetError());
        return false;
    }
    // The flag EGL_SYNC_FLUSH_COMMANDS_BIT_KHR will trigger a
    // pipeline flush (similar to what a glFlush() would do.)
    EGLint waitStatus = eglClientWaitSyncKHR(display, autoFence.fence,
            EGL_SYNC_FLUSH_COMMANDS_BIT_KHR, FENCE_TIMEOUT);
    if (waitStatus != EGL_CONDITION_SATISFIED_KHR) {
        LOG_ALWAYS_FATAL("Failed to wait for the fence %#x", eglGetError());
        return false;
    }
    return true;
}

// TODO: handle SRGB sanely
static PixelFormat internalFormatToPixelFormat(GLint internalFormat) {
    switch (internalFormat) {
    case GL_LUMINANCE:
        return PIXEL_FORMAT_RGBA_8888;
    case GL_SRGB8_ALPHA8:
        return PIXEL_FORMAT_RGBA_8888;
    case GL_RGBA:
        return PIXEL_FORMAT_RGBA_8888;
    case GL_RGB:
        return PIXEL_FORMAT_RGB_565;
    case GL_RGBA16F:
        return PIXEL_FORMAT_RGBA_FP16;
    default:
        LOG_ALWAYS_FATAL("Unsupported GL internalFormat: %d", internalFormat);
        return PIXEL_FORMAT_UNKNOWN;
    }
}

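// Allocates a GraphicBuffer-backed (hardware) Bitmap: picks the GL format/type
// and a matching PixelFormat for the bitmap's color type, allocates the
// buffer, converts unsupported color types to N32 first, and then uploads the
// pixels with uploadBitmapToGraphicBuffer().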
sk_sp<Bitmap> OpenGLPipeline::allocateHardwareBitmap(RenderThread& renderThread,
        SkBitmap& skBitmap) {
    renderThread.eglManager().initialize();
    uirenderer::Caches& caches = uirenderer::Caches::getInstance();

    const SkImageInfo& info = skBitmap.info();
    if (info.colorType() == kUnknown_SkColorType || info.colorType() == kAlpha_8_SkColorType) {
        ALOGW("unable to create hardware bitmap of colortype: %d", info.colorType());
        return nullptr;
    }

    bool needSRGB = uirenderer::transferFunctionCloseToSRGB(skBitmap.info().colorSpace());
    bool hasLinearBlending = caches.extensions().hasLinearBlending();
    GLint format, type, internalFormat;
    uirenderer::Texture::colorTypeToGlFormatAndType(caches, skBitmap.colorType(),
            needSRGB && hasLinearBlending, &internalFormat, &format, &type);

    PixelFormat pixelFormat = internalFormatToPixelFormat(internalFormat);
    sp<GraphicBuffer> buffer = new GraphicBuffer(info.width(), info.height(), pixelFormat,
            GraphicBuffer::USAGE_HW_TEXTURE |
            GraphicBuffer::USAGE_SW_WRITE_NEVER |
            GraphicBuffer::USAGE_SW_READ_NEVER,
            std::string("Bitmap::allocateHardwareBitmap pid [") + std::to_string(getpid()) + "]");

    status_t error = buffer->initCheck();
    if (error < 0) {
        ALOGW("GraphicBuffer allocation failed in allocateHardwareBitmap()");
        return nullptr;
    }

    SkBitmap bitmap;
    if (CC_UNLIKELY(uirenderer::Texture::hasUnsupportedColorType(skBitmap.info(),
            hasLinearBlending))) {
        sk_sp<SkColorSpace> sRGB = SkColorSpace::MakeSRGB();
        bitmap = uirenderer::Texture::uploadToN32(skBitmap, hasLinearBlending, std::move(sRGB));
    } else {
        bitmap = skBitmap;
    }

    if (!uploadBitmapToGraphicBuffer(caches, bitmap, *buffer, format, type)) {
        return nullptr;
    }
    return sk_sp<Bitmap>(new Bitmap(buffer.get(), bitmap.info()));
}

} /* namespace renderthread */
} /* namespace uirenderer */
} /* namespace android */