/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "CanvasContext.h"

#include <GpuMemoryTracker.h>
#include <apex/window.h>
#include <fcntl.h>
#include <strings.h>
#include <sys/stat.h>

#include <algorithm>
#include <cstdint>
#include <cstdlib>
#include <functional>

#include "../Properties.h"
#include "AnimationContext.h"
#include "Frame.h"
#include "LayerUpdateQueue.h"
#include "Properties.h"
#include "RenderThread.h"
#include "hwui/Canvas.h"
#include "pipeline/skia/SkiaOpenGLPipeline.h"
#include "pipeline/skia/SkiaPipeline.h"
#include "pipeline/skia/SkiaVulkanPipeline.h"
#include "thread/CommonPool.h"
#include "utils/GLUtils.h"
#include "utils/TimeUtils.h"
#include "utils/TraceUtils.h"

#define TRIM_MEMORY_COMPLETE 80
#define TRIM_MEMORY_UI_HIDDEN 20

#define LOG_FRAMETIME_MMA 0

#if LOG_FRAMETIME_MMA
static float sBenchMma = 0;
static int sFrameCount = 0;
static const float NANOS_PER_MILLIS_F = 1000000.0f;
#endif

namespace android {
namespace uirenderer {
namespace renderthread {

CanvasContext* CanvasContext::create(RenderThread& thread, bool translucent,
                                     RenderNode* rootRenderNode, IContextFactory* contextFactory) {
    auto renderType = Properties::getRenderPipelineType();

    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaOpenGLPipeline>(thread));
        case RenderPipelineType::SkiaVulkan:
            return new CanvasContext(thread, translucent, rootRenderNode, contextFactory,
                                     std::make_unique<skiapipeline::SkiaVulkanPipeline>(thread));
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
    return nullptr;
}

void CanvasContext::invokeFunctor(const RenderThread& thread, Functor* functor) {
    ATRACE_CALL();
    auto renderType = Properties::getRenderPipelineType();
    switch (renderType) {
        case RenderPipelineType::SkiaGL:
            skiapipeline::SkiaOpenGLPipeline::invokeFunctor(thread, functor);
            break;
        case RenderPipelineType::SkiaVulkan:
            skiapipeline::SkiaVulkanPipeline::invokeFunctor(thread, functor);
            break;
        default:
            LOG_ALWAYS_FATAL("canvas context type %d not supported", (int32_t)renderType);
            break;
    }
}

void CanvasContext::prepareToDraw(const RenderThread& thread, Bitmap* bitmap) {
    skiapipeline::SkiaPipeline::prepareToDraw(thread, bitmap);
}

CanvasContext::CanvasContext(RenderThread& thread, bool translucent, RenderNode* rootRenderNode,
                             IContextFactory* contextFactory,
                             std::unique_ptr<IRenderPipeline> renderPipeline)
        : mRenderThread(thread)
        , mGenerationID(0)
        , mOpaque(!translucent)
        , mAnimationContext(contextFactory->createAnimationContext(mRenderThread.timeLord()))
        , mJankTracker(&thread.globalProfileData())
        , mProfiler(mJankTracker.frames(), thread.timeLord().frameIntervalNanos())
        , mContentDrawBounds(0, 0, 0, 0)
        , mRenderPipeline(std::move(renderPipeline)) {
    rootRenderNode->makeRoot();
    mRenderNodes.emplace_back(rootRenderNode);
    mProfiler.setDensity(DeviceInfo::getDensity());
    setRenderAheadDepth(Properties::defaultRenderAhead);
}

CanvasContext::~CanvasContext() {
    destroy();
    for (auto& node : mRenderNodes) {
        node->clearRoot();
    }
    mRenderNodes.clear();
}

void CanvasContext::addRenderNode(RenderNode* node, bool placeFront) {
    int pos = placeFront ? 0 : static_cast<int>(mRenderNodes.size());
    node->makeRoot();
    mRenderNodes.emplace(mRenderNodes.begin() + pos, node);
}

void CanvasContext::removeRenderNode(RenderNode* node) {
    node->clearRoot();
    mRenderNodes.erase(std::remove(mRenderNodes.begin(), mRenderNodes.end(), node),
                       mRenderNodes.end());
}

void CanvasContext::destroy() {
    stopDrawing();
    setSurface(nullptr);
    freePrefetchedLayers();
    destroyHardwareResources();
    mAnimationContext->destroy();
}

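// Attaches a new output surface (or detaches the current one when null): wraps it
// in a ReliableSurface, rebinds the render pipeline, and resets per-surface state
// such as the swap history and frame timestamp collection.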
void CanvasContext::setSurface(sp<Surface>&& surface) {
    ATRACE_CALL();

    if (surface) {
        mNativeSurface = new ReliableSurface{std::move(surface)};
        // TODO: Fix error handling & re-shorten timeout
        ANativeWindow_setDequeueTimeout(mNativeSurface.get(), 4000_ms);
    } else {
        mNativeSurface = nullptr;
    }

    if (mRenderAheadDepth == 0 && DeviceInfo::get()->getMaxRefreshRate() > 66.6f) {
        mFixedRenderAhead = false;
        mRenderAheadCapacity = 1;
    } else {
        mFixedRenderAhead = true;
        mRenderAheadCapacity = mRenderAheadDepth;
    }

    ColorMode colorMode = mWideColorGamut ? ColorMode::WideColorGamut : ColorMode::SRGB;
    bool hasSurface = mRenderPipeline->setSurface(mNativeSurface.get(), mSwapBehavior, colorMode,
                                                  mRenderAheadCapacity);

    mFrameNumber = -1;

    if (hasSurface) {
        mHaveNewSurface = true;
        mSwapHistory.clear();
        // Enable frame stats after the surface has been bound to the appropriate graphics API.
        // Order is important when new and old surfaces are the same, because old surface has
        // its frame stats disabled automatically.
        mNativeSurface->enableFrameTimestamps(true);
    } else {
        mRenderThread.removeFrameCallback(this);
        mGenerationID++;
    }
}

void CanvasContext::setSwapBehavior(SwapBehavior swapBehavior) {
    mSwapBehavior = swapBehavior;
}

bool CanvasContext::pauseSurface() {
    mGenerationID++;
    return mRenderThread.removeFrameCallback(this);
}

void CanvasContext::setStopped(bool stopped) {
    if (mStopped != stopped) {
        mStopped = stopped;
        if (mStopped) {
            mGenerationID++;
            mRenderThread.removeFrameCallback(this);
            mRenderPipeline->onStop();
        } else if (mIsDirty && hasSurface()) {
            mRenderThread.postFrameCallback(this);
        }
    }
}

void CanvasContext::allocateBuffers() {
    if (mNativeSurface) {
        mNativeSurface->allocateBuffers();
    }
}

void CanvasContext::setLightAlpha(uint8_t ambientShadowAlpha, uint8_t spotShadowAlpha) {
    mLightInfo.ambientShadowAlpha = ambientShadowAlpha;
    mLightInfo.spotShadowAlpha = spotShadowAlpha;
}

void CanvasContext::setLightGeometry(const Vector3& lightCenter, float lightRadius) {
    mLightGeometry.center = lightCenter;
    mLightGeometry.radius = lightRadius;
}

void CanvasContext::setOpaque(bool opaque) {
    mOpaque = opaque;
}

void CanvasContext::setWideGamut(bool wideGamut) {
    mWideColorGamut = wideGamut;
}

bool CanvasContext::makeCurrent() {
    if (mStopped) return false;

    auto result = mRenderPipeline->makeCurrent();
    switch (result) {
        case MakeCurrentResult::AlreadyCurrent:
            return true;
        case MakeCurrentResult::Failed:
            mHaveNewSurface = true;
            setSurface(nullptr);
            return false;
        case MakeCurrentResult::Succeeded:
            mHaveNewSurface = true;
            return true;
        default:
            LOG_ALWAYS_FATAL("unexpected result %d from IRenderPipeline::makeCurrent",
                             (int32_t)result);
    }

    return true;
}

static bool wasSkipped(FrameInfo* info) {
    return info && ((*info)[FrameInfoIndex::Flags] & FrameInfoFlags::SkippedFrame);
}

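// Heuristic check for a backed-up swap chain: once the swap history is full,
// consistently slow dequeue/queue durations with no multi-frame gaps between
// swaps are treated as a sign that the consumer is not draining buffers fast enough.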
bool CanvasContext::isSwapChainStuffed() {
    static const auto SLOW_THRESHOLD = 6_ms;

    if (mSwapHistory.size() != mSwapHistory.capacity()) {
        // We want at least 3 frames of history before attempting to
        // guess if the queue is stuffed
        return false;
    }
    nsecs_t frameInterval = mRenderThread.timeLord().frameIntervalNanos();
    auto& swapA = mSwapHistory[0];

    // Was there a happy queue & dequeue time? If so, don't
    // consider it stuffed
    if (swapA.dequeueDuration < SLOW_THRESHOLD && swapA.queueDuration < SLOW_THRESHOLD) {
        return false;
    }

    for (size_t i = 1; i < mSwapHistory.size(); i++) {
        auto& swapB = mSwapHistory[i];

        // If there's a multi-frameInterval gap we effectively already dropped a frame,
        // so consider the queue healthy.
        if (std::abs(swapA.swapCompletedTime - swapB.swapCompletedTime) > frameInterval * 3) {
            return false;
        }

        // Was there a happy queue & dequeue time? If so, don't
        // consider it stuffed
        if (swapB.dequeueDuration < SLOW_THRESHOLD && swapB.queueDuration < SLOW_THRESHOLD) {
            return false;
        }

        swapA = swapB;
    }

    // All signs point to a stuffed swap chain
    ATRACE_NAME("swap chain stuffed");
    return true;
}

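// Sync stage of a frame: imports the UI thread's frame info, runs prepareTree()
// on every render node, advances render-thread animations, and decides via
// info.out.canDrawThisFrame whether the draw stage should run for this vsync.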
void CanvasContext::prepareTree(TreeInfo& info, int64_t* uiFrameInfo, int64_t syncQueued,
                                RenderNode* target) {
    mRenderThread.removeFrameCallback(this);

    // If the previous frame was dropped we don't need to hold onto it, so
    // just keep using the previous frame's structure instead
    if (!wasSkipped(mCurrentFrameInfo)) {
        mCurrentFrameInfo = mJankTracker.startFrame();
        mLast4FrameInfos.next().first = mCurrentFrameInfo;
    }
    mCurrentFrameInfo->importUiThreadInfo(uiFrameInfo);
    mCurrentFrameInfo->set(FrameInfoIndex::SyncQueued) = syncQueued;
    mCurrentFrameInfo->markSyncStart();

    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.damageGenerationId = mDamageId++;
    info.out.canDrawThisFrame = true;

    mAnimationContext->startFrame(info.mode);
    for (const sp<RenderNode>& node : mRenderNodes) {
        // Only the primary target node will be drawn in full - all other nodes get drawn in
        // real-time mode. In case of a window, the primary node is the window content and the
        // other node(s) are non client / filler nodes.
        info.mode = (node.get() == target ? TreeInfo::MODE_FULL : TreeInfo::MODE_RT_ONLY);
        node->prepareTree(info);
        GL_CHECKPOINT(MODERATE);
    }
    mAnimationContext->runRemainingAnimations(info);
    GL_CHECKPOINT(MODERATE);

    freePrefetchedLayers();
    GL_CHECKPOINT(MODERATE);

    mIsDirty = true;

    if (CC_UNLIKELY(!hasSurface())) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        info.out.canDrawThisFrame = false;
        return;
    }

    if (CC_LIKELY(mSwapHistory.size() && !Properties::forceDrawFrame)) {
        nsecs_t latestVsync = mRenderThread.timeLord().latestVsync();
        SwapHistory& lastSwap = mSwapHistory.back();
        nsecs_t vsyncDelta = std::abs(lastSwap.vsyncTime - latestVsync);
        // The slight fudge-factor is to deal with cases where
        // the vsync was estimated due to being slow handling the signal.
        // See the logic in TimeLord#computeFrameTimeNanos or in
        // Choreographer.java for details on when this happens
        if (vsyncDelta < 2_ms) {
            // Already drew for this vsync pulse, UI draw request missed
            // the deadline for RT animations
            info.out.canDrawThisFrame = false;
        }
    } else {
        info.out.canDrawThisFrame = true;
    }

    // TODO: Do we need to abort out if the backdrop is added but not ready? Should that even
    // be an allowable combination?
    if (mRenderNodes.size() > 2 && !mRenderNodes[1]->isRenderable()) {
        info.out.canDrawThisFrame = false;
    }

    if (info.out.canDrawThisFrame) {
        int err = mNativeSurface->reserveNext();
        if (err != OK) {
            mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
            info.out.canDrawThisFrame = false;
            ALOGW("reserveNext failed, error = %d (%s)", err, strerror(-err));
            if (err != TIMED_OUT) {
                // A timed out surface can still recover, but assume others are permanently dead.
                setSurface(nullptr);
                return;
            }
        }
    } else {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
    }

    bool postedFrameCallback = false;
    if (info.out.hasAnimations || !info.out.canDrawThisFrame) {
        if (CC_UNLIKELY(!Properties::enableRTAnimations)) {
            info.out.requiresUiRedraw = true;
        }
        if (!info.out.requiresUiRedraw) {
            // If animationsNeedsRedraw is set don't bother posting for an RT anim
            // as we will just end up fighting the UI thread.
            mRenderThread.postFrameCallback(this);
            postedFrameCallback = true;
        }
    }

    if (!postedFrameCallback &&
        info.out.animatedImageDelay != TreeInfo::Out::kNoAnimatedImageDelay) {
        // Subtract the time of one frame so it can be displayed on time.
        const nsecs_t kFrameTime = mRenderThread.timeLord().frameIntervalNanos();
        if (info.out.animatedImageDelay <= kFrameTime) {
            mRenderThread.postFrameCallback(this);
        } else {
            const auto delay = info.out.animatedImageDelay - kFrameTime;
            int genId = mGenerationID;
            mRenderThread.queue().postDelayed(delay, [this, genId]() {
                if (mGenerationID == genId) {
                    mRenderThread.postFrameCallback(this);
                }
            });
        }
    }
}

void CanvasContext::stopDrawing() {
    mRenderThread.removeFrameCallback(this);
    mAnimationContext->pauseAnimators();
    mGenerationID++;
}

void CanvasContext::notifyFramePending() {
    ATRACE_CALL();
    mRenderThread.pushBackFrameCallback(this);
}

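// When render-ahead is active, push the buffer's desired present time one or more
// vsyncs into the future so the queue can stay ahead of the display; otherwise leave
// the timestamp on NATIVE_WINDOW_TIMESTAMP_AUTO.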
void CanvasContext::setPresentTime() {
    int64_t presentTime = NATIVE_WINDOW_TIMESTAMP_AUTO;
    int renderAhead = 0;
    const auto frameIntervalNanos = mRenderThread.timeLord().frameIntervalNanos();
    if (mFixedRenderAhead) {
        renderAhead = std::min(mRenderAheadDepth, mRenderAheadCapacity);
    } else if (frameIntervalNanos < 15_ms) {
        renderAhead = std::min(1, static_cast<int>(mRenderAheadCapacity));
    }

    if (renderAhead) {
        presentTime = mCurrentFrameInfo->get(FrameInfoIndex::Vsync) +
                      (frameIntervalNanos * (renderAhead + 1));
    }
    native_window_set_buffers_timestamp(mNativeSurface.get(), presentTime);
}

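// Draw stage of a frame: resolves the dirty region, issues draw commands through the
// render pipeline, swaps buffers, and records dequeue/queue/swap timings into
// mSwapHistory and the current FrameInfo for jank tracking.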
void CanvasContext::draw() {
    SkRect dirty;
    mDamageAccumulator.finish(&dirty);

    if (dirty.isEmpty() && Properties::skipEmptyFrames && !surfaceRequiresRedraw()) {
        mCurrentFrameInfo->addFlag(FrameInfoFlags::SkippedFrame);
        return;
    }

    mCurrentFrameInfo->markIssueDrawCommandsStart();

    Frame frame = mRenderPipeline->getFrame();
    setPresentTime();

    SkRect windowDirty = computeDirtyRect(frame, &dirty);

    bool drew = mRenderPipeline->draw(frame, windowDirty, dirty, mLightGeometry, &mLayerUpdateQueue,
                                      mContentDrawBounds, mOpaque, mLightInfo, mRenderNodes,
                                      &(profiler()));

    int64_t frameCompleteNr = getFrameNumber();

    waitOnFences();

    bool requireSwap = false;
    int error = OK;
    bool didSwap =
            mRenderPipeline->swapBuffers(frame, drew, windowDirty, mCurrentFrameInfo, &requireSwap);

    mIsDirty = false;

    if (requireSwap) {
        bool didDraw = true;
        // Handle any swapchain errors
        error = mNativeSurface->getAndClearError();
        if (error == TIMED_OUT) {
            // Try again
            mRenderThread.postFrameCallback(this);
            // But since this frame didn't happen, we need to mark full damage in the swap
            // history
            didDraw = false;

        } else if (error != OK || !didSwap) {
            // Unknown error, abandon the surface
            setSurface(nullptr);
            didDraw = false;
        }

        SwapHistory& swap = mSwapHistory.next();
        if (didDraw) {
            swap.damage = windowDirty;
        } else {
            float max = static_cast<float>(INT_MAX);
            swap.damage = SkRect::MakeWH(max, max);
        }
        swap.swapCompletedTime = systemTime(SYSTEM_TIME_MONOTONIC);
        swap.vsyncTime = mRenderThread.timeLord().latestVsync();
        if (didDraw) {
            nsecs_t dequeueStart = ANativeWindow_getLastDequeueStartTime(mNativeSurface.get());
            if (dequeueStart < mCurrentFrameInfo->get(FrameInfoIndex::SyncStart)) {
                // Ignoring dequeue duration as it happened prior to frame render start
                // and thus is not part of the frame.
                swap.dequeueDuration = 0;
            } else {
                swap.dequeueDuration =
                        ANativeWindow_getLastDequeueDuration(mNativeSurface.get());
            }
            swap.queueDuration = ANativeWindow_getLastQueueDuration(mNativeSurface.get());
        } else {
            swap.dequeueDuration = 0;
            swap.queueDuration = 0;
        }
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = swap.dequeueDuration;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = swap.queueDuration;
        mLast4FrameInfos[-1].second = frameCompleteNr;
        mHaveNewSurface = false;
        mFrameNumber = -1;
    } else {
        mCurrentFrameInfo->set(FrameInfoIndex::DequeueBufferDuration) = 0;
        mCurrentFrameInfo->set(FrameInfoIndex::QueueBufferDuration) = 0;
        mLast4FrameInfos[-1].second = -1;
    }

    // TODO: Use a fence for real completion?
    mCurrentFrameInfo->markFrameCompleted();

#if LOG_FRAMETIME_MMA
    float thisFrame = mCurrentFrameInfo->duration(FrameInfoIndex::IssueDrawCommandsStart,
                                                  FrameInfoIndex::FrameCompleted) /
                      NANOS_PER_MILLIS_F;
    if (sFrameCount) {
        sBenchMma = ((9 * sBenchMma) + thisFrame) / 10;
    } else {
        sBenchMma = thisFrame;
    }
    if (++sFrameCount == 10) {
        sFrameCount = 1;
        ALOGD("Average frame time: %.4f", sBenchMma);
    }
#endif

    if (didSwap) {
        for (auto& func : mFrameCompleteCallbacks) {
            std::invoke(func, frameCompleteNr);
        }
        mFrameCompleteCallbacks.clear();
    }

    mJankTracker.finishFrame(*mCurrentFrameInfo);
    if (CC_UNLIKELY(mFrameMetricsReporter.get() != nullptr)) {
        mFrameMetricsReporter->reportFrameMetrics(mCurrentFrameInfo->data());
    }

    if (mLast4FrameInfos.size() == mLast4FrameInfos.capacity()) {
        // By looking 4 frames back, we guarantee all SF stats are available. There are at
        // most 3 buffers in BufferQueue. Surface object keeps stats for the last 8 frames.
        FrameInfo* forthBehind = mLast4FrameInfos.front().first;
        int64_t composedFrameId = mLast4FrameInfos.front().second;
        nsecs_t acquireTime = -1;
        mNativeSurface->getFrameTimestamps(composedFrameId, nullptr, &acquireTime, nullptr, nullptr,
                                           nullptr, nullptr, nullptr, nullptr, nullptr);
        // Ignore default -1, NATIVE_WINDOW_TIMESTAMP_INVALID and NATIVE_WINDOW_TIMESTAMP_PENDING
        forthBehind->set(FrameInfoIndex::GpuCompleted) = acquireTime > 0 ? acquireTime : -1;
        mJankTracker.finishGpuDraw(*forthBehind);
    }

    GpuMemoryTracker::onFrameCompleted();
}

// Called by choreographer to do an RT-driven animation
void CanvasContext::doFrame() {
    if (!mRenderPipeline->isSurfaceReady()) return;
    prepareAndDraw(nullptr);
}

SkISize CanvasContext::getNextFrameSize() const {
    ReliableSurface* surface = mNativeSurface.get();
    if (surface) {
        SkISize size;
        size.fWidth = ANativeWindow_getWidth(surface);
        size.fHeight = ANativeWindow_getHeight(surface);
        return size;
    }
    return {INT32_MAX, INT32_MAX};
}

void CanvasContext::prepareAndDraw(RenderNode* node) {
    ATRACE_CALL();

    nsecs_t vsync = mRenderThread.timeLord().computeFrameTimeNanos();
    int64_t frameInfo[UI_THREAD_FRAME_INFO_SIZE];
    UiFrameInfoBuilder(frameInfo).addFlag(FrameInfoFlags::RTAnimation).setVsync(vsync, vsync);

    TreeInfo info(TreeInfo::MODE_RT_ONLY, *this);
    prepareTree(info, frameInfo, systemTime(SYSTEM_TIME_MONOTONIC), node);
    if (info.out.canDrawThisFrame) {
        draw();
    } else {
        // wait on fences so tasks don't overlap next frame
        waitOnFences();
    }
}

void CanvasContext::markLayerInUse(RenderNode* node) {
    if (mPrefetchedLayers.erase(node)) {
        node->decStrong(nullptr);
    }
}

void CanvasContext::freePrefetchedLayers() {
    if (mPrefetchedLayers.size()) {
        for (auto& node : mPrefetchedLayers) {
            ALOGW("Incorrectly called buildLayer on View: %s, destroying layer...",
                  node->getName());
            node->destroyLayers();
            node->decStrong(nullptr);
        }
        mPrefetchedLayers.clear();
    }
}

void CanvasContext::buildLayer(RenderNode* node) {
    ATRACE_CALL();
    if (!mRenderPipeline->isContextReady()) return;

    // buildLayer() will leave the tree in an unknown state, so we must stop drawing
    stopDrawing();

    TreeInfo info(TreeInfo::MODE_FULL, *this);
    info.damageAccumulator = &mDamageAccumulator;
    info.layerUpdateQueue = &mLayerUpdateQueue;
    info.runAnimations = false;
    node->prepareTree(info);
    SkRect ignore;
    mDamageAccumulator.finish(&ignore);
    // Tickle the GENERIC property on node to mark it as dirty for damaging
    // purposes when the frame is actually drawn
    node->setPropertyFieldsDirty(RenderNode::GENERIC);

    mRenderPipeline->renderLayers(mLightGeometry, &mLayerUpdateQueue, mOpaque, mLightInfo);

    node->incStrong(nullptr);
    mPrefetchedLayers.insert(node);
}

void CanvasContext::destroyHardwareResources() {
    stopDrawing();
    if (mRenderPipeline->isContextReady()) {
        freePrefetchedLayers();
        for (const sp<RenderNode>& node : mRenderNodes) {
            node->destroyHardwareResources();
        }
        mRenderPipeline->onDestroyHardwareResources();
    }
}

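// Handles trim-memory signals forwarded from the framework: trims Skia caches at
// TRIM_MEMORY_UI_HIDDEN and above, and additionally tears down the whole rendering
// context at TRIM_MEMORY_COMPLETE.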
void CanvasContext::trimMemory(RenderThread& thread, int level) {
    ATRACE_CALL();
    if (!thread.getGrContext()) return;
    ATRACE_CALL();
    if (level >= TRIM_MEMORY_COMPLETE) {
        thread.cacheManager().trimMemory(CacheManager::TrimMemoryMode::Complete);
        thread.destroyRenderingContext();
    } else if (level >= TRIM_MEMORY_UI_HIDDEN) {
        thread.cacheManager().trimMemory(CacheManager::TrimMemoryMode::UiHidden);
    }
}

DeferredLayerUpdater* CanvasContext::createTextureLayer() {
    return mRenderPipeline->createTextureLayer();
}

void CanvasContext::dumpFrames(int fd) {
    mJankTracker.dumpStats(fd);
    mJankTracker.dumpFrames(fd);
}

void CanvasContext::resetFrameStats() {
    mJankTracker.reset();
}

void CanvasContext::setName(const std::string&& name) {
    mJankTracker.setDescription(JankTrackerType::Window, std::move(name));
}

void CanvasContext::waitOnFences() {
    if (mFrameFences.size()) {
        ATRACE_CALL();
        for (auto& fence : mFrameFences) {
            fence.get();
        }
        mFrameFences.clear();
    }
}

void CanvasContext::enqueueFrameWork(std::function<void()>&& func) {
    mFrameFences.push_back(CommonPool::async(std::move(func)));
}

int64_t CanvasContext::getFrameNumber() {
    // mFrameNumber is reset to -1 when the surface changes or we swap buffers
    if (mFrameNumber == -1 && mNativeSurface.get()) {
        mFrameNumber = static_cast<int64_t>(mNativeSurface->getNextFrameNumber());
    }
    return mFrameNumber;
}

bool CanvasContext::surfaceRequiresRedraw() {
    if (!mNativeSurface) return false;
    if (mHaveNewSurface) return true;

    int width = -1;
    int height = -1;
    ReliableSurface* surface = mNativeSurface.get();
    surface->query(NATIVE_WINDOW_WIDTH, &width);
    surface->query(NATIVE_WINDOW_HEIGHT, &height);

    return width != mLastFrameWidth || height != mLastFrameHeight;
}

void CanvasContext::setRenderAheadDepth(int renderAhead) {
    if (renderAhead > 2 || renderAhead < 0 || mNativeSurface) {
        return;
    }
    mFixedRenderAhead = true;
    mRenderAheadDepth = static_cast<uint32_t>(renderAhead);
}

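// Computes the region that must be repainted this frame based on the buffer's age:
// a resized viewport or brand-new surface forces a full redraw, otherwise the damage
// from the previous frames still visible in this buffer is unioned into the dirty
// rect. Returns the window-space dirty area (before the buffer-age union) so the
// swap can report the correct damage.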
SkRect CanvasContext::computeDirtyRect(const Frame& frame, SkRect* dirty) {
    if (frame.width() != mLastFrameWidth || frame.height() != mLastFrameHeight) {
        // can't rely on prior content of window if viewport size changes
        dirty->setEmpty();
        mLastFrameWidth = frame.width();
        mLastFrameHeight = frame.height();
    } else if (mHaveNewSurface || frame.bufferAge() == 0) {
        // New surface needs a full draw
        dirty->setEmpty();
    } else {
        if (!dirty->isEmpty() &&
            !dirty->intersect(SkRect::MakeIWH(frame.width(), frame.height()))) {
            ALOGW("Dirty " RECT_STRING " doesn't intersect with 0 0 %d %d ?", SK_RECT_ARGS(*dirty),
                  frame.width(), frame.height());
            dirty->setEmpty();
        }
        profiler().unionDirty(dirty);
    }

    if (dirty->isEmpty()) {
        dirty->setIWH(frame.width(), frame.height());
    }

    // At this point dirty is the area of the window to update. However,
    // the area of the frame we need to repaint is potentially different, so
    // stash the screen area for later
    SkRect windowDirty(*dirty);

    // If the buffer age is 0 we do a full-screen repaint (handled above)
    // If the buffer age is 1 the buffer contents are the same as they were
    // last frame so there's nothing to union() against
    // Therefore we only care about the > 1 case.
    if (frame.bufferAge() > 1) {
        if (frame.bufferAge() > (int)mSwapHistory.size()) {
            // We don't have enough history to handle this old of a buffer
            // Just do a full-draw
            dirty->setIWH(frame.width(), frame.height());
        } else {
            // At this point we haven't yet added the latest frame
            // to the damage history (happens below),
            // so we need to join the damage from the older frames as well.
            for (int i = mSwapHistory.size() - 1;
                 i > ((int)mSwapHistory.size()) - frame.bufferAge(); i--) {
                dirty->join(mSwapHistory[i].damage);
            }
        }
    }

    return windowDirty;
}

} /* namespace renderthread */
} /* namespace uirenderer */
} /* namespace android */