blob: 34c3d6004108fe96343cb29592e6c011aeeae7aa [file] [log] [blame]
/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#include "OpReorderer.h"
18
Chris Craik0b7e8242015-10-28 16:50:44 -070019#include "LayerUpdateQueue.h"
Chris Craik161f54b2015-11-05 11:08:52 -080020#include "RenderNode.h"
Chris Craik98787e62015-11-13 10:55:30 -080021#include "renderstate/OffscreenBufferPool.h"
Chris Craik161f54b2015-11-05 11:08:52 -080022#include "utils/FatVector.h"
23#include "utils/PaintUtils.h"
Chris Craik8ecf41c2015-11-16 10:27:59 -080024#include "utils/TraceUtils.h"
Chris Craikb565df12015-10-05 13:00:52 -070025
Chris Craik161f54b2015-11-05 11:08:52 -080026#include <SkCanvas.h>
Chris Craikd3daa312015-11-06 10:59:56 -080027#include <SkPathOps.h>
Chris Craik161f54b2015-11-05 11:08:52 -080028#include <utils/TypeHelpers.h>
Chris Craikb565df12015-10-05 13:00:52 -070029
30namespace android {
31namespace uirenderer {
32
33class BatchBase {
34
35public:
36 BatchBase(batchid_t batchId, BakedOpState* op, bool merging)
Chris Craik98787e62015-11-13 10:55:30 -080037 : mBatchId(batchId)
38 , mMerging(merging) {
Chris Craikb565df12015-10-05 13:00:52 -070039 mBounds = op->computedState.clippedBounds;
40 mOps.push_back(op);
41 }
42
43 bool intersects(const Rect& rect) const {
44 if (!rect.intersects(mBounds)) return false;
45
46 for (const BakedOpState* op : mOps) {
47 if (rect.intersects(op->computedState.clippedBounds)) {
48 return true;
49 }
50 }
51 return false;
52 }
53
54 batchid_t getBatchId() const { return mBatchId; }
55 bool isMerging() const { return mMerging; }
56
57 const std::vector<BakedOpState*>& getOps() const { return mOps; }
58
59 void dump() const {
Chris Craik6fe991e52015-10-20 09:39:42 -070060 ALOGD(" Batch %p, id %d, merging %d, count %d, bounds " RECT_STRING,
61 this, mBatchId, mMerging, mOps.size(), RECT_ARGS(mBounds));
Chris Craikb565df12015-10-05 13:00:52 -070062 }
63protected:
64 batchid_t mBatchId;
65 Rect mBounds;
66 std::vector<BakedOpState*> mOps;
67 bool mMerging;
68};
69
70class OpBatch : public BatchBase {
71public:
72 static void* operator new(size_t size, LinearAllocator& allocator) {
73 return allocator.alloc(size);
74 }
75
76 OpBatch(batchid_t batchId, BakedOpState* op)
77 : BatchBase(batchId, op, false) {
78 }
79
80 void batchOp(BakedOpState* op) {
81 mBounds.unionWith(op->computedState.clippedBounds);
82 mOps.push_back(op);
83 }
84};
85
/**
 * A batch whose ops may be issued as a single merged draw (MergedBakedOpList),
 * provided each new op passes canMergeWith() against the batch's current state.
 */
class MergingOpBatch : public BatchBase {
public:
    static void* operator new(size_t size, LinearAllocator& allocator) {
        return allocator.alloc(size);
    }

    MergingOpBatch(batchid_t batchId, BakedOpState* op)
            : BatchBase(batchId, op, true)
            , mClipSideFlags(op->computedState.clipSideFlags) {
    }

    /*
     * Helper for determining if a new op can merge with a MergingDrawBatch based on their bounds
     * and clip side flags. Positive bounds delta means new bounds fit in old.
     */
    static inline bool checkSide(const int currentFlags, const int newFlags, const int side,
            float boundsDelta) {
        bool currentClipExists = currentFlags & side;
        bool newClipExists = newFlags & side;

        // if current is clipped, we must be able to fit new bounds in current
        if (boundsDelta > 0 && currentClipExists) return false;

        // if new is clipped, we must be able to fit current bounds in new
        if (boundsDelta < 0 && newClipExists) return false;

        return true;
    }

    // Paint considered "default" for merging purposes: full alpha, no color
    // filter, no shader — merging with a null paint is then equivalent.
    static bool paintIsDefault(const SkPaint& paint) {
        return paint.getAlpha() == 255
                && paint.getColorFilter() == nullptr
                && paint.getShader() == nullptr;
    }

    // Only the attributes relevant to merged drawing are compared; other paint
    // fields are intentionally ignored (see canMergeWith() doc below).
    static bool paintsAreEquivalent(const SkPaint& a, const SkPaint& b) {
        return a.getAlpha() == b.getAlpha()
                && a.getColorFilter() == b.getColorFilter()
                && a.getShader() == b.getShader();
    }

    /*
     * Checks if a (mergeable) op can be merged into this batch
     *
     * If true, the op's multiDraw must be guaranteed to handle both ops simultaneously, so it is
     * important to consider all paint attributes used in the draw calls in deciding both a) if an
     * op tries to merge at all, and b) if the op can merge with another set of ops
     *
     * False positives can lead to information from the paints of subsequent merged operations being
     * dropped, so we make simplifying qualifications on the ops that can merge, per op type.
     */
    bool canMergeWith(BakedOpState* op) const {
        bool isTextBatch = getBatchId() == OpBatchType::Text
                || getBatchId() == OpBatchType::ColorText;

        // Overlapping other operations is only allowed for text without shadow. For other ops,
        // multiDraw isn't guaranteed to overdraw correctly
        if (!isTextBatch || PaintUtils::hasTextShadow(op->op->paint)) {
            if (intersects(op->computedState.clippedBounds)) return false;
        }

        const BakedOpState* lhs = op;
        const BakedOpState* rhs = mOps[0];

        if (!MathUtils::areEqual(lhs->alpha, rhs->alpha)) return false;

        // Identical round rect clip state means both ops will clip in the same way, or not at all.
        // As the state objects are const, we can compare their pointers to determine mergeability
        if (lhs->roundRectClipState != rhs->roundRectClipState) return false;
        if (lhs->projectionPathMask != rhs->projectionPathMask) return false;

        /* Clipping compatibility check
         *
         * Exploits the fact that if a op or batch is clipped on a side, its bounds will equal its
         * clip for that side.
         */
        const int currentFlags = mClipSideFlags;
        const int newFlags = op->computedState.clipSideFlags;
        if (currentFlags != OpClipSideFlags::None || newFlags != OpClipSideFlags::None) {
            const Rect& opBounds = op->computedState.clippedBounds;
            float boundsDelta = mBounds.left - opBounds.left;
            if (!checkSide(currentFlags, newFlags, OpClipSideFlags::Left, boundsDelta)) return false;
            boundsDelta = mBounds.top - opBounds.top;
            if (!checkSide(currentFlags, newFlags, OpClipSideFlags::Top, boundsDelta)) return false;

            // right and bottom delta calculation reversed to account for direction
            boundsDelta = opBounds.right - mBounds.right;
            if (!checkSide(currentFlags, newFlags, OpClipSideFlags::Right, boundsDelta)) return false;
            boundsDelta = opBounds.bottom - mBounds.bottom;
            if (!checkSide(currentFlags, newFlags, OpClipSideFlags::Bottom, boundsDelta)) return false;
        }

        const SkPaint* newPaint = op->op->paint;
        const SkPaint* oldPaint = mOps[0]->op->paint;

        if (newPaint == oldPaint) {
            // if paints are equal, then modifiers + paint attribs don't need to be compared
            return true;
        } else if (newPaint && !oldPaint) {
            return paintIsDefault(*newPaint);
        } else if (!newPaint && oldPaint) {
            return paintIsDefault(*oldPaint);
        }
        return paintsAreEquivalent(*newPaint, *oldPaint);
    }

    void mergeOp(BakedOpState* op) {
        mBounds.unionWith(op->computedState.clippedBounds);
        mOps.push_back(op);

        // Because a new op must have passed canMergeWith(), we know it's passed the clipping compat
        // check, and doesn't extend past a side of the clip that's in use by the merged batch.
        // Therefore it's safe to simply always merge flags, and use the bounds as the clip rect.
        mClipSideFlags |= op->computedState.clipSideFlags;
    }

    int getClipSideFlags() const { return mClipSideFlags; }
    const Rect& getClipRect() const { return mBounds; }

private:
    // OR of OpClipSideFlags from every merged op; paired with mBounds as clip
    int mClipSideFlags;
};
208
Chris Craik0b7e8242015-10-28 16:50:44 -0700209OpReorderer::LayerReorderer::LayerReorderer(uint32_t width, uint32_t height,
Chris Craik98787e62015-11-13 10:55:30 -0800210 const Rect& repaintRect, const BeginLayerOp* beginLayerOp, RenderNode* renderNode)
Chris Craik0b7e8242015-10-28 16:50:44 -0700211 : width(width)
212 , height(height)
Chris Craik98787e62015-11-13 10:55:30 -0800213 , repaintRect(repaintRect)
Chris Craik0b7e8242015-10-28 16:50:44 -0700214 , offscreenBuffer(renderNode ? renderNode->getLayer() : nullptr)
215 , beginLayerOp(beginLayerOp)
216 , renderNode(renderNode) {}
217
Chris Craik6fe991e52015-10-20 09:39:42 -0700218// iterate back toward target to see if anything drawn since should overlap the new op
Chris Craik818c9fb2015-10-23 14:33:42 -0700219// if no target, merging ops still iterate to find similar batch to insert after
Chris Craik6fe991e52015-10-20 09:39:42 -0700220void OpReorderer::LayerReorderer::locateInsertIndex(int batchId, const Rect& clippedBounds,
221 BatchBase** targetBatch, size_t* insertBatchIndex) const {
222 for (int i = mBatches.size() - 1; i >= 0; i--) {
223 BatchBase* overBatch = mBatches[i];
224
225 if (overBatch == *targetBatch) break;
226
227 // TODO: also consider shader shared between batch types
228 if (batchId == overBatch->getBatchId()) {
229 *insertBatchIndex = i + 1;
230 if (!*targetBatch) break; // found insert position, quit
231 }
232
233 if (overBatch->intersects(clippedBounds)) {
234 // NOTE: it may be possible to optimize for special cases where two operations
235 // of the same batch/paint could swap order, such as with a non-mergeable
236 // (clipped) and a mergeable text operation
237 *targetBatch = nullptr;
238 break;
239 }
240 }
241}
242
Chris Craikb87eadd2016-01-06 09:16:05 -0800243void OpReorderer::LayerReorderer::deferLayerClear(const Rect& rect) {
244 mClearRects.push_back(rect);
245}
246
// Converts all rects queued by deferLayerClear() into one SimpleRectsOp drawn
// with SkXfermode::kClear_Mode, and defers it. No-op when nothing is queued.
void OpReorderer::LayerReorderer::flushLayerClears(LinearAllocator& allocator) {
    if (CC_UNLIKELY(!mClearRects.empty())) {
        const int vertCount = mClearRects.size() * 4; // 4 corners per rect
        // put the verts in the frame allocator, since
        // 1) SimpleRectsOps needs verts, not rects
        // 2) even if mClearRects stored verts, std::vectors will move their contents
        Vertex* const verts = (Vertex*) allocator.alloc(vertCount * sizeof(Vertex));

        Vertex* currentVert = verts;
        Rect bounds = mClearRects[0];
        for (auto&& rect : mClearRects) {
            bounds.unionWith(rect);
            Vertex::set(currentVert++, rect.left, rect.top);
            Vertex::set(currentVert++, rect.right, rect.top);
            Vertex::set(currentVert++, rect.left, rect.bottom);
            Vertex::set(currentVert++, rect.right, rect.bottom);
        }
        mClearRects.clear(); // discard rects before drawing so this method isn't reentrant

        // One or more unclipped saveLayers have been enqueued, with deferred clears.
        // Flush all of these clears with a single draw
        SkPaint* paint = allocator.create<SkPaint>();
        paint->setXfermodeMode(SkXfermode::kClear_Mode);
        SimpleRectsOp* op = new (allocator) SimpleRectsOp(bounds,
                Matrix4::identity(), nullptr, paint,
                verts, vertCount);
        BakedOpState* bakedState = BakedOpState::directConstruct(allocator, bounds, *op);

        // NOTE: deferUnmergeableOp only recurses into flushLayerClears for
        // non-CopyToLayer batch ids; mClearRects was cleared above, so this is safe.
        deferUnmergeableOp(allocator, bakedState, OpBatchType::Vertices);
    }
}
279
Chris Craik6fe991e52015-10-20 09:39:42 -0700280void OpReorderer::LayerReorderer::deferUnmergeableOp(LinearAllocator& allocator,
281 BakedOpState* op, batchid_t batchId) {
Chris Craikb87eadd2016-01-06 09:16:05 -0800282 if (batchId != OpBatchType::CopyToLayer) {
283 // if first op after one or more unclipped saveLayers, flush the layer clears
284 flushLayerClears(allocator);
285 }
286
Chris Craik6fe991e52015-10-20 09:39:42 -0700287 OpBatch* targetBatch = mBatchLookup[batchId];
288
289 size_t insertBatchIndex = mBatches.size();
290 if (targetBatch) {
291 locateInsertIndex(batchId, op->computedState.clippedBounds,
292 (BatchBase**)(&targetBatch), &insertBatchIndex);
293 }
294
295 if (targetBatch) {
296 targetBatch->batchOp(op);
297 } else {
298 // new non-merging batch
299 targetBatch = new (allocator) OpBatch(batchId, op);
300 mBatchLookup[batchId] = targetBatch;
301 mBatches.insert(mBatches.begin() + insertBatchIndex, targetBatch);
302 }
303}
304
Chris Craik6fe991e52015-10-20 09:39:42 -0700305void OpReorderer::LayerReorderer::deferMergeableOp(LinearAllocator& allocator,
306 BakedOpState* op, batchid_t batchId, mergeid_t mergeId) {
Chris Craikb87eadd2016-01-06 09:16:05 -0800307 if (batchId != OpBatchType::CopyToLayer) {
308 // if first op after one or more unclipped saveLayers, flush the layer clears
309 flushLayerClears(allocator);
310 }
Chris Craik6fe991e52015-10-20 09:39:42 -0700311 MergingOpBatch* targetBatch = nullptr;
312
313 // Try to merge with any existing batch with same mergeId
314 auto getResult = mMergingBatchLookup[batchId].find(mergeId);
315 if (getResult != mMergingBatchLookup[batchId].end()) {
316 targetBatch = getResult->second;
317 if (!targetBatch->canMergeWith(op)) {
318 targetBatch = nullptr;
319 }
320 }
321
322 size_t insertBatchIndex = mBatches.size();
323 locateInsertIndex(batchId, op->computedState.clippedBounds,
324 (BatchBase**)(&targetBatch), &insertBatchIndex);
325
326 if (targetBatch) {
327 targetBatch->mergeOp(op);
328 } else {
329 // new merging batch
330 targetBatch = new (allocator) MergingOpBatch(batchId, op);
331 mMergingBatchLookup[batchId].insert(std::make_pair(mergeId, targetBatch));
332
333 mBatches.insert(mBatches.begin() + insertBatchIndex, targetBatch);
334 }
335}
336
Chris Craik15c3f192015-12-03 12:16:56 -0800337void OpReorderer::LayerReorderer::replayBakedOpsImpl(void* arg,
338 BakedOpReceiver* unmergedReceivers, MergedOpReceiver* mergedReceivers) const {
Chris Craik5854b342015-10-26 15:49:56 -0700339 ATRACE_NAME("flush drawing commands");
Chris Craik6fe991e52015-10-20 09:39:42 -0700340 for (const BatchBase* batch : mBatches) {
Chris Craik15c3f192015-12-03 12:16:56 -0800341 size_t size = batch->getOps().size();
342 if (size > 1 && batch->isMerging()) {
343 int opId = batch->getOps()[0]->op->opId;
344 const MergingOpBatch* mergingBatch = static_cast<const MergingOpBatch*>(batch);
345 MergedBakedOpList data = {
346 batch->getOps().data(),
347 size,
348 mergingBatch->getClipSideFlags(),
349 mergingBatch->getClipRect()
350 };
Chris Craik15c3f192015-12-03 12:16:56 -0800351 mergedReceivers[opId](arg, data);
352 } else {
353 for (const BakedOpState* op : batch->getOps()) {
354 unmergedReceivers[op->op->opId](arg, *op);
355 }
Chris Craik6fe991e52015-10-20 09:39:42 -0700356 }
357 }
358}
359
// Debug log of the layer's identity, target buffer, and every batch it holds.
void OpReorderer::LayerReorderer::dump() const {
    ALOGD("LayerReorderer %p, %ux%u buffer %p, blo %p, rn %p",
            this, width, height, offscreenBuffer, beginLayerOp, renderNode);
    for (const BatchBase* batch : mBatches) {
        batch->dump();
    }
}
Chris Craikb565df12015-10-05 13:00:52 -0700367
Chris Craik0b7e8242015-10-28 16:50:44 -0700368OpReorderer::OpReorderer(const LayerUpdateQueue& layers, const SkRect& clip,
369 uint32_t viewportWidth, uint32_t viewportHeight,
Chris Craik98787e62015-11-13 10:55:30 -0800370 const std::vector< sp<RenderNode> >& nodes, const Vector3& lightCenter)
Chris Craik6fe991e52015-10-20 09:39:42 -0700371 : mCanvasState(*this) {
Chris Craik818c9fb2015-10-23 14:33:42 -0700372 ATRACE_NAME("prepare drawing commands");
Chris Craikb565df12015-10-05 13:00:52 -0700373
Chris Craik98787e62015-11-13 10:55:30 -0800374 mLayerReorderers.reserve(layers.entries().size());
375 mLayerStack.reserve(layers.entries().size());
376
377 // Prepare to defer Fbo0
378 mLayerReorderers.emplace_back(viewportWidth, viewportHeight, Rect(clip));
379 mLayerStack.push_back(0);
Chris Craikb565df12015-10-05 13:00:52 -0700380 mCanvasState.initializeSaveStack(viewportWidth, viewportHeight,
Chris Craikddf22152015-10-14 17:42:47 -0700381 clip.fLeft, clip.fTop, clip.fRight, clip.fBottom,
Chris Craik98787e62015-11-13 10:55:30 -0800382 lightCenter);
Chris Craik0b7e8242015-10-28 16:50:44 -0700383
384 // Render all layers to be updated, in order. Defer in reverse order, so that they'll be
385 // updated in the order they're passed in (mLayerReorderers are issued to Renderer in reverse)
386 for (int i = layers.entries().size() - 1; i >= 0; i--) {
387 RenderNode* layerNode = layers.entries()[i].renderNode;
388 const Rect& layerDamage = layers.entries()[i].damage;
Chris Craik8d1f2122015-11-24 16:40:09 -0800389 layerNode->computeOrdering();
Chris Craik0b7e8242015-10-28 16:50:44 -0700390
Chris Craik8ecf41c2015-11-16 10:27:59 -0800391 // map current light center into RenderNode's coordinate space
392 Vector3 lightCenter = mCanvasState.currentSnapshot()->getRelativeLightCenter();
393 layerNode->getLayer()->inverseTransformInWindow.mapPoint3d(lightCenter);
394
395 saveForLayer(layerNode->getWidth(), layerNode->getHeight(), 0, 0,
396 layerDamage, lightCenter, nullptr, layerNode);
Chris Craik0b7e8242015-10-28 16:50:44 -0700397
398 if (layerNode->getDisplayList()) {
Chris Craik8d1f2122015-11-24 16:40:09 -0800399 deferNodeOps(*layerNode);
Chris Craik0b7e8242015-10-28 16:50:44 -0700400 }
401 restoreForLayer();
402 }
403
404 // Defer Fbo0
Chris Craikb565df12015-10-05 13:00:52 -0700405 for (const sp<RenderNode>& node : nodes) {
406 if (node->nothingToDraw()) continue;
Chris Craik8d1f2122015-11-24 16:40:09 -0800407 node->computeOrdering();
Chris Craikb565df12015-10-05 13:00:52 -0700408
Chris Craik0b7e8242015-10-28 16:50:44 -0700409 int count = mCanvasState.save(SkCanvas::kClip_SaveFlag | SkCanvas::kMatrix_SaveFlag);
410 deferNodePropsAndOps(*node);
411 mCanvasState.restoreToCount(count);
Chris Craikb565df12015-10-05 13:00:52 -0700412 }
413}
414
Chris Craik818c9fb2015-10-23 14:33:42 -0700415void OpReorderer::onViewportInitialized() {}
416
417void OpReorderer::onSnapshotRestored(const Snapshot& removed, const Snapshot& restored) {}
418
Chris Craik0b7e8242015-10-28 16:50:44 -0700419void OpReorderer::deferNodePropsAndOps(RenderNode& node) {
Chris Craik8ecf41c2015-11-16 10:27:59 -0800420 const RenderProperties& properties = node.properties();
421 const Outline& outline = properties.getOutline();
422 if (properties.getAlpha() <= 0
423 || (outline.getShouldClip() && outline.isEmpty())
424 || properties.getScaleX() == 0
425 || properties.getScaleY() == 0) {
426 return; // rejected
427 }
428
429 if (properties.getLeft() != 0 || properties.getTop() != 0) {
430 mCanvasState.translate(properties.getLeft(), properties.getTop());
431 }
432 if (properties.getStaticMatrix()) {
433 mCanvasState.concatMatrix(*properties.getStaticMatrix());
434 } else if (properties.getAnimationMatrix()) {
435 mCanvasState.concatMatrix(*properties.getAnimationMatrix());
436 }
437 if (properties.hasTransformMatrix()) {
438 if (properties.isTransformTranslateOnly()) {
439 mCanvasState.translate(properties.getTranslationX(), properties.getTranslationY());
440 } else {
441 mCanvasState.concatMatrix(*properties.getTransformMatrix());
442 }
443 }
444
445 const int width = properties.getWidth();
446 const int height = properties.getHeight();
447
448 Rect saveLayerBounds; // will be set to non-empty if saveLayer needed
449 const bool isLayer = properties.effectiveLayerType() != LayerType::None;
450 int clipFlags = properties.getClippingFlags();
451 if (properties.getAlpha() < 1) {
452 if (isLayer) {
453 clipFlags &= ~CLIP_TO_BOUNDS; // bounds clipping done by layer
454 }
455 if (CC_LIKELY(isLayer || !properties.getHasOverlappingRendering())) {
456 // simply scale rendering content's alpha
457 mCanvasState.scaleAlpha(properties.getAlpha());
458 } else {
459 // schedule saveLayer by initializing saveLayerBounds
460 saveLayerBounds.set(0, 0, width, height);
461 if (clipFlags) {
462 properties.getClippingRectForFlags(clipFlags, &saveLayerBounds);
463 clipFlags = 0; // all clipping done by savelayer
464 }
465 }
466
467 if (CC_UNLIKELY(ATRACE_ENABLED() && properties.promotedToLayer())) {
468 // pretend alpha always causes savelayer to warn about
469 // performance problem affecting old versions
470 ATRACE_FORMAT("%s alpha caused saveLayer %dx%d", node.getName(), width, height);
471 }
472 }
473 if (clipFlags) {
474 Rect clipRect;
475 properties.getClippingRectForFlags(clipFlags, &clipRect);
476 mCanvasState.clipRect(clipRect.left, clipRect.top, clipRect.right, clipRect.bottom,
477 SkRegion::kIntersect_Op);
478 }
479
480 if (properties.getRevealClip().willClip()) {
481 Rect bounds;
482 properties.getRevealClip().getBounds(&bounds);
483 mCanvasState.setClippingRoundRect(mAllocator,
484 bounds, properties.getRevealClip().getRadius());
485 } else if (properties.getOutline().willClip()) {
486 mCanvasState.setClippingOutline(mAllocator, &(properties.getOutline()));
487 }
488
489 if (!mCanvasState.quickRejectConservative(0, 0, width, height)) {
490 // not rejected, so defer render as either Layer, or direct (possibly wrapped in saveLayer)
Chris Craik0b7e8242015-10-28 16:50:44 -0700491 if (node.getLayer()) {
492 // HW layer
493 LayerOp* drawLayerOp = new (mAllocator) LayerOp(node);
494 BakedOpState* bakedOpState = tryBakeOpState(*drawLayerOp);
495 if (bakedOpState) {
Chris Craik8ecf41c2015-11-16 10:27:59 -0800496 // Node's layer already deferred, schedule it to render into parent layer
Chris Craik0b7e8242015-10-28 16:50:44 -0700497 currentLayer().deferUnmergeableOp(mAllocator, bakedOpState, OpBatchType::Bitmap);
498 }
Chris Craik8ecf41c2015-11-16 10:27:59 -0800499 } else if (CC_UNLIKELY(!saveLayerBounds.isEmpty())) {
500 // draw DisplayList contents within temporary, since persisted layer could not be used.
501 // (temp layers are clipped to viewport, since they don't persist offscreen content)
502 SkPaint saveLayerPaint;
503 saveLayerPaint.setAlpha(properties.getAlpha());
Chris Craik268a9c02015-12-09 18:05:12 -0800504 deferBeginLayerOp(*new (mAllocator) BeginLayerOp(
Chris Craik8ecf41c2015-11-16 10:27:59 -0800505 saveLayerBounds,
506 Matrix4::identity(),
Chris Craike4db79d2015-12-22 16:32:23 -0800507 nullptr, // no record-time clip - need only respect defer-time one
Chris Craik8ecf41c2015-11-16 10:27:59 -0800508 &saveLayerPaint));
Chris Craik8d1f2122015-11-24 16:40:09 -0800509 deferNodeOps(node);
Chris Craik268a9c02015-12-09 18:05:12 -0800510 deferEndLayerOp(*new (mAllocator) EndLayerOp());
Chris Craik0b7e8242015-10-28 16:50:44 -0700511 } else {
Chris Craik8d1f2122015-11-24 16:40:09 -0800512 deferNodeOps(node);
Chris Craik0b7e8242015-10-28 16:50:44 -0700513 }
514 }
515}
516
Chris Craik161f54b2015-11-05 11:08:52 -0800517typedef key_value_pair_t<float, const RenderNodeOp*> ZRenderNodeOpPair;
518
519template <typename V>
520static void buildZSortedChildList(V* zTranslatedNodes,
521 const DisplayList& displayList, const DisplayList::Chunk& chunk) {
522 if (chunk.beginChildIndex == chunk.endChildIndex) return;
523
524 for (size_t i = chunk.beginChildIndex; i < chunk.endChildIndex; i++) {
525 RenderNodeOp* childOp = displayList.getChildren()[i];
526 RenderNode* child = childOp->renderNode;
527 float childZ = child->properties().getZ();
528
529 if (!MathUtils::isZero(childZ) && chunk.reorderChildren) {
530 zTranslatedNodes->push_back(ZRenderNodeOpPair(childZ, childOp));
531 childOp->skipInOrderDraw = true;
532 } else if (!child->properties().getProjectBackwards()) {
533 // regular, in order drawing DisplayList
534 childOp->skipInOrderDraw = false;
535 }
536 }
537
538 // Z sort any 3d children (stable-ness makes z compare fall back to standard drawing order)
539 std::stable_sort(zTranslatedNodes->begin(), zTranslatedNodes->end());
540}
541
// Returns the index of the first element with key >= 0, or size() when every
// key is negative (keys are expected to be sorted ascending by the caller).
template <typename V>
static size_t findNonNegativeIndex(const V& zTranslatedNodes) {
    size_t index = 0;
    while (index < zTranslatedNodes.size() && zTranslatedNodes[index].key < 0.0f) {
        ++index;
    }
    return index;
}
549
// Defers the negative-Z or positive-Z half of a chunk's 3d children, drawing
// shadows interleaved with casters per the ordering rules described below.
template <typename V>
void OpReorderer::defer3dChildren(ChildrenSelectMode mode, const V& zTranslatedNodes) {
    const int size = zTranslatedNodes.size();
    // early out when the selected half of the (Z-sorted) list is empty
    if (size == 0
            || (mode == ChildrenSelectMode::Negative && zTranslatedNodes[0].key > 0.0f)
            || (mode == ChildrenSelectMode::Positive && zTranslatedNodes[size - 1].key < 0.0f)) {
        // no 3d children to draw
        return;
    }

    /**
     * Draw shadows and (potential) casters mostly in order, but allow the shadows of casters
     * with very similar Z heights to draw together.
     *
     * This way, if Views A & B have the same Z height and are both casting shadows, the shadows are
     * underneath both, and neither's shadow is drawn on top of the other.
     */
    const size_t nonNegativeIndex = findNonNegativeIndex(zTranslatedNodes);
    size_t drawIndex, shadowIndex, endIndex;
    if (mode == ChildrenSelectMode::Negative) {
        drawIndex = 0;
        endIndex = nonNegativeIndex;
        shadowIndex = endIndex; // draw no shadows
    } else {
        drawIndex = nonNegativeIndex;
        endIndex = size;
        shadowIndex = drawIndex; // potentially draw shadow for each pos Z child
    }

    float lastCasterZ = 0.0f;
    while (shadowIndex < endIndex || drawIndex < endIndex) {
        if (shadowIndex < endIndex) {
            const RenderNodeOp* casterNodeOp = zTranslatedNodes[shadowIndex].value;
            const float casterZ = zTranslatedNodes[shadowIndex].key;
            // attempt to render the shadow if the caster about to be drawn is its caster,
            // OR if its caster's Z value is similar to the previous potential caster
            if (shadowIndex == drawIndex || casterZ - lastCasterZ < 0.1f) {
                deferShadow(*casterNodeOp);

                lastCasterZ = casterZ; // must do this even if current caster not casting a shadow
                shadowIndex++;
                continue;
            }
        }

        const RenderNodeOp* childOp = zTranslatedNodes[drawIndex].value;
        deferRenderNodeOpImpl(*childOp);
        drawIndex++;
    }
}
600
// Builds and defers a ShadowOp for the given caster node, intersecting the
// caster's outline path with its reveal clip and clip bounds where present.
void OpReorderer::deferShadow(const RenderNodeOp& casterNodeOp) {
    auto& node = *casterNodeOp.renderNode;
    auto& properties = node.properties();

    // reject casters that are invisible or have no outline path to cast from
    if (properties.getAlpha() <= 0.0f
            || properties.getOutline().getAlpha() <= 0.0f
            || !properties.getOutline().getPath()
            || properties.getScaleX() == 0
            || properties.getScaleY() == 0) {
        // no shadow to draw
        return;
    }

    const SkPath* casterOutlinePath = properties.getOutline().getPath();
    const SkPath* revealClipPath = properties.getRevealClip().getPath();
    if (revealClipPath && revealClipPath->isEmpty()) return;

    float casterAlpha = properties.getAlpha() * properties.getOutline().getAlpha();

    // holds temporary SkPath to store the result of intersections
    SkPath* frameAllocatedPath = nullptr;
    const SkPath* casterPath = casterOutlinePath;

    // intersect the shadow-casting path with the reveal, if present
    if (revealClipPath) {
        frameAllocatedPath = createFrameAllocatedPath();

        Op(*casterPath, *revealClipPath, kIntersect_SkPathOp, frameAllocatedPath);
        casterPath = frameAllocatedPath;
    }

    // intersect the shadow-casting path with the clipBounds, if present
    if (properties.getClippingFlags() & CLIP_TO_CLIP_BOUNDS) {
        if (!frameAllocatedPath) {
            frameAllocatedPath = createFrameAllocatedPath();
        }
        Rect clipBounds;
        properties.getClippingRectForFlags(CLIP_TO_CLIP_BOUNDS, &clipBounds);
        SkPath clipBoundsPath;
        clipBoundsPath.addRect(clipBounds.left, clipBounds.top,
                clipBounds.right, clipBounds.bottom);

        Op(*casterPath, clipBoundsPath, kIntersect_SkPathOp, frameAllocatedPath);
        casterPath = frameAllocatedPath;
    }

    ShadowOp* shadowOp = new (mAllocator) ShadowOp(casterNodeOp, casterAlpha, casterPath,
            mCanvasState.getLocalClipBounds(),
            mCanvasState.currentSnapshot()->getRelativeLightCenter());
    BakedOpState* bakedOpState = BakedOpState::tryShadowOpConstruct(
            mAllocator, *mCanvasState.writableSnapshot(), shadowOp);
    if (CC_LIKELY(bakedOpState)) {
        currentLayer().deferUnmergeableOp(mAllocator, bakedOpState, OpBatchType::Shadow);
    }
}
Chris Craikd3daa312015-11-06 10:59:56 -0800656
Chris Craik8d1f2122015-11-24 16:40:09 -0800657void OpReorderer::deferProjectedChildren(const RenderNode& renderNode) {
658 const SkPath* projectionReceiverOutline = renderNode.properties().getOutline().getPath();
659 int count = mCanvasState.save(SkCanvas::kMatrix_SaveFlag | SkCanvas::kClip_SaveFlag);
660
661 // can't be null, since DL=null node rejection happens before deferNodePropsAndOps
662 const DisplayList& displayList = *(renderNode.getDisplayList());
663
664 const RecordedOp* op = (displayList.getOps()[displayList.projectionReceiveIndex]);
665 const RenderNodeOp* backgroundOp = static_cast<const RenderNodeOp*>(op);
666 const RenderProperties& backgroundProps = backgroundOp->renderNode->properties();
667
668 // Transform renderer to match background we're projecting onto
669 // (by offsetting canvas by translationX/Y of background rendernode, since only those are set)
670 mCanvasState.translate(backgroundProps.getTranslationX(), backgroundProps.getTranslationY());
671
672 // If the projection receiver has an outline, we mask projected content to it
673 // (which we know, apriori, are all tessellated paths)
674 mCanvasState.setProjectionPathMask(mAllocator, projectionReceiverOutline);
675
676 // draw projected nodes
677 for (size_t i = 0; i < renderNode.mProjectedNodes.size(); i++) {
678 RenderNodeOp* childOp = renderNode.mProjectedNodes[i];
679
680 int restoreTo = mCanvasState.save(SkCanvas::kMatrix_SaveFlag);
681 mCanvasState.concatMatrix(childOp->transformFromCompositingAncestor);
Chris Craik268a9c02015-12-09 18:05:12 -0800682 deferRenderNodeOpImpl(*childOp);
Chris Craik8d1f2122015-11-24 16:40:09 -0800683 mCanvasState.restoreToCount(restoreTo);
684 }
685
686 mCanvasState.restoreToCount(count);
687}
688
Chris Craikb565df12015-10-05 13:00:52 -0700689/**
Chris Craik268a9c02015-12-09 18:05:12 -0800690 * Used to define a list of lambdas referencing private OpReorderer::onXX::defer() methods.
Chris Craikb565df12015-10-05 13:00:52 -0700691 *
Chris Craik8d1f2122015-11-24 16:40:09 -0800692 * This allows opIds embedded in the RecordedOps to be used for dispatching to these lambdas.
693 * E.g. a BitmapOp op then would be dispatched to OpReorderer::onBitmapOp(const BitmapOp&)
Chris Craikb565df12015-10-05 13:00:52 -0700694 */
Chris Craik6fe991e52015-10-20 09:39:42 -0700695#define OP_RECEIVER(Type) \
Chris Craik268a9c02015-12-09 18:05:12 -0800696 [](OpReorderer& reorderer, const RecordedOp& op) { reorderer.defer##Type(static_cast<const Type&>(op)); },
Chris Craik8d1f2122015-11-24 16:40:09 -0800697void OpReorderer::deferNodeOps(const RenderNode& renderNode) {
Chris Craik15c3f192015-12-03 12:16:56 -0800698 typedef void (*OpDispatcher) (OpReorderer& reorderer, const RecordedOp& op);
Chris Craik7cbf63d2016-01-06 13:46:52 -0800699 static OpDispatcher receivers[] = BUILD_DEFERRABLE_OP_LUT(OP_RECEIVER);
Chris Craik8d1f2122015-11-24 16:40:09 -0800700
701 // can't be null, since DL=null node rejection happens before deferNodePropsAndOps
702 const DisplayList& displayList = *(renderNode.getDisplayList());
Chris Craikb36af872015-10-16 14:23:12 -0700703 for (const DisplayList::Chunk& chunk : displayList.getChunks()) {
Chris Craik161f54b2015-11-05 11:08:52 -0800704 FatVector<ZRenderNodeOpPair, 16> zTranslatedNodes;
705 buildZSortedChildList(&zTranslatedNodes, displayList, chunk);
706
707 defer3dChildren(ChildrenSelectMode::Negative, zTranslatedNodes);
Chris Craikb565df12015-10-05 13:00:52 -0700708 for (size_t opIndex = chunk.beginOpIndex; opIndex < chunk.endOpIndex; opIndex++) {
Chris Craikb36af872015-10-16 14:23:12 -0700709 const RecordedOp* op = displayList.getOps()[opIndex];
Chris Craikb565df12015-10-05 13:00:52 -0700710 receivers[op->opId](*this, *op);
Chris Craik8d1f2122015-11-24 16:40:09 -0800711
712 if (CC_UNLIKELY(!renderNode.mProjectedNodes.empty()
713 && displayList.projectionReceiveIndex >= 0
714 && static_cast<int>(opIndex) == displayList.projectionReceiveIndex)) {
715 deferProjectedChildren(renderNode);
716 }
Chris Craikb565df12015-10-05 13:00:52 -0700717 }
Chris Craik161f54b2015-11-05 11:08:52 -0800718 defer3dChildren(ChildrenSelectMode::Positive, zTranslatedNodes);
Chris Craikb565df12015-10-05 13:00:52 -0700719 }
720}
721
Chris Craik268a9c02015-12-09 18:05:12 -0800722void OpReorderer::deferRenderNodeOpImpl(const RenderNodeOp& op) {
Chris Craik161f54b2015-11-05 11:08:52 -0800723 if (op.renderNode->nothingToDraw()) return;
Chris Craik6fe991e52015-10-20 09:39:42 -0700724 int count = mCanvasState.save(SkCanvas::kClip_SaveFlag | SkCanvas::kMatrix_SaveFlag);
Chris Craikb565df12015-10-05 13:00:52 -0700725
Chris Craike4db79d2015-12-22 16:32:23 -0800726 // apply state from RecordedOp (clip first, since op's clip is transformed by current matrix)
727 mCanvasState.writableSnapshot()->mutateClipArea().applyClip(op.localClip,
728 *mCanvasState.currentSnapshot()->transform);
Chris Craikb565df12015-10-05 13:00:52 -0700729 mCanvasState.concatMatrix(op.localMatrix);
Chris Craikb565df12015-10-05 13:00:52 -0700730
Chris Craik0b7e8242015-10-28 16:50:44 -0700731 // then apply state from node properties, and defer ops
732 deferNodePropsAndOps(*op.renderNode);
733
Chris Craik6fe991e52015-10-20 09:39:42 -0700734 mCanvasState.restoreToCount(count);
Chris Craikb565df12015-10-05 13:00:52 -0700735}
736
Chris Craik268a9c02015-12-09 18:05:12 -0800737void OpReorderer::deferRenderNodeOp(const RenderNodeOp& op) {
Chris Craik161f54b2015-11-05 11:08:52 -0800738 if (!op.skipInOrderDraw) {
Chris Craik268a9c02015-12-09 18:05:12 -0800739 deferRenderNodeOpImpl(op);
Chris Craik161f54b2015-11-05 11:08:52 -0800740 }
741}
742
Chris Craik386aa032015-12-07 17:08:25 -0800743/**
744 * Defers an unmergeable, strokeable op, accounting correctly
745 * for paint's style on the bounds being computed.
746 */
Chris Craik268a9c02015-12-09 18:05:12 -0800747void OpReorderer::deferStrokeableOp(const RecordedOp& op, batchid_t batchId,
Chris Craik386aa032015-12-07 17:08:25 -0800748 BakedOpState::StrokeBehavior strokeBehavior) {
749 // Note: here we account for stroke when baking the op
750 BakedOpState* bakedState = BakedOpState::tryStrokeableOpConstruct(
Chris Craike4db79d2015-12-22 16:32:23 -0800751 mAllocator, *mCanvasState.writableSnapshot(), op, strokeBehavior);
Chris Craik386aa032015-12-07 17:08:25 -0800752 if (!bakedState) return; // quick rejected
753 currentLayer().deferUnmergeableOp(mAllocator, bakedState, batchId);
754}
755
756/**
757 * Returns batch id for tessellatable shapes, based on paint. Checks to see if path effect/AA will
758 * be used, since they trigger significantly different rendering paths.
759 *
760 * Note: not used for lines/points, since they don't currently support path effects.
761 */
762static batchid_t tessBatchId(const RecordedOp& op) {
763 const SkPaint& paint = *(op.paint);
Chris Craikb565df12015-10-05 13:00:52 -0700764 return paint.getPathEffect()
765 ? OpBatchType::AlphaMaskTexture
766 : (paint.isAntiAlias() ? OpBatchType::AlphaVertices : OpBatchType::Vertices);
767}
768
Chris Craik268a9c02015-12-09 18:05:12 -0800769void OpReorderer::deferArcOp(const ArcOp& op) {
770 deferStrokeableOp(op, tessBatchId(op));
Chris Craik386aa032015-12-07 17:08:25 -0800771}
772
Chris Craikb87eadd2016-01-06 09:16:05 -0800773static bool hasMergeableClip(const BakedOpState& state) {
774 return state.computedState.clipState
775 || state.computedState.clipState->mode == ClipMode::Rectangle;
776}
777
Chris Craik268a9c02015-12-09 18:05:12 -0800778void OpReorderer::deferBitmapOp(const BitmapOp& op) {
Chris Craik15c3f192015-12-03 12:16:56 -0800779 BakedOpState* bakedState = tryBakeOpState(op);
780 if (!bakedState) return; // quick rejected
Chris Craikb565df12015-10-05 13:00:52 -0700781
Chris Craik15c3f192015-12-03 12:16:56 -0800782 // Don't merge non-simply transformed or neg scale ops, SET_TEXTURE doesn't handle rotation
783 // Don't merge A8 bitmaps - the paint's color isn't compared by mergeId, or in
784 // MergingDrawBatch::canMergeWith()
785 if (bakedState->computedState.transform.isSimple()
786 && bakedState->computedState.transform.positiveScale()
787 && PaintUtils::getXfermodeDirect(op.paint) == SkXfermode::kSrcOver_Mode
Chris Craikb87eadd2016-01-06 09:16:05 -0800788 && op.bitmap->colorType() != kAlpha_8_SkColorType
789 && hasMergeableClip(*bakedState)) {
Chris Craik15c3f192015-12-03 12:16:56 -0800790 mergeid_t mergeId = (mergeid_t) op.bitmap->getGenerationID();
791 // TODO: AssetAtlas in mergeId
792 currentLayer().deferMergeableOp(mAllocator, bakedState, OpBatchType::Bitmap, mergeId);
793 } else {
794 currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::Bitmap);
795 }
Chris Craikb565df12015-10-05 13:00:52 -0700796}
797
Chris Craik268a9c02015-12-09 18:05:12 -0800798void OpReorderer::deferBitmapMeshOp(const BitmapMeshOp& op) {
Chris Craikf09ff5a2015-12-08 17:21:58 -0800799 BakedOpState* bakedState = tryBakeOpState(op);
800 if (!bakedState) return; // quick rejected
801 currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::Bitmap);
802}
803
Chris Craik268a9c02015-12-09 18:05:12 -0800804void OpReorderer::deferBitmapRectOp(const BitmapRectOp& op) {
Chris Craikf09ff5a2015-12-08 17:21:58 -0800805 BakedOpState* bakedState = tryBakeOpState(op);
806 if (!bakedState) return; // quick rejected
807 currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::Bitmap);
808}
809
Chris Craik268a9c02015-12-09 18:05:12 -0800810void OpReorderer::deferCirclePropsOp(const CirclePropsOp& op) {
811 // allocate a temporary oval op (with mAllocator, so it persists until render), so the
812 // renderer doesn't have to handle the RoundRectPropsOp type, and so state baking is simple.
813 float x = *(op.x);
814 float y = *(op.y);
815 float radius = *(op.radius);
816 Rect unmappedBounds(x - radius, y - radius, x + radius, y + radius);
817 const OvalOp* resolvedOp = new (mAllocator) OvalOp(
818 unmappedBounds,
819 op.localMatrix,
Chris Craike4db79d2015-12-22 16:32:23 -0800820 op.localClip,
Chris Craik268a9c02015-12-09 18:05:12 -0800821 op.paint);
822 deferOvalOp(*resolvedOp);
823}
824
Chris Craike29ce6f2015-12-10 16:25:13 -0800825void OpReorderer::deferFunctorOp(const FunctorOp& op) {
826 BakedOpState* bakedState = tryBakeOpState(op);
827 if (!bakedState) return; // quick rejected
Chris Craikd2dfd8f2015-12-16 14:27:20 -0800828 currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::Functor);
Chris Craike29ce6f2015-12-10 16:25:13 -0800829}
830
Chris Craik268a9c02015-12-09 18:05:12 -0800831void OpReorderer::deferLinesOp(const LinesOp& op) {
Chris Craik386aa032015-12-07 17:08:25 -0800832 batchid_t batch = op.paint->isAntiAlias() ? OpBatchType::AlphaVertices : OpBatchType::Vertices;
Chris Craik268a9c02015-12-09 18:05:12 -0800833 deferStrokeableOp(op, batch, BakedOpState::StrokeBehavior::Forced);
Chris Craik386aa032015-12-07 17:08:25 -0800834}
835
Chris Craik268a9c02015-12-09 18:05:12 -0800836void OpReorderer::deferOvalOp(const OvalOp& op) {
837 deferStrokeableOp(op, tessBatchId(op));
Chris Craik386aa032015-12-07 17:08:25 -0800838}
839
Chris Craik268a9c02015-12-09 18:05:12 -0800840void OpReorderer::deferPatchOp(const PatchOp& op) {
Chris Craikf09ff5a2015-12-08 17:21:58 -0800841 BakedOpState* bakedState = tryBakeOpState(op);
842 if (!bakedState) return; // quick rejected
843
844 if (bakedState->computedState.transform.isPureTranslate()
Chris Craikb87eadd2016-01-06 09:16:05 -0800845 && PaintUtils::getXfermodeDirect(op.paint) == SkXfermode::kSrcOver_Mode
846 && hasMergeableClip(*bakedState)) {
Chris Craikf09ff5a2015-12-08 17:21:58 -0800847 mergeid_t mergeId = (mergeid_t) op.bitmap->getGenerationID();
848 // TODO: AssetAtlas in mergeId
849
850 // Only use the MergedPatch batchId when merged, so Bitmap+Patch don't try to merge together
851 currentLayer().deferMergeableOp(mAllocator, bakedState, OpBatchType::MergedPatch, mergeId);
852 } else {
853 // Use Bitmap batchId since Bitmap+Patch use same shader
854 currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::Bitmap);
855 }
856}
857
void OpReorderer::deferPathOp(const PathOp& op) {
    // paths are strokeable, but don't batch by tessellation type - they always
    // use the Bitmap batch
    deferStrokeableOp(op, OpBatchType::Bitmap);
}
861
Chris Craik268a9c02015-12-09 18:05:12 -0800862void OpReorderer::deferPointsOp(const PointsOp& op) {
Chris Craik386aa032015-12-07 17:08:25 -0800863 batchid_t batch = op.paint->isAntiAlias() ? OpBatchType::AlphaVertices : OpBatchType::Vertices;
Chris Craik268a9c02015-12-09 18:05:12 -0800864 deferStrokeableOp(op, batch, BakedOpState::StrokeBehavior::Forced);
Chris Craika1717272015-11-19 13:02:43 -0800865}
866
Chris Craik268a9c02015-12-09 18:05:12 -0800867void OpReorderer::deferRectOp(const RectOp& op) {
868 deferStrokeableOp(op, tessBatchId(op));
Chris Craik386aa032015-12-07 17:08:25 -0800869}
870
Chris Craik268a9c02015-12-09 18:05:12 -0800871void OpReorderer::deferRoundRectOp(const RoundRectOp& op) {
872 deferStrokeableOp(op, tessBatchId(op));
Chris Craikb565df12015-10-05 13:00:52 -0700873}
874
Chris Craik268a9c02015-12-09 18:05:12 -0800875void OpReorderer::deferRoundRectPropsOp(const RoundRectPropsOp& op) {
876 // allocate a temporary round rect op (with mAllocator, so it persists until render), so the
877 // renderer doesn't have to handle the RoundRectPropsOp type, and so state baking is simple.
878 const RoundRectOp* resolvedOp = new (mAllocator) RoundRectOp(
879 Rect(*(op.left), *(op.top), *(op.right), *(op.bottom)),
880 op.localMatrix,
Chris Craike4db79d2015-12-22 16:32:23 -0800881 op.localClip,
Chris Craik268a9c02015-12-09 18:05:12 -0800882 op.paint, *op.rx, *op.ry);
883 deferRoundRectOp(*resolvedOp);
884}
885
886void OpReorderer::deferSimpleRectsOp(const SimpleRectsOp& op) {
Chris Craik15c3f192015-12-03 12:16:56 -0800887 BakedOpState* bakedState = tryBakeOpState(op);
888 if (!bakedState) return; // quick rejected
889 currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::Vertices);
Chris Craikb565df12015-10-05 13:00:52 -0700890}
891
Chris Craikd7448e62015-12-15 10:34:36 -0800892static batchid_t textBatchId(const SkPaint& paint) {
893 // TODO: better handling of shader (since we won't care about color then)
894 return paint.getColor() == SK_ColorBLACK ? OpBatchType::Text : OpBatchType::ColorText;
895}
896
Chris Craik268a9c02015-12-09 18:05:12 -0800897void OpReorderer::deferTextOp(const TextOp& op) {
Chris Craik15c3f192015-12-03 12:16:56 -0800898 BakedOpState* bakedState = tryBakeOpState(op);
899 if (!bakedState) return; // quick rejected
Chris Craika1717272015-11-19 13:02:43 -0800900
Chris Craikd7448e62015-12-15 10:34:36 -0800901 batchid_t batchId = textBatchId(*(op.paint));
Chris Craik15c3f192015-12-03 12:16:56 -0800902 if (bakedState->computedState.transform.isPureTranslate()
Chris Craikb87eadd2016-01-06 09:16:05 -0800903 && PaintUtils::getXfermodeDirect(op.paint) == SkXfermode::kSrcOver_Mode
904 && hasMergeableClip(*bakedState)) {
Chris Craik15c3f192015-12-03 12:16:56 -0800905 mergeid_t mergeId = reinterpret_cast<mergeid_t>(op.paint->getColor());
906 currentLayer().deferMergeableOp(mAllocator, bakedState, batchId, mergeId);
907 } else {
908 currentLayer().deferUnmergeableOp(mAllocator, bakedState, batchId);
909 }
Chris Craika1717272015-11-19 13:02:43 -0800910}
911
Chris Craikd7448e62015-12-15 10:34:36 -0800912void OpReorderer::deferTextOnPathOp(const TextOnPathOp& op) {
913 BakedOpState* bakedState = tryBakeOpState(op);
914 if (!bakedState) return; // quick rejected
915 currentLayer().deferUnmergeableOp(mAllocator, bakedState, textBatchId(*(op.paint)));
916}
917
Chris Craikd2dfd8f2015-12-16 14:27:20 -0800918void OpReorderer::deferTextureLayerOp(const TextureLayerOp& op) {
919 BakedOpState* bakedState = tryBakeOpState(op);
920 if (!bakedState) return; // quick rejected
921 currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::TextureLayer);
922}
923
/**
 * Pushes a new layer onto the layer stack, and resets CanvasState (viewport,
 * transform, clip, light center) so that subsequently deferred ops target the
 * layer's repaint area instead of the parent's.
 */
void OpReorderer::saveForLayer(uint32_t layerWidth, uint32_t layerHeight,
        float contentTranslateX, float contentTranslateY,
        const Rect& repaintRect,
        const Vector3& lightCenter,
        const BeginLayerOp* beginLayerOp, RenderNode* renderNode) {
    mCanvasState.save(SkCanvas::kClip_SaveFlag | SkCanvas::kMatrix_SaveFlag);
    mCanvasState.writableSnapshot()->initializeViewport(layerWidth, layerHeight);
    mCanvasState.writableSnapshot()->roundRectClipState = nullptr;
    mCanvasState.writableSnapshot()->setRelativeLightCenter(lightCenter);
    // shift content to account for any left/top clipping of the layer's bounds
    mCanvasState.writableSnapshot()->transform->loadTranslate(
            contentTranslateX, contentTranslateY, 0);
    mCanvasState.writableSnapshot()->setClip(
            repaintRect.left, repaintRect.top, repaintRect.right, repaintRect.bottom);

    // create a new layer repaint, and push its index on the stack
    mLayerStack.push_back(mLayerReorderers.size());
    mLayerReorderers.emplace_back(layerWidth, layerHeight, repaintRect, beginLayerOp, renderNode);
}
942
/** Balances saveForLayer(): restores canvas state and pops the finished layer. */
void OpReorderer::restoreForLayer() {
    // restore canvas, and pop finished layer off of the stack
    mCanvasState.restore();
    mLayerStack.pop_back();
}
948
// TODO: defer time rejection (when bounds become empty) + tests
// Option - just skip layers with no bounds at playback + defer?
/**
 * Begins a clipped saveLayer: computes the minimal layer size (the recorded
 * bounds, clipped by the parent's clip), maps the light center into
 * layer-relative space, and pushes the layer via saveForLayer().
 */
void OpReorderer::deferBeginLayerOp(const BeginLayerOp& op) {
    uint32_t layerWidth = (uint32_t) op.unmappedBounds.getWidth();
    uint32_t layerHeight = (uint32_t) op.unmappedBounds.getHeight();

    auto previous = mCanvasState.currentSnapshot();
    Vector3 lightCenter = previous->getRelativeLightCenter();

    // Combine all transforms used to present saveLayer content:
    // parent content transform * canvas transform * bounds offset
    Matrix4 contentTransform(*(previous->transform));
    contentTransform.multiply(op.localMatrix);
    contentTransform.translate(op.unmappedBounds.left, op.unmappedBounds.top);

    Matrix4 inverseContentTransform;
    inverseContentTransform.loadInverse(contentTransform);

    // map the light center into layer-relative space
    inverseContentTransform.mapPoint3d(lightCenter);

    // Clip bounds of temporary layer to parent's clip rect, so:
    Rect saveLayerBounds(layerWidth, layerHeight);
    // 1) transform Rect(width, height) into parent's space
    //    note: left/top offsets put in contentTransform above
    contentTransform.mapRect(saveLayerBounds);
    // 2) intersect with parent's clip
    saveLayerBounds.doIntersect(previous->getRenderTargetClip());
    // 3) and transform back
    inverseContentTransform.mapRect(saveLayerBounds);
    saveLayerBounds.doIntersect(Rect(layerWidth, layerHeight));
    saveLayerBounds.roundOut();

    // if bounds are reduced, will clip the layer's area by reducing required bounds...
    layerWidth = saveLayerBounds.getWidth();
    layerHeight = saveLayerBounds.getHeight();
    // ...and shifting drawing content to account for left/top side clipping
    float contentTranslateX = -saveLayerBounds.left;
    float contentTranslateY = -saveLayerBounds.top;

    saveForLayer(layerWidth, layerHeight,
            contentTranslateX, contentTranslateY,
            Rect(layerWidth, layerHeight),
            lightCenter,
            &op, nullptr);
}
Chris Craikb565df12015-10-05 13:00:52 -0700995
/**
 * Ends the layer begun by the matching BeginLayerOp, and defers a LayerOp into
 * the parent layer that draws the finished layer's offscreen buffer.
 */
void OpReorderer::deferEndLayerOp(const EndLayerOp& /* ignored */) {
    // capture the begin op and layer index before popping the layer stack
    const BeginLayerOp& beginLayerOp = *currentLayer().beginLayerOp;
    int finishedLayerIndex = mLayerStack.back();

    restoreForLayer();

    // record the draw operation into the previous layer's list of draw commands
    // uses state from the associated beginLayerOp, since it has all the state needed for drawing
    LayerOp* drawLayerOp = new (mAllocator) LayerOp(
            beginLayerOp.unmappedBounds,
            beginLayerOp.localMatrix,
            beginLayerOp.localClip,
            beginLayerOp.paint,
            &mLayerReorderers[finishedLayerIndex].offscreenBuffer);
    BakedOpState* bakedOpState = tryBakeOpState(*drawLayerOp);

    if (bakedOpState) {
        // Layer will be drawn into parent layer (which is now current, since we popped mLayerStack)
        currentLayer().deferUnmergeableOp(mAllocator, bakedOpState, OpBatchType::Bitmap);
    } else {
        // Layer won't be drawn - delete its drawing batches to prevent it from doing any work
        // TODO: need to prevent any render work from being done
        //  - create layerop earlier for reject purposes?
        mLayerReorderers[finishedLayerIndex].clear();
        return;
    }
}
1023
/**
 * Begins an unclipped saveLayer. Rather than rendering into a separate target,
 * the covered region of the current rendertarget is copied out into a layer,
 * the region is cleared, and a copy-back op is stashed until the balanced
 * EndUnclippedLayerOp is seen.
 */
void OpReorderer::deferBeginUnclippedLayerOp(const BeginUnclippedLayerOp& op) {
    // compute the affected rendertarget region: op bounds, mapped by the
    // current transform, clipped to the rendertarget clip
    Matrix4 boundsTransform(*(mCanvasState.currentSnapshot()->transform));
    boundsTransform.multiply(op.localMatrix);

    Rect dstRect(op.unmappedBounds);
    boundsTransform.mapRect(dstRect);
    dstRect.doIntersect(mCanvasState.currentSnapshot()->getRenderTargetClip());

    // Allocate a holding position for the layer object (copyTo will produce, copyFrom will consume)
    OffscreenBuffer** layerHandle = mAllocator.create<OffscreenBuffer*>();

    /**
     * First, defer an operation to copy out the content from the rendertarget into a layer.
     */
    auto copyToOp = new (mAllocator) CopyToLayerOp(op, layerHandle);
    BakedOpState* bakedState = BakedOpState::directConstruct(mAllocator, dstRect, *copyToOp);
    currentLayer().deferUnmergeableOp(mAllocator, bakedState, OpBatchType::CopyToLayer);

    /**
     * Defer a clear rect, so that clears from multiple unclipped layers can be drawn
     * both 1) simultaneously, and 2) as long after the copyToLayer executes as possible
     */
    currentLayer().deferLayerClear(dstRect);

    /**
     * And stash an operation to copy that layer back under the rendertarget until
     * a balanced EndUnclippedLayerOp is seen
     */
    auto copyFromOp = new (mAllocator) CopyFromLayerOp(op, layerHandle);
    bakedState = BakedOpState::directConstruct(mAllocator, dstRect, *copyFromOp);
    currentLayer().activeUnclippedSaveLayers.push_back(bakedState);
}
1056
1057void OpReorderer::deferEndUnclippedLayerOp(const EndUnclippedLayerOp& op) {
1058 LOG_ALWAYS_FATAL_IF(currentLayer().activeUnclippedSaveLayers.empty(), "no layer to end!");
1059
1060 BakedOpState* copyFromLayerOp = currentLayer().activeUnclippedSaveLayers.back();
1061 currentLayer().deferUnmergeableOp(mAllocator, copyFromLayerOp, OpBatchType::CopyFromLayer);
1062 currentLayer().activeUnclippedSaveLayers.pop_back();
1063}
1064
Chris Craikb565df12015-10-05 13:00:52 -07001065} // namespace uirenderer
1066} // namespace android