/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ANDROID_HWUI_OP_REORDERER_H
#define ANDROID_HWUI_OP_REORDERER_H

#include "BakedOpState.h"
#include "CanvasState.h"
#include "DisplayList.h"
#include "RecordedOp.h"

#include <unordered_map>
#include <vector>

struct SkRect;

namespace android {
namespace uirenderer {

class BakedOpState;
class BatchBase;
class LayerUpdateQueue;
class MergingOpBatch;
class OffscreenBuffer;
class OpBatch;
class Rect;

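// batchid_t identifies the kind of batch an op belongs to (see OpBatchType below); ops sharing a
// batchid_t may be drawn from the same batch. mergeid_t is an opaque key returned by an op's
// getMergeId() (see LayerReorderer::mMergingBatchLookup) used to locate an existing
// MergingOpBatch that a new mergeable op can join.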
typedef int batchid_t;
typedef const void* mergeid_t;

namespace OpBatchType {
    enum {
        Bitmap,
        MergedPatch,
        AlphaVertices,
        Vertices,
        AlphaMaskTexture,
        Text,
        ColorText,
        Shadow,
        TextureLayer,
        Functor,

        Count // must be last
    };
}

class OpReorderer : public CanvasStateClient {
    typedef void (*BakedOpReceiver)(void*, const BakedOpState&);
    typedef void (*MergedOpReceiver)(void*, const MergedBakedOpList& opList);

    /**
     * Stores the deferred render operations and state used to compute ordering
     * for a single FBO/layer.
     */
    class LayerReorderer {
    public:
        // Creates a LayerReorderer for FBO 0
        LayerReorderer(uint32_t width, uint32_t height, const Rect& repaintRect)
                : LayerReorderer(width, height, repaintRect, nullptr, nullptr) {}

        // Creates a LayerReorderer for an offscreen layer: beginLayerOp is non-null for a
        // saveLayer, and renderNode is non-null for a HW layer.
        LayerReorderer(uint32_t width, uint32_t height,
                const Rect& repaintRect, const BeginLayerOp* beginLayerOp, RenderNode* renderNode);

        // Iterates back toward targetBatch to see if anything drawn since it would overlap the
        // new op; if there is no target, merging ops still iterate to find a similar batch to
        // insert after.
        void locateInsertIndex(int batchId, const Rect& clippedBounds,
                BatchBase** targetBatch, size_t* insertBatchIndex) const;

        void deferUnmergeableOp(LinearAllocator& allocator, BakedOpState* op, batchid_t batchId);

        // The insertion point of a new batch will ideally be immediately after a similar batch
        // (generally, one that uses a similar shader).
        void deferMergeableOp(LinearAllocator& allocator,
                BakedOpState* op, batchid_t batchId, mergeid_t mergeId);

        void replayBakedOpsImpl(void* arg, BakedOpReceiver* receivers, MergedOpReceiver*) const;

        bool empty() const {
            return mBatches.empty();
        }

        void clear() {
            mBatches.clear();
        }

        void dump() const;

        const uint32_t width;
        const uint32_t height;
        const Rect repaintRect;
        OffscreenBuffer* offscreenBuffer;
        const BeginLayerOp* beginLayerOp;
        const RenderNode* renderNode;
    private:
        std::vector<BatchBase*> mBatches;

        /**
         * Maps the mergeid_t returned by an op's getMergeId() to the most recently seen
         * MergingOpBatch of that id. These ids are unique per draw type and guaranteed not to
         * collide, which avoids the need to resolve mergeid collisions.
         */
        std::unordered_map<mergeid_t, MergingOpBatch*> mMergingBatchLookup[OpBatchType::Count];
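        //
        // Illustrative sketch (the real logic lives in OpReorderer.cpp) of how deferMergeableOp
        // is expected to consult this map for a given batchId/mergeId:
        //
        //     auto it = mMergingBatchLookup[batchId].find(mergeId);
        //     MergingOpBatch* targetBatch =
        //             (it != mMergingBatchLookup[batchId].end()) ? it->second : nullptr;
        //     // if no compatible batch is found, a new MergingOpBatch is created, appended to
        //     // mBatches, and registered here under mergeId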

        // Maps batch ids to the most recent *non-merging* batch of that id
        OpBatch* mBatchLookup[OpBatchType::Count] = { nullptr };
    };

public:
    OpReorderer(const LayerUpdateQueue& layers, const SkRect& clip,
            uint32_t viewportWidth, uint32_t viewportHeight,
            const std::vector< sp<RenderNode> >& nodes, const Vector3& lightCenter);

    virtual ~OpReorderer() {}

    /**
     * replayBakedOps() is templated on the class that will receive ops as they are replayed.
     *
     * It constructs a lookup array of lambdas, which allows a recorded BakedOpState to use
     * state->op->opId to look up a receiver that will be called when the op is replayed.
     */
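    // A minimal usage sketch. The concrete dispatcher/renderer pair shown (BakedOpDispatcher and
    // a baked-op renderer) is an assumption for illustration; any types providing the static
    // onXxxOp()/onMergedXxxOps() methods and the layer/frame callbacks used below will work:
    //
    //     OpReorderer reorderer(layerUpdateQueue, dirtyRect, viewportWidth, viewportHeight,
    //             renderNodes, lightCenter);
    //     reorderer.replayBakedOps<BakedOpDispatcher>(bakedOpRenderer);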
    template <typename StaticDispatcher, typename Renderer>
    void replayBakedOps(Renderer& renderer) {
        /**
         * defines a LUT of lambdas which allow a recorded BakedOpState to use state->op->opId to
         * dispatch the op via a method on a static dispatcher when the op is replayed.
         *
         * For example a BitmapOp would resolve, via the lambda lookup, to calling:
         *
         * StaticDispatcher::onBitmapOp(Renderer& renderer, const BitmapOp& op, const BakedOpState& state);
         */
        #define X(Type) \
                [](void* renderer, const BakedOpState& state) { \
                    StaticDispatcher::on##Type(*(static_cast<Renderer*>(renderer)), static_cast<const Type&>(*(state.op)), state); \
                },
        static BakedOpReceiver unmergedReceivers[] = {
            MAP_OPS(X)
        };
        #undef X
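        // For reference, the entry generated by X for the BitmapOp example above is equivalent to:
        //
        //     [](void* renderer, const BakedOpState& state) {
        //         StaticDispatcher::onBitmapOp(*(static_cast<Renderer*>(renderer)),
        //                 static_cast<const BitmapOp&>(*(state.op)), state);
        //     },
        //
        // so indexing unmergedReceivers by state->op->opId reaches the matching dispatcher method.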

        /**
         * defines a LUT of lambdas which allow merged arrays of BakedOpState* to be passed to a
         * static dispatcher when the group of merged ops is replayed. Unmergeable ops trigger
         * a LOG_ALWAYS_FATAL().
         */
        #define X(Type) \
                [](void* renderer, const MergedBakedOpList& opList) { \
                    LOG_ALWAYS_FATAL("op type %d does not support merging", opList.states[0]->op->opId); \
                },
        #define Y(Type) \
                [](void* renderer, const MergedBakedOpList& opList) { \
                    StaticDispatcher::onMerged##Type##s(*(static_cast<Renderer*>(renderer)), opList); \
                },
        static MergedOpReceiver mergedReceivers[] = {
            MAP_OPS_BASED_ON_MERGEABILITY(X, Y)
        };
        #undef X
        #undef Y
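        // For a type routed through Y (e.g., assuming TextOp is one of the mergeable types in
        // MAP_OPS_BASED_ON_MERGEABILITY), the generated entry forwards the whole merged group:
        //
        //     StaticDispatcher::onMergedTextOps(*(static_cast<Renderer*>(renderer)), opList);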

        // Replay through layers in reverse order, since layers
        // later in the list will be drawn by earlier ones
        for (int i = mLayerReorderers.size() - 1; i >= 1; i--) {
            LayerReorderer& layer = mLayerReorderers[i];
            if (layer.renderNode) {
                // cached HW layer - can't skip layer if empty
                renderer.startRepaintLayer(layer.offscreenBuffer, layer.repaintRect);
                layer.replayBakedOpsImpl((void*)&renderer, unmergedReceivers, mergedReceivers);
                renderer.endLayer();
            } else if (!layer.empty()) { // save layer - skip entire layer if empty
                layer.offscreenBuffer = renderer.startTemporaryLayer(layer.width, layer.height);
                layer.replayBakedOpsImpl((void*)&renderer, unmergedReceivers, mergedReceivers);
                renderer.endLayer();
            }
        }

        const LayerReorderer& fbo0 = mLayerReorderers[0];
        renderer.startFrame(fbo0.width, fbo0.height, fbo0.repaintRect);
        fbo0.replayBakedOpsImpl((void*)&renderer, unmergedReceivers, mergedReceivers);
        renderer.endFrame(fbo0.repaintRect);
    }

    void dump() const {
        for (auto&& layer : mLayerReorderers) {
            layer.dump();
        }
    }

    ///////////////////////////////////////////////////////////////////
    /// CanvasStateClient interface
    ///////////////////////////////////////////////////////////////////
    virtual void onViewportInitialized() override;
    virtual void onSnapshotRestored(const Snapshot& removed, const Snapshot& restored) override;
    virtual GLuint getTargetFbo() const override { return 0; }

private:
    enum class ChildrenSelectMode {
        Negative,
        Positive
    };
    void saveForLayer(uint32_t layerWidth, uint32_t layerHeight,
            float contentTranslateX, float contentTranslateY,
            const Rect& repaintRect,
            const Vector3& lightCenter,
            const BeginLayerOp* beginLayerOp, RenderNode* renderNode);
    void restoreForLayer();

    LayerReorderer& currentLayer() { return mLayerReorderers[mLayerStack.back()]; }

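    // Bakes the recorded op against the current snapshot state. The "try" naming suggests
    // construction can fail (presumably when the op is rejected, e.g. fully clipped out), so
    // callers should null-check the result.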
    BakedOpState* tryBakeOpState(const RecordedOp& recordedOp) {
        return BakedOpState::tryConstruct(mAllocator, *mCanvasState.writableSnapshot(), recordedOp);
    }

    // Should always be surrounded by a save/restore pair, and not called if the DisplayList is null
    void deferNodePropsAndOps(RenderNode& node);

    template <typename V>
    void defer3dChildren(ChildrenSelectMode mode, const V& zTranslatedNodes);

    void deferShadow(const RenderNodeOp& casterOp);

    void deferProjectedChildren(const RenderNode& renderNode);

    void deferNodeOps(const RenderNode& renderNode);

    void deferRenderNodeOpImpl(const RenderNodeOp& op);

    void replayBakedOpsImpl(void* arg, BakedOpReceiver* receivers);

    SkPath* createFrameAllocatedPath() {
        mFrameAllocatedPaths.emplace_back(new SkPath);
        return mFrameAllocatedPaths.back().get();
    }

    void deferStrokeableOp(const RecordedOp& op, batchid_t batchId,
            BakedOpState::StrokeBehavior strokeBehavior = BakedOpState::StrokeBehavior::StyleDefined);

    /**
     * Declares all OpReorderer::deferXXXXOp() methods for every RecordedOp type.
     *
     * These private methods are called from within deferImpl to defer each individual op
     * type differently.
     */
#define INTERNAL_OP_HANDLER(Type) \
    void defer##Type(const Type& op);
    MAP_OPS(INTERNAL_OP_HANDLER)

    std::vector<std::unique_ptr<SkPath> > mFrameAllocatedPaths;

    // List of every deferred layer's render state. Replayed in reverse order to render a frame.
    std::vector<LayerReorderer> mLayerReorderers;

    /*
     * Stack of indices within mLayerReorderers representing currently active layers. If drawing
     * layerA within layerB, this will contain, in order:
     * - 0 (representing FBO 0, always present)
     * - layerB's index
     * - layerA's index
     *
     * Note that this vector doesn't always map onto all values of mLayerReorderers. When a
     * layer is finished deferring, it will still be represented in mLayerReorderers, but its
     * index won't be in mLayerStack. This is because it can still be replayed, but can't have
     * any more drawing ops added to it.
     */
    std::vector<size_t> mLayerStack;

    CanvasState mCanvasState;

    // Contains BakedOpStates and batches
    LinearAllocator mAllocator;
};

}; // namespace uirenderer
}; // namespace android

#endif // ANDROID_HWUI_OP_REORDERER_H