/*
 * Copyright (C) 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#ifndef ANDROID_HWUI_OP_REORDERER_H
18#define ANDROID_HWUI_OP_REORDERER_H
19
20#include "BakedOpState.h"
21#include "CanvasState.h"
22#include "DisplayList.h"
23#include "RecordedOp.h"
24
25#include <vector>
26#include <unordered_map>
27
// Forward declaration: only pointers/references to SkRect appear below.
struct SkRect;
29
Chris Craikb565df12015-10-05 13:00:52 -070030namespace android {
31namespace uirenderer {
32
33class BakedOpState;
34class BatchBase;
Chris Craik0b7e8242015-10-28 16:50:44 -070035class LayerUpdateQueue;
Chris Craikb565df12015-10-05 13:00:52 -070036class MergingOpBatch;
Chris Craik5854b342015-10-26 15:49:56 -070037class OffscreenBuffer;
Chris Craikb565df12015-10-05 13:00:52 -070038class OpBatch;
39class Rect;
40
41typedef int batchid_t;
42typedef const void* mergeid_t;
43
44namespace OpBatchType {
45 enum {
Chris Craikb565df12015-10-05 13:00:52 -070046 Bitmap,
Chris Craikf09ff5a2015-12-08 17:21:58 -080047 MergedPatch,
Chris Craikb565df12015-10-05 13:00:52 -070048 AlphaVertices,
49 Vertices,
50 AlphaMaskTexture,
51 Text,
52 ColorText,
Chris Craikd3daa312015-11-06 10:59:56 -080053 Shadow,
Chris Craikd2dfd8f2015-12-16 14:27:20 -080054 TextureLayer,
55 Functor,
Chris Craikb87eadd2016-01-06 09:16:05 -080056 CopyToLayer,
57 CopyFromLayer,
Chris Craikb565df12015-10-05 13:00:52 -070058
59 Count // must be last
60 };
61}
62
Chris Craik6fe991e52015-10-20 09:39:42 -070063class OpReorderer : public CanvasStateClient {
Chris Craik15c3f192015-12-03 12:16:56 -080064 typedef void (*BakedOpReceiver)(void*, const BakedOpState&);
65 typedef void (*MergedOpReceiver)(void*, const MergedBakedOpList& opList);
Chris Craik6fe991e52015-10-20 09:39:42 -070066
67 /**
68 * Stores the deferred render operations and state used to compute ordering
69 * for a single FBO/layer.
70 */
71 class LayerReorderer {
72 public:
Chris Craik0b7e8242015-10-28 16:50:44 -070073 // Create LayerReorderer for Fbo0
Chris Craik98787e62015-11-13 10:55:30 -080074 LayerReorderer(uint32_t width, uint32_t height, const Rect& repaintRect)
75 : LayerReorderer(width, height, repaintRect, nullptr, nullptr) {};
Chris Craik0b7e8242015-10-28 16:50:44 -070076
77 // Create LayerReorderer for an offscreen layer, where beginLayerOp is present for a
78 // saveLayer, renderNode is present for a HW layer.
79 LayerReorderer(uint32_t width, uint32_t height,
Chris Craik98787e62015-11-13 10:55:30 -080080 const Rect& repaintRect, const BeginLayerOp* beginLayerOp, RenderNode* renderNode);
Chris Craik818c9fb2015-10-23 14:33:42 -070081
Chris Craik6fe991e52015-10-20 09:39:42 -070082 // iterate back toward target to see if anything drawn since should overlap the new op
83 // if no target, merging ops still iterate to find similar batch to insert after
84 void locateInsertIndex(int batchId, const Rect& clippedBounds,
85 BatchBase** targetBatch, size_t* insertBatchIndex) const;
86
87 void deferUnmergeableOp(LinearAllocator& allocator, BakedOpState* op, batchid_t batchId);
88
89 // insertion point of a new batch, will hopefully be immediately after similar batch
90 // (generally, should be similar shader)
91 void deferMergeableOp(LinearAllocator& allocator,
92 BakedOpState* op, batchid_t batchId, mergeid_t mergeId);
93
Chris Craik15c3f192015-12-03 12:16:56 -080094 void replayBakedOpsImpl(void* arg, BakedOpReceiver* receivers, MergedOpReceiver*) const;
Chris Craik6fe991e52015-10-20 09:39:42 -070095
Chris Craikb87eadd2016-01-06 09:16:05 -080096 void deferLayerClear(const Rect& dstRect);
97
Chris Craik818c9fb2015-10-23 14:33:42 -070098 bool empty() const {
99 return mBatches.empty();
100 }
101
Chris Craik6fe991e52015-10-20 09:39:42 -0700102 void clear() {
103 mBatches.clear();
104 }
105
106 void dump() const;
107
Chris Craik818c9fb2015-10-23 14:33:42 -0700108 const uint32_t width;
109 const uint32_t height;
Chris Craik98787e62015-11-13 10:55:30 -0800110 const Rect repaintRect;
Chris Craik0b7e8242015-10-28 16:50:44 -0700111 OffscreenBuffer* offscreenBuffer;
112 const BeginLayerOp* beginLayerOp;
113 const RenderNode* renderNode;
Chris Craikb87eadd2016-01-06 09:16:05 -0800114
115 // list of deferred CopyFromLayer ops, to be deferred upon encountering EndUnclippedLayerOps
116 std::vector<BakedOpState*> activeUnclippedSaveLayers;
Chris Craik6fe991e52015-10-20 09:39:42 -0700117 private:
Chris Craikb87eadd2016-01-06 09:16:05 -0800118 void flushLayerClears(LinearAllocator& allocator);
119
Chris Craik6fe991e52015-10-20 09:39:42 -0700120 std::vector<BatchBase*> mBatches;
121
122 /**
123 * Maps the mergeid_t returned by an op's getMergeId() to the most recently seen
124 * MergingDrawBatch of that id. These ids are unique per draw type and guaranteed to not
125 * collide, which avoids the need to resolve mergeid collisions.
126 */
127 std::unordered_map<mergeid_t, MergingOpBatch*> mMergingBatchLookup[OpBatchType::Count];
128
129 // Maps batch ids to the most recent *non-merging* batch of that id
130 OpBatch* mBatchLookup[OpBatchType::Count] = { nullptr };
Chris Craikb87eadd2016-01-06 09:16:05 -0800131
132 std::vector<Rect> mClearRects;
Chris Craik6fe991e52015-10-20 09:39:42 -0700133 };
Chris Craik98787e62015-11-13 10:55:30 -0800134
Chris Craikb565df12015-10-05 13:00:52 -0700135public:
Chris Craik0b7e8242015-10-28 16:50:44 -0700136 OpReorderer(const LayerUpdateQueue& layers, const SkRect& clip,
137 uint32_t viewportWidth, uint32_t viewportHeight,
Chris Craik98787e62015-11-13 10:55:30 -0800138 const std::vector< sp<RenderNode> >& nodes, const Vector3& lightCenter);
Chris Craikb565df12015-10-05 13:00:52 -0700139
Chris Craik818c9fb2015-10-23 14:33:42 -0700140 virtual ~OpReorderer() {}
Chris Craikb565df12015-10-05 13:00:52 -0700141
142 /**
Chris Craik6fe991e52015-10-20 09:39:42 -0700143 * replayBakedOps() is templated based on what class will receive ops being replayed.
Chris Craikb565df12015-10-05 13:00:52 -0700144 *
145 * It constructs a lookup array of lambdas, which allows a recorded BakeOpState to use
146 * state->op->opId to lookup a receiver that will be called when the op is replayed.
147 *
Chris Craikb565df12015-10-05 13:00:52 -0700148 */
Chris Craik5854b342015-10-26 15:49:56 -0700149 template <typename StaticDispatcher, typename Renderer>
150 void replayBakedOps(Renderer& renderer) {
Chris Craik15c3f192015-12-03 12:16:56 -0800151 /**
Chris Craik7cbf63d2016-01-06 13:46:52 -0800152 * Defines a LUT of lambdas which allow a recorded BakedOpState to use state->op->opId to
Chris Craik15c3f192015-12-03 12:16:56 -0800153 * dispatch the op via a method on a static dispatcher when the op is replayed.
154 *
155 * For example a BitmapOp would resolve, via the lambda lookup, to calling:
156 *
157 * StaticDispatcher::onBitmapOp(Renderer& renderer, const BitmapOp& op, const BakedOpState& state);
158 */
159 #define X(Type) \
160 [](void* renderer, const BakedOpState& state) { \
Chris Craikb87eadd2016-01-06 09:16:05 -0800161 StaticDispatcher::on##Type(*(static_cast<Renderer*>(renderer)), \
162 static_cast<const Type&>(*(state.op)), state); \
Chris Craik15c3f192015-12-03 12:16:56 -0800163 },
Chris Craik7cbf63d2016-01-06 13:46:52 -0800164 static BakedOpReceiver unmergedReceivers[] = BUILD_RENDERABLE_OP_LUT(X);
Chris Craik15c3f192015-12-03 12:16:56 -0800165 #undef X
166
167 /**
Chris Craik7cbf63d2016-01-06 13:46:52 -0800168 * Defines a LUT of lambdas which allow merged arrays of BakedOpState* to be passed to a
169 * static dispatcher when the group of merged ops is replayed.
Chris Craik15c3f192015-12-03 12:16:56 -0800170 */
171 #define X(Type) \
172 [](void* renderer, const MergedBakedOpList& opList) { \
Chris Craik15c3f192015-12-03 12:16:56 -0800173 StaticDispatcher::onMerged##Type##s(*(static_cast<Renderer*>(renderer)), opList); \
174 },
Chris Craik7cbf63d2016-01-06 13:46:52 -0800175 static MergedOpReceiver mergedReceivers[] = BUILD_MERGEABLE_OP_LUT(X);
Chris Craik15c3f192015-12-03 12:16:56 -0800176 #undef X
Chris Craik818c9fb2015-10-23 14:33:42 -0700177
178 // Relay through layers in reverse order, since layers
179 // later in the list will be drawn by earlier ones
180 for (int i = mLayerReorderers.size() - 1; i >= 1; i--) {
181 LayerReorderer& layer = mLayerReorderers[i];
Chris Craik0b7e8242015-10-28 16:50:44 -0700182 if (layer.renderNode) {
183 // cached HW layer - can't skip layer if empty
Chris Craik98787e62015-11-13 10:55:30 -0800184 renderer.startRepaintLayer(layer.offscreenBuffer, layer.repaintRect);
Chris Craik15c3f192015-12-03 12:16:56 -0800185 layer.replayBakedOpsImpl((void*)&renderer, unmergedReceivers, mergedReceivers);
Chris Craik0b7e8242015-10-28 16:50:44 -0700186 renderer.endLayer();
187 } else if (!layer.empty()) { // save layer - skip entire layer if empty
Chris Craikd3daa312015-11-06 10:59:56 -0800188 layer.offscreenBuffer = renderer.startTemporaryLayer(layer.width, layer.height);
Chris Craik15c3f192015-12-03 12:16:56 -0800189 layer.replayBakedOpsImpl((void*)&renderer, unmergedReceivers, mergedReceivers);
Chris Craik5854b342015-10-26 15:49:56 -0700190 renderer.endLayer();
Chris Craik818c9fb2015-10-23 14:33:42 -0700191 }
192 }
193
194 const LayerReorderer& fbo0 = mLayerReorderers[0];
Chris Craik98787e62015-11-13 10:55:30 -0800195 renderer.startFrame(fbo0.width, fbo0.height, fbo0.repaintRect);
Chris Craik15c3f192015-12-03 12:16:56 -0800196 fbo0.replayBakedOpsImpl((void*)&renderer, unmergedReceivers, mergedReceivers);
Chris Craike4db79d2015-12-22 16:32:23 -0800197 renderer.endFrame(fbo0.repaintRect);
Chris Craikb565df12015-10-05 13:00:52 -0700198 }
Chris Craik6fe991e52015-10-20 09:39:42 -0700199
200 void dump() const {
201 for (auto&& layer : mLayerReorderers) {
202 layer.dump();
203 }
204 }
205
206 ///////////////////////////////////////////////////////////////////
207 /// CanvasStateClient interface
208 ///////////////////////////////////////////////////////////////////
209 virtual void onViewportInitialized() override;
210 virtual void onSnapshotRestored(const Snapshot& removed, const Snapshot& restored) override;
211 virtual GLuint getTargetFbo() const override { return 0; }
212
Chris Craikb565df12015-10-05 13:00:52 -0700213private:
Chris Craik161f54b2015-11-05 11:08:52 -0800214 enum class ChildrenSelectMode {
215 Negative,
216 Positive
217 };
Chris Craik0b7e8242015-10-28 16:50:44 -0700218 void saveForLayer(uint32_t layerWidth, uint32_t layerHeight,
Chris Craik8ecf41c2015-11-16 10:27:59 -0800219 float contentTranslateX, float contentTranslateY,
220 const Rect& repaintRect,
221 const Vector3& lightCenter,
222 const BeginLayerOp* beginLayerOp, RenderNode* renderNode);
Chris Craik0b7e8242015-10-28 16:50:44 -0700223 void restoreForLayer();
224
Chris Craik6fe991e52015-10-20 09:39:42 -0700225 LayerReorderer& currentLayer() { return mLayerReorderers[mLayerStack.back()]; }
226
227 BakedOpState* tryBakeOpState(const RecordedOp& recordedOp) {
Chris Craike4db79d2015-12-22 16:32:23 -0800228 return BakedOpState::tryConstruct(mAllocator, *mCanvasState.writableSnapshot(), recordedOp);
Chris Craik6fe991e52015-10-20 09:39:42 -0700229 }
Chris Craikb565df12015-10-05 13:00:52 -0700230
Chris Craik8d1f2122015-11-24 16:40:09 -0800231 // should always be surrounded by a save/restore pair, and not called if DisplayList is null
Chris Craik0b7e8242015-10-28 16:50:44 -0700232 void deferNodePropsAndOps(RenderNode& node);
233
Chris Craik161f54b2015-11-05 11:08:52 -0800234 template <typename V>
235 void defer3dChildren(ChildrenSelectMode mode, const V& zTranslatedNodes);
236
Chris Craik8d1f2122015-11-24 16:40:09 -0800237 void deferShadow(const RenderNodeOp& casterOp);
238
239 void deferProjectedChildren(const RenderNode& renderNode);
240
241 void deferNodeOps(const RenderNode& renderNode);
242
Chris Craik268a9c02015-12-09 18:05:12 -0800243 void deferRenderNodeOpImpl(const RenderNodeOp& op);
Chris Craik161f54b2015-11-05 11:08:52 -0800244
Chris Craik15c3f192015-12-03 12:16:56 -0800245 void replayBakedOpsImpl(void* arg, BakedOpReceiver* receivers);
Chris Craikb565df12015-10-05 13:00:52 -0700246
Chris Craikd3daa312015-11-06 10:59:56 -0800247 SkPath* createFrameAllocatedPath() {
Chris Craikb87eadd2016-01-06 09:16:05 -0800248 return mAllocator.create<SkPath>();
Chris Craikd3daa312015-11-06 10:59:56 -0800249 }
Chris Craik386aa032015-12-07 17:08:25 -0800250
Chris Craik268a9c02015-12-09 18:05:12 -0800251 void deferStrokeableOp(const RecordedOp& op, batchid_t batchId,
Chris Craik386aa032015-12-07 17:08:25 -0800252 BakedOpState::StrokeBehavior strokeBehavior = BakedOpState::StrokeBehavior::StyleDefined);
253
Chris Craikb565df12015-10-05 13:00:52 -0700254 /**
Chris Craik268a9c02015-12-09 18:05:12 -0800255 * Declares all OpReorderer::deferXXXXOp() methods for every RecordedOp type.
Chris Craikb565df12015-10-05 13:00:52 -0700256 *
257 * These private methods are called from within deferImpl to defer each individual op
258 * type differently.
259 */
Chris Craik7cbf63d2016-01-06 13:46:52 -0800260#define X(Type) void defer##Type(const Type& op);
261 MAP_DEFERRABLE_OPS(X)
262#undef X
Chris Craikb565df12015-10-05 13:00:52 -0700263
Chris Craik6fe991e52015-10-20 09:39:42 -0700264 // List of every deferred layer's render state. Replayed in reverse order to render a frame.
265 std::vector<LayerReorderer> mLayerReorderers;
Chris Craikb565df12015-10-05 13:00:52 -0700266
Chris Craik6fe991e52015-10-20 09:39:42 -0700267 /*
268 * Stack of indices within mLayerReorderers representing currently active layers. If drawing
269 * layerA within a layerB, will contain, in order:
270 * - 0 (representing FBO 0, always present)
271 * - layerB's index
272 * - layerA's index
273 *
274 * Note that this doesn't vector doesn't always map onto all values of mLayerReorderers. When a
275 * layer is finished deferring, it will still be represented in mLayerReorderers, but it's index
276 * won't be in mLayerStack. This is because it can be replayed, but can't have any more drawing
277 * ops added to it.
278 */
279 std::vector<size_t> mLayerStack;
Chris Craikb565df12015-10-05 13:00:52 -0700280
Chris Craikb565df12015-10-05 13:00:52 -0700281 CanvasState mCanvasState;
282
283 // contains ResolvedOps and Batches
284 LinearAllocator mAllocator;
Chris Craikb565df12015-10-05 13:00:52 -0700285};
286
287}; // namespace uirenderer
288}; // namespace android
289
290#endif // ANDROID_HWUI_OP_REORDERER_H