/*
    Copyright 2010 Google Inc.

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
    You may obtain a copy of the License at

         http://www.apache.org/licenses/LICENSE-2.0

    Unless required by applicable law or agreed to in writing, software
    distributed under the License is distributed on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    See the License for the specific language governing permissions and
    limitations under the License.
 */


#include "SkGr.h"

/*  Fill out buffer with the compressed format Ganesh expects from a colortable
    based bitmap: [palette (colortable) + indices].

    At the moment Ganesh only supports the 8-bit version. If Ganesh allowed
    other depths, we could detect that colortable.count is <= 16 and repack the
    indices as nibbles to save RAM, but that would take more time (i.e. be a
    lot slower than memcpy), so we skip it for now.

    Ganesh wants a full 256-entry palette, even though Skia's ctable is only as
    big as colortable.count says it is.
 */
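/*  The buffer passed in must hold at least
    GrGpu::kColorTableSize + width * height bytes (see the allocation in
    sk_gr_create_bitmap_texture() below).
 */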
static void build_compressed_data(void* buffer, const SkBitmap& bitmap) {
    SkASSERT(SkBitmap::kIndex8_Config == bitmap.config());

    SkAutoLockPixels apl(bitmap);
    if (!bitmap.readyToDraw()) {
        SkASSERT(!"bitmap not ready to draw!");
        return;
    }

    SkColorTable* ctable = bitmap.getColorTable();
    char* dst = (char*)buffer;

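    // copy the colortable (ctable->count() premultiplied colors) to the front
    // of the buffer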
    memcpy(dst, ctable->lockColors(), ctable->count() * sizeof(SkPMColor));
    ctable->unlockColors(false);

    // always skip over the full 256 entries, even if we memcpy'd fewer
    dst += GrGpu::kColorTableSize;

    if (bitmap.width() == bitmap.rowBytes()) {
        memcpy(dst, bitmap.getPixels(), bitmap.getSize());
    } else {
        // need to trim off the extra bytes per row
        size_t width = bitmap.width();
        size_t rowBytes = bitmap.rowBytes();
        const char* src = (const char*)bitmap.getPixels();
        for (int y = 0; y < bitmap.height(); y++) {
            memcpy(dst, src, width);
            src += rowBytes;
            dst += width;
        }
    }
}

////////////////////////////////////////////////////////////////////////////////

GrTextureEntry* sk_gr_create_bitmap_texture(GrContext* ctx,
                                            GrTextureKey* key,
                                            const GrSamplerState& sampler,
                                            const SkBitmap& origBitmap) {
    SkAutoLockPixels alp(origBitmap);
    if (!origBitmap.readyToDraw()) {
        return NULL;
    }

    SkBitmap tmpBitmap;

    const SkBitmap* bitmap = &origBitmap;

    GrGpu::TextureDesc desc = {
        0,
        GrGpu::kNone_AALevel,
        bitmap->width(),
        bitmap->height(),
        SkGr::Bitmap2PixelConfig(*bitmap)
    };

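    // Index8 bitmaps either get uploaded as palette + indices (if the GPU
    // supports an 8-bit index config) or are expanded to 32-bit below.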
    if (SkBitmap::kIndex8_Config == bitmap->config()) {
        // build_compressed_data doesn't do npot->pot expansion
        // and paletted textures can't be sub-updated
        if (ctx->supportsIndex8PixelConfig(sampler,
                                           bitmap->width(), bitmap->height())) {
            size_t imagesize = bitmap->width() * bitmap->height() +
                               GrGpu::kColorTableSize;
            SkAutoMalloc storage(imagesize);

            build_compressed_data(storage.get(), origBitmap);

            // our compressed data will be trimmed, so pass width() for its
            // "rowBytes", since they are the same now.
            return ctx->createAndLockTexture(key, sampler, desc, storage.get(),
                                             bitmap->width());

        } else {
            origBitmap.copyTo(&tmpBitmap, SkBitmap::kARGB_8888_Config);
            // now bitmap points to our temp, which has been promoted to 32bits
            bitmap = &tmpBitmap;
        }
    }

    desc.fFormat = SkGr::Bitmap2PixelConfig(*bitmap);
    return ctx->createAndLockTexture(key, sampler, desc, bitmap->getPixels(),
                                     bitmap->rowBytes());
}

////////////////////////////////////////////////////////////////////////////////


GrPathCmd SkGrPathIter::next(GrPoint pts[]) {
    GrAssert(NULL != pts);
#if SK_SCALAR_IS_GR_SCALAR
    return sk_path_verb_to_gr_path_command(fIter.next((SkPoint*)pts));
#else
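    // SkScalar and GrScalar have different representations in this build, so
    // convert the verb's points one at a time.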
    GrPathCmd cmd = sk_path_verb_to_gr_path_command(fIter.next(fPoints));
    int n = NumCommandPoints(cmd);
    for (int i = 0; i < n; ++i) {
        pts[i].fX = SkScalarToGrScalar(fPoints[i].fX);
        pts[i].fY = SkScalarToGrScalar(fPoints[i].fY);
    }
    return cmd;
#endif
}

GrPathCmd SkGrPathIter::next() {
    return sk_path_verb_to_gr_path_command(fIter.next(NULL));
}

void SkGrPathIter::rewind() {
    fIter.setPath(*fPath, false);
}

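// SkPath only tracks whether it is known to be convex, so report that hint
// when available and kNone otherwise.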
GrConvexHint SkGrPathIter::hint() const {
    return fPath->isConvex() ? kConvex_ConvexHint :
                               kNone_ConvexHint;
}

///////////////////////////////////////////////////////////////////////////////

void SkGrClipIterator::reset(const SkClipStack& clipStack) {
    fClipStack = &clipStack;
    fIter.reset(clipStack);
    // Gr has no notion of replace, skip to the
    // last replace in the clip stack.
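    // Everything pushed before that replace is overridden by it, so it can
    // safely be ignored.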
    int lastReplace = 0;
    int curr = 0;
    while (NULL != (fCurr = fIter.next())) {
        if (SkRegion::kReplace_Op == fCurr->fOp) {
            lastReplace = curr;
        }
        ++curr;
    }
    fIter.reset(clipStack);
    for (int i = 0; i < lastReplace+1; ++i) {
        fCurr = fIter.next();
    }
}

GrClipType SkGrClipIterator::getType() const {
    GrAssert(!this->isDone());
    if (NULL != fCurr->fRect) {
        return kRect_ClipType;
    } else {
        GrAssert(NULL != fCurr->fPath);
        return kPath_ClipType;
    }
}

GrSetOp SkGrClipIterator::getOp() const {
    // We skipped to the last "replace" op when this iter was reset.
    // GrClip doesn't allow replace, so treat it as intersect.
    GrSetOp skToGrOps[] = {
        kDifference_SetOp,         // kDifference_Op
        kIntersect_SetOp,          // kIntersect_Op
        kUnion_SetOp,              // kUnion_Op
        kXor_SetOp,                // kXOR_Op
        kReverseDifference_SetOp,  // kReverseDifference_Op
        kIntersect_SetOp           // kReplace_Op
    };
    GR_STATIC_ASSERT(0 == SkRegion::kDifference_Op);
    GR_STATIC_ASSERT(1 == SkRegion::kIntersect_Op);
    GR_STATIC_ASSERT(2 == SkRegion::kUnion_Op);
    GR_STATIC_ASSERT(3 == SkRegion::kXOR_Op);
    GR_STATIC_ASSERT(4 == SkRegion::kReverseDifference_Op);
    GR_STATIC_ASSERT(5 == SkRegion::kReplace_Op);
    return skToGrOps[fCurr->fOp];
}

GrPathFill SkGrClipIterator::getPathFill() const {
    switch (fCurr->fPath->getFillType()) {
        case SkPath::kWinding_FillType:
            return kWinding_PathFill;
        case SkPath::kEvenOdd_FillType:
            return kEvenOdd_PathFill;
        case SkPath::kInverseWinding_FillType:
            return kInverseWinding_PathFill;
        case SkPath::kInverseEvenOdd_FillType:
            return kInverseEvenOdd_PathFill;
        default:
            GrCrash("Unsupported path fill in clip.");
            return kWinding_PathFill; // suppress warning
    }
}

///////////////////////////////////////////////////////////////////////////////

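// Maps an SkBitmap::Config to the Gr pixel config used for its texture.
// Opaque 8888 bitmaps get the RGBX config, so the unused alpha channel can
// be ignored.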
GrTexture::PixelConfig SkGr::BitmapConfig2PixelConfig(SkBitmap::Config config,
                                                      bool isOpaque) {
    switch (config) {
        case SkBitmap::kA8_Config:
            return GrTexture::kAlpha_8_PixelConfig;
        case SkBitmap::kIndex8_Config:
            return GrTexture::kIndex_8_PixelConfig;
        case SkBitmap::kRGB_565_Config:
            return GrTexture::kRGB_565_PixelConfig;
        case SkBitmap::kARGB_4444_Config:
            return GrTexture::kRGBA_4444_PixelConfig;
        case SkBitmap::kARGB_8888_Config:
            if (isOpaque) {
                return GrTexture::kRGBX_8888_PixelConfig;
            } else {
                return GrTexture::kRGBA_8888_PixelConfig;
            }
        default:
            return GrTexture::kUnknown_PixelConfig;
    }
}

void SkGr::AbandonAllTextures(GrContext* ctx) {
    ctx->abandonAllTextures();
}