Enforce maximum texture size.

When an app tries to render a bitmap or path larger than the GPU's maximum
texture size, the drawing command is ignored and a warning is logged. This
change also makes texture drawing more robust by catching potential errors
during texture creation.
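For illustration, here is a rough sketch of the bitmap side of the same idea
(the TextureCache code is not part of the hunk below). The helper name and the
simplified pixel/format handling are assumptions, not the code in this change;
LOGW is the same logging macro used in the diff.

    #include <GLES2/gl2.h>
    #include <SkBitmap.h>
    #include <cutils/log.h>

    // Hypothetical helper: reject bitmaps the GPU cannot hold in a single
    // texture, and verify that the upload itself succeeded. Pixel locking and
    // format conversion are omitted for brevity.
    static bool uploadBitmapTexture(const SkBitmap& bitmap, GLuint* outTexture) {
        GLint maxTextureSize;
        glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);

        // Skip the drawing command instead of issuing an upload the driver
        // would reject.
        if (bitmap.width() > maxTextureSize || bitmap.height() > maxTextureSize) {
            LOGW("Bitmap too large to be uploaded into a texture");
            return false;
        }

        glGenTextures(1, outTexture);
        glBindTexture(GL_TEXTURE_2D, *outTexture);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, bitmap.width(), bitmap.height(),
                0, GL_RGBA, GL_UNSIGNED_BYTE, bitmap.getPixels());

        // "Catching potential errors during texture creation": a failed upload
        // (for instance out of memory) is detected here rather than ignored.
        if (glGetError() != GL_NO_ERROR) {
            LOGW("Could not create texture for bitmap");
            glDeleteTextures(1, outTexture);
            *outTexture = 0;
            return false;
        }
        return true;
    }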
This change also fixes a crash in the FontRenderer: its destructor could try
to free an uninitialized array.
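This is the classic guard-the-destructor pattern for a lazily initialized
object. The sketch below is illustrative only: the member names and the
mInitialized flag are assumptions, not the actual FontRenderer fields touched
by this change.

    #include <stddef.h>
    #include <stdint.h>

    class FontRenderer {
    public:
        FontRenderer(): mInitialized(false), mTextMesh(NULL), mIndices(NULL) { }

        ~FontRenderer() {
            // Only free what initialize() actually allocated; freeing arrays
            // that were never set up is what caused the crash.
            if (mInitialized) {
                delete[] mTextMesh;
                delete[] mIndices;
            }
        }

        void initialize() {
            mTextMesh = new float[kMeshSize];
            mIndices = new uint16_t[kMeshSize];
            mInitialized = true;
        }

    private:
        static const uint32_t kMeshSize = 4096;
        bool mInitialized;
        float* mTextMesh;
        uint16_t* mIndices;
    };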
Change-Id: I95ae0939c52192d97b340aa02417bf6d0c962c57
diff --git a/libs/hwui/PathCache.cpp b/libs/hwui/PathCache.cpp
index fa6ea25..4a01ffa 100644
--- a/libs/hwui/PathCache.cpp
+++ b/libs/hwui/PathCache.cpp
@@ -34,6 +34,10 @@
mCache(GenerationCache<PathCacheEntry, PathTexture*>::kUnlimitedCapacity),
mSize(0), mMaxSize(maxByteSize) {
mCache.setOnEntryRemovedListener(this);
+
+ GLint maxTextureSize;
+ glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTextureSize);
+ mMaxTextureSize = maxTextureSize;
}

PathCache::~PathCache() {
@@ -94,9 +98,18 @@
PathTexture* PathCache::addTexture(const PathCacheEntry& entry,
const SkPath *path, const SkPaint* paint) {
const SkRect& bounds = path->getBounds();
+
+ const float pathWidth = bounds.width();
+ const float pathHeight = bounds.height();
+
+ if (pathWidth > mMaxTextureSize || pathHeight > mMaxTextureSize) {
+ LOGW("Path too large to be rendered into a texture");
+ return NULL;
+ }
+
const float offset = entry.strokeWidth * 1.5f;
- const uint32_t width = uint32_t(bounds.width() + offset * 2.0 + 0.5);
- const uint32_t height = uint32_t(bounds.height() + offset * 2.0 + 0.5);
+ const uint32_t width = uint32_t(pathWidth + offset * 2.0 + 0.5);
+ const uint32_t height = uint32_t(pathHeight + offset * 2.0 + 0.5);

const uint32_t size = width * height;
// Don't even try to cache a bitmap that's bigger than the cache