Stan Iliev | d495f43 | 2017-10-09 15:49:32 -0400 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2017 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
#include "ShaderCache.h"

#include <GrDirectContext.h>
#include <SkData.h>
#include <gui/TraceUtils.h>
#include <log/log.h>
#include <openssl/sha.h>

#include <algorithm>
#include <array>
#include <memory>
#include <thread>

#include "FileBlobCache.h"
#include "Properties.h"
Stan Iliev | d495f43 | 2017-10-09 15:49:32 -0400 | [diff] [blame] | 28 | |
| 29 | namespace android { |
| 30 | namespace uirenderer { |
| 31 | namespace skiapipeline { |
| 32 | |
// Cache size limits.
constexpr size_t maxKeySize = 1024;               // largest key BlobCache will accept
constexpr size_t maxValueSize = 2 * 1024 * 1024;  // largest single shader/pipeline blob
constexpr size_t maxTotalSize = 4 * 1024 * 1024;  // total in-RAM/on-disk cache footprint cap
// A single maximal entry must fit in the cache alongside its bookkeeping.
static_assert(maxKeySize + maxValueSize < maxTotalSize);
Stan Iliev | d495f43 | 2017-10-09 15:49:32 -0400 | [diff] [blame] | 38 | |
| 39 | ShaderCache::ShaderCache() { |
| 40 | // There is an "incomplete FileBlobCache type" compilation error, if ctor is moved to header. |
| 41 | } |
| 42 | |
// Process-wide singleton instance, constructed during static initialization.
ShaderCache ShaderCache::sCache;
| 44 | |
// Returns the process-wide ShaderCache singleton.
ShaderCache& ShaderCache::get() {
    return sCache;
}
| 48 | |
Yichi Chen | 9f95955 | 2018-03-29 21:21:54 +0800 | [diff] [blame] | 49 | bool ShaderCache::validateCache(const void* identity, ssize_t size) { |
John Reck | 283bb46 | 2018-12-13 16:40:14 -0800 | [diff] [blame] | 50 | if (nullptr == identity && size == 0) return true; |
Yichi Chen | 9f95955 | 2018-03-29 21:21:54 +0800 | [diff] [blame] | 51 | |
| 52 | if (nullptr == identity || size < 0) { |
| 53 | if (CC_UNLIKELY(Properties::debugLevel & kDebugCaches)) { |
| 54 | ALOGW("ShaderCache::validateCache invalid cache identity"); |
| 55 | } |
| 56 | mBlobCache->clear(); |
| 57 | return false; |
| 58 | } |
| 59 | |
| 60 | SHA256_CTX ctx; |
| 61 | SHA256_Init(&ctx); |
| 62 | |
| 63 | SHA256_Update(&ctx, identity, size); |
| 64 | mIDHash.resize(SHA256_DIGEST_LENGTH); |
| 65 | SHA256_Final(mIDHash.data(), &ctx); |
| 66 | |
| 67 | std::array<uint8_t, SHA256_DIGEST_LENGTH> hash; |
| 68 | auto key = sIDKey; |
| 69 | auto loaded = mBlobCache->get(&key, sizeof(key), hash.data(), hash.size()); |
| 70 | |
John Reck | 283bb46 | 2018-12-13 16:40:14 -0800 | [diff] [blame] | 71 | if (loaded && std::equal(hash.begin(), hash.end(), mIDHash.begin())) return true; |
Yichi Chen | 9f95955 | 2018-03-29 21:21:54 +0800 | [diff] [blame] | 72 | |
| 73 | if (CC_UNLIKELY(Properties::debugLevel & kDebugCaches)) { |
| 74 | ALOGW("ShaderCache::validateCache cache validation fails"); |
| 75 | } |
| 76 | mBlobCache->clear(); |
| 77 | return false; |
| 78 | } |
| 79 | |
| 80 | void ShaderCache::initShaderDiskCache(const void* identity, ssize_t size) { |
Stan Iliev | d495f43 | 2017-10-09 15:49:32 -0400 | [diff] [blame] | 81 | ATRACE_NAME("initShaderDiskCache"); |
Matt Buckley | e278da1 | 2023-06-20 22:51:05 +0000 | [diff] [blame] | 82 | std::lock_guard lock(mMutex); |
Lingfeng Yang | 3a9f223 | 2018-01-24 10:40:18 -0800 | [diff] [blame] | 83 | |
| 84 | // Emulators can switch between different renders either as part of config |
| 85 | // or snapshot migration. Also, program binaries may not work well on some |
| 86 | // desktop / laptop GPUs. Thus, disable the shader disk cache for emulator builds. |
| 87 | if (!Properties::runningInEmulator && mFilename.length() > 0) { |
Stan Iliev | d495f43 | 2017-10-09 15:49:32 -0400 | [diff] [blame] | 88 | mBlobCache.reset(new FileBlobCache(maxKeySize, maxValueSize, maxTotalSize, mFilename)); |
Yichi Chen | 9f95955 | 2018-03-29 21:21:54 +0800 | [diff] [blame] | 89 | validateCache(identity, size); |
Stan Iliev | d495f43 | 2017-10-09 15:49:32 -0400 | [diff] [blame] | 90 | mInitialized = true; |
Matt Buckley | e9adfbd | 2023-07-06 23:15:30 +0000 | [diff] [blame] | 91 | if (identity != nullptr && size > 0 && mIDHash.size()) { |
| 92 | set(&sIDKey, sizeof(sIDKey), mIDHash.data(), mIDHash.size()); |
| 93 | } |
Stan Iliev | d495f43 | 2017-10-09 15:49:32 -0400 | [diff] [blame] | 94 | } |
| 95 | } |
| 96 | |
| 97 | void ShaderCache::setFilename(const char* filename) { |
Matt Buckley | e278da1 | 2023-06-20 22:51:05 +0000 | [diff] [blame] | 98 | std::lock_guard lock(mMutex); |
Stan Iliev | d495f43 | 2017-10-09 15:49:32 -0400 | [diff] [blame] | 99 | mFilename = filename; |
| 100 | } |
| 101 | |
// Skia PersistentCache hook: looks up |key| in the blob cache and returns the
// stored binary, or nullptr when the cache is uninitialized, the key is
// absent, or allocation fails. The returned SkData owns the malloc'ed buffer.
sk_sp<SkData> ShaderCache::load(const SkData& key) {
    ATRACE_NAME("ShaderCache::load");
    size_t keySize = key.size();
    std::lock_guard lock(mMutex);
    if (!mInitialized) {
        return nullptr;
    }

    // mObservedBlobValueSize is reasonably big to avoid memory reallocation
    // Allocate a buffer with malloc. SkData takes ownership of that allocation and will call free.
    void* valueBuffer = malloc(mObservedBlobValueSize);
    if (!valueBuffer) {
        return nullptr;
    }
    size_t valueSize = mBlobCache->get(key.data(), keySize, valueBuffer, mObservedBlobValueSize);
    int maxTries = 3;
    // A return value larger than the buffer means the entry did not fit; grow
    // the buffer (capped at maxValueSize) and retry a bounded number of times.
    while (valueSize > mObservedBlobValueSize && maxTries > 0) {
        mObservedBlobValueSize = std::min(valueSize, maxValueSize);
        void* newValueBuffer = realloc(valueBuffer, mObservedBlobValueSize);
        if (!newValueBuffer) {
            free(valueBuffer);
            return nullptr;
        }
        valueBuffer = newValueBuffer;
        valueSize = mBlobCache->get(key.data(), keySize, valueBuffer, mObservedBlobValueSize);
        maxTries--;
    }
    if (!valueSize) {
        // Key not present in the cache.
        free(valueBuffer);
        return nullptr;
    }
    if (valueSize > mObservedBlobValueSize) {
        // Entry still does not fit after the retries above; give up.
        ALOGE("ShaderCache::load value size is too big %d", (int)valueSize);
        free(valueBuffer);
        return nullptr;
    }
    mNumShadersCachedInRam++;
    ATRACE_FORMAT("HWUI RAM cache: %d shaders", mNumShadersCachedInRam);
    // SkData takes ownership of valueBuffer and will free() it.
    return SkData::MakeFromMalloc(valueBuffer, valueSize);
}
| 142 | |
Matt Buckley | e9adfbd | 2023-07-06 23:15:30 +0000 | [diff] [blame] | 143 | void ShaderCache::set(const void* key, size_t keySize, const void* value, size_t valueSize) { |
| 144 | switch (mBlobCache->set(key, keySize, value, valueSize)) { |
Leon Scroggins III | 77644a2 | 2022-05-03 15:50:51 -0400 | [diff] [blame] | 145 | case BlobCache::InsertResult::kInserted: |
| 146 | // This is what we expect/hope. It means the cache is large enough. |
| 147 | return; |
| 148 | case BlobCache::InsertResult::kDidClean: { |
| 149 | ATRACE_FORMAT("ShaderCache: evicted an entry to fit {key: %lu value %lu}!", keySize, |
| 150 | valueSize); |
Matt Buckley | e9adfbd | 2023-07-06 23:15:30 +0000 | [diff] [blame] | 151 | if (mIDHash.size()) { |
| 152 | set(&sIDKey, sizeof(sIDKey), mIDHash.data(), mIDHash.size()); |
| 153 | } |
Leon Scroggins III | 77644a2 | 2022-05-03 15:50:51 -0400 | [diff] [blame] | 154 | return; |
| 155 | } |
| 156 | case BlobCache::InsertResult::kNotEnoughSpace: { |
| 157 | ATRACE_FORMAT("ShaderCache: could not fit {key: %lu value %lu}!", keySize, valueSize); |
| 158 | return; |
| 159 | } |
| 160 | case BlobCache::InsertResult::kInvalidValueSize: |
| 161 | case BlobCache::InsertResult::kInvalidKeySize: { |
| 162 | ATRACE_FORMAT("ShaderCache: invalid size {key: %lu value %lu}!", keySize, valueSize); |
| 163 | return; |
| 164 | } |
| 165 | case BlobCache::InsertResult::kKeyTooBig: |
| 166 | case BlobCache::InsertResult::kValueTooBig: |
| 167 | case BlobCache::InsertResult::kCombinedTooBig: { |
| 168 | ATRACE_FORMAT("ShaderCache: entry too big: {key: %lu value %lu}!", keySize, valueSize); |
| 169 | return; |
| 170 | } |
| 171 | } |
| 172 | } |
Leon Scroggins III | 77644a2 | 2022-05-03 15:50:51 -0400 | [diff] [blame] | 173 | |
// Flushes the blob cache to its backing file. Caller must hold mMutex
// exclusively (see the deferred-save thread in store()). The exclusive lock is
// temporarily downgraded to shared mode around the file write.
void ShaderCache::saveToDiskLocked() {
    ATRACE_NAME("ShaderCache::saveToDiskLocked");
    if (mInitialized && mBlobCache) {
        // The most straightforward way to make ownership shared
        // NOTE(review): unlock() followed by lock_shared() is not an atomic
        // downgrade — another writer can slip in between. Confirm that is
        // acceptable for writeToFile().
        mMutex.unlock();
        mMutex.lock_shared();
        mBlobCache->writeToFile();
        mMutex.unlock_shared();
        mMutex.lock();
    }
}
| 185 | |
// Skia PersistentCache hook: stores a compiled shader (or, when flagged via
// mInStoreVkPipelineInProgress, the Vulkan pipeline cache blob) under |key|,
// then schedules a single deferred write of the whole cache to disk.
void ShaderCache::store(const SkData& key, const SkData& data, const SkString& /*description*/) {
    ATRACE_NAME("ShaderCache::store");
    std::lock_guard lock(mMutex);
    mNumShadersCachedInRam++;
    ATRACE_FORMAT("HWUI RAM cache: %d shaders", mNumShadersCachedInRam);

    if (!mInitialized) {
        return;
    }

    size_t valueSize = data.size();
    size_t keySize = key.size();
    if (keySize == 0 || valueSize == 0 || valueSize >= maxValueSize) {
        ALOGW("ShaderCache::store: sizes %d %d not allowed", (int)keySize, (int)valueSize);
        return;
    }

    const void* value = data.data();

    if (mInStoreVkPipelineInProgress) {
        if (mOldPipelineCacheSize == -1) {
            // Record the initial pipeline cache size stored in the file.
            mOldPipelineCacheSize = mBlobCache->get(key.data(), keySize, nullptr, 0);
        }
        if (mNewPipelineCacheSize != -1 && mNewPipelineCacheSize == valueSize) {
            // There has not been change in pipeline cache size. Stop trying to save.
            mTryToStorePipelineCache = false;
            return;
        }
        mNewPipelineCacheSize = valueSize;
    } else {
        mCacheDirty = true;
        // If there are new shaders compiled, we probably have new pipeline state too.
        // Store pipeline cache on the next flush.
        mNewPipelineCacheSize = -1;
        mTryToStorePipelineCache = true;
    }
    set(key.data(), keySize, value, valueSize);

    // Schedule exactly one deferred disk save; stores that land before the
    // delay expires piggyback on the pending save.
    if (!mSavePending && mDeferredSaveDelayMs > 0) {
        mSavePending = true;
        std::thread deferredSaveThread([this]() {
            // NOTE(review): mDeferredSaveDelayMs is read here without holding
            // mMutex — presumably it is effectively constant; confirm.
            usleep(mDeferredSaveDelayMs * 1000);  // milliseconds to microseconds
            std::lock_guard lock(mMutex);
            // Store file on disk if there a new shader or Vulkan pipeline cache size changed.
            if (mCacheDirty || mNewPipelineCacheSize != mOldPipelineCacheSize) {
                saveToDiskLocked();
                mOldPipelineCacheSize = mNewPipelineCacheSize;
                mTryToStorePipelineCache = false;
                mCacheDirty = false;
            }
            mSavePending = false;
        });
        // Detached thread captures |this|; safe only because sCache has
        // process lifetime.
        deferredSaveThread.detach();
    }
}
| 242 | |
Adlai Holler | d234521 | 2020-10-07 14:16:40 -0400 | [diff] [blame] | 243 | void ShaderCache::onVkFrameFlushed(GrDirectContext* context) { |
Stan Iliev | 14211aa | 2019-01-14 12:29:30 -0500 | [diff] [blame] | 244 | { |
Matt Buckley | e278da1 | 2023-06-20 22:51:05 +0000 | [diff] [blame] | 245 | mMutex.lock_shared(); |
Stan Iliev | 14211aa | 2019-01-14 12:29:30 -0500 | [diff] [blame] | 246 | if (!mInitialized || !mTryToStorePipelineCache) { |
Matt Buckley | e278da1 | 2023-06-20 22:51:05 +0000 | [diff] [blame] | 247 | mMutex.unlock_shared(); |
Stan Iliev | 14211aa | 2019-01-14 12:29:30 -0500 | [diff] [blame] | 248 | return; |
| 249 | } |
Matt Buckley | e278da1 | 2023-06-20 22:51:05 +0000 | [diff] [blame] | 250 | mMutex.unlock_shared(); |
Stan Iliev | 14211aa | 2019-01-14 12:29:30 -0500 | [diff] [blame] | 251 | } |
| 252 | mInStoreVkPipelineInProgress = true; |
| 253 | context->storeVkPipelineCacheData(); |
| 254 | mInStoreVkPipelineInProgress = false; |
| 255 | } |
| 256 | |
Stan Iliev | d495f43 | 2017-10-09 15:49:32 -0400 | [diff] [blame] | 257 | } /* namespace skiapipeline */ |
| 258 | } /* namespace uirenderer */ |
| 259 | } /* namespace android */ |