Lines matching "cache" in XNNPACK's src/cache.c
#include <assert.h>
#include <stddef.h>
#include <stdint.h>
#include <string.h>

#include "xnnpack.h"
#include "xnnpack/allocator.h"
#include "xnnpack/cache.h"
#include "xnnpack/common.h"
#include "xnnpack/log.h"
#include "xnnpack/math.h"
#include "xnnpack/mutex.h"
static inline size_t cache_size(struct xnn_cache* cache) {
  switch (cache->type) {
    case xnn_cache_type_code: return cache->code.size;
    case xnn_cache_type_weights: return cache->weights.size;
    default: XNN_UNREACHABLE;
  }
  return 0;
}

static inline void* cache_start(struct xnn_cache* cache) {
  switch (cache->type) {
    case xnn_cache_type_code: return cache->code.start;
    case xnn_cache_type_weights: return cache->weights.start;
    default: XNN_UNREACHABLE;
  }
  return NULL;
}
enum xnn_status xnn_init_cache_with_size(struct xnn_cache* cache, size_t num_buckets, enum xnn_cache_type cache_type)
{
  memset(cache, 0, sizeof(struct xnn_cache));
  cache->buckets = (struct xnn_cache_bucket*) xnn_allocate_zero_memory(num_buckets * sizeof(struct xnn_cache_bucket));
  if (cache->buckets == NULL) {
    xnn_log_error("failed to allocate memory for cache buckets");
    return xnn_status_out_of_memory;
  }

  cache->type = cache_type;
  cache->num_buckets = num_buckets;
  return xnn_status_success;
}
enum xnn_status xnn_init_code_cache_with_size(struct xnn_code_cache* cache, size_t num_buckets)
{
  memset(cache, 0, sizeof(struct xnn_code_cache));
  enum xnn_status status = xnn_init_cache_with_size(&cache->cache, num_buckets, xnn_cache_type_code);
  if (status != xnn_status_success) {
    goto error;
  }

  status = xnn_allocate_code_memory(&cache->cache.code, XNN_DEFAULT_CODE_BUFFER_SIZE);
  if (status != xnn_status_success) {
    goto error;
  }

  return xnn_status_success;

error:
  xnn_release_code_cache(cache);
  return status;
}

enum xnn_status xnn_init_code_cache(struct xnn_code_cache* cache)
{
  return xnn_init_code_cache_with_size(cache, XNN_CACHE_INITIAL_BUCKETS);
}
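// Usage sketch (illustrative, not part of src/cache.c; the function name is
// hypothetical): the expected init/release lifecycle of a code cache, using
// only functions defined in this file.
static void example_code_cache_lifecycle(void) {
  struct xnn_code_cache code_cache;
  if (xnn_init_code_cache(&code_cache) != xnn_status_success) {
    return;  // bucket table or code buffer allocation failed
  }
  // ... generate microkernels into code_cache.cache.code and deduplicate
  // them with xnn_get_or_insert_code_cache (see below) ...
  xnn_release_code_cache(&code_cache);
}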
static bool cache_buckets_grow(struct xnn_cache* cache)
{
  const size_t new_num_buckets = cache->num_buckets * XNN_CACHE_GROWTH_FACTOR;
  struct xnn_cache tmp_cache;
  if (xnn_init_cache_with_size(&tmp_cache, new_num_buckets, cache->type) != xnn_status_success) {
    return false;
  }

  for (size_t i = 0; i < cache->num_buckets; i++) {
    const struct xnn_cache_bucket b = cache->buckets[i];
    if (b.size == 0) {
      continue;  // skip empty buckets
    }
    // Rehash the entry into tmp_cache, linearly probing for a free slot.
    size_t idx = b.hash & (new_num_buckets - 1);
    while (tmp_cache.buckets[idx].size != 0) {
      idx = (idx + 1) & (new_num_buckets - 1);
    }
    tmp_cache.buckets[idx] = b;
  }

  xnn_release_memory(cache->buckets);

  cache->buckets = tmp_cache.buckets;
  cache->num_buckets = tmp_cache.num_buckets;
  return true;
}
static inline bool bytes_equal(struct xnn_cache* cache, void* ptr, size_t size, size_t offset)
{
  return memcmp(ptr, (void*) ((uintptr_t) cache_start(cache) + offset), size) == 0;
}

static bool lookup(struct xnn_cache* cache, void* ptr, size_t size, uint32_t hash, size_t* index)
{
  assert(is_po2(cache->num_buckets));
  const size_t mask = cache->num_buckets - 1;
  const struct xnn_cache_bucket* buckets = cache->buckets;
  size_t idx = hash & mask;

  // Linear probing: advance until we reach an empty bucket or a bucket whose
  // hash and bytes both match.
  while (buckets[idx].size != 0 &&
         !(buckets[idx].hash == hash &&
           bytes_equal(cache, ptr, buckets[idx].size, buckets[idx].offset))) {
    idx = (idx + 1) & mask;
  }

  *index = idx;
  return buckets[idx].size != 0;  // an empty bucket means not found
}
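// Aside (illustrative, not part of src/cache.c; the function name is
// hypothetical): the is_po2 assertion above is what makes `hash & mask` a
// valid replacement for `hash % num_buckets` in the probe loop. For a
// power-of-two bucket count the two are identical, and the AND avoids an
// integer division on every probe step.
static void example_po2_mask_equivalence(void) {
  const size_t num_buckets = 32;        // must be a power of two
  const size_t mask = num_buckets - 1;  // 0b11111
  for (uint32_t hash = 0; hash < 1000; hash++) {
    assert((hash & mask) == (hash % num_buckets));
  }
}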
static bool insert(struct xnn_cache* cache, void* ptr, size_t size)
{
  const uint32_t hash = murmur_hash3(ptr, size, /*seed=*/XNN_CACHE_HASH_SEED);
  size_t idx;
  const bool found = lookup(cache, ptr, size, hash, &idx);
  if (found) {
    return false;
  }

  // Grow the bucket table if this insertion would exceed the maximum load factor.
  if (cache->num_entries * XNN_CACHE_MAX_LOAD_ENTRIES_MULTIPLIER >
      cache->num_buckets * XNN_CACHE_MAX_LOAD_BUCKETS_MULTIPLIER) {
    if (!cache_buckets_grow(cache)) {
      xnn_log_error("failed to grow cache buckets");
      return false;
    }
    xnn_log_debug("successfully grew cache buckets");

    // If the cache grew, idx is stale, since that is based on the old cache's num_buckets.
    const bool found_in_grown_cache = lookup(cache, ptr, size, hash, &idx);
    assert(!found_in_grown_cache);
    (void) found_in_grown_cache;  // unused in release builds
  }

  // Check that ptr points into cache's buffer.
  assert((uintptr_t) ptr >= (uintptr_t) cache_start(cache));
  if (cache->type == xnn_cache_type_code) {
    assert((uintptr_t) ptr < (uintptr_t) cache_start(cache) + cache_size(cache));
  }

  const size_t offset = (uintptr_t) ptr - (uintptr_t) cache_start(cache);

  cache->buckets[idx].size = size;
  cache->buckets[idx].hash = hash;
  cache->buckets[idx].offset = offset;
  cache->num_entries++;
  return true;
}
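// Aside (illustrative, not part of src/cache.c): the grow test above is an
// integer-only load-factor check. Assuming hypothetical multipliers EM = 4
// and BM = 3 (the real values live in the cache header), the condition
// num_entries * EM > num_buckets * BM caps the load factor at BM/EM = 3/4:
// a 32-bucket table tolerates 24 entries and grows when the 25th arrives.
static bool example_needs_grow(size_t num_entries, size_t num_buckets) {
  const size_t example_entries_multiplier = 4;  // hypothetical EM
  const size_t example_buckets_multiplier = 3;  // hypothetical BM
  return num_entries * example_entries_multiplier >
         num_buckets * example_buckets_multiplier;
}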
// Checks if a generated microkernel is already in the cache; returns the offset
// into the cache's buffer if found, XNN_CACHE_NOT_FOUND otherwise.
static size_t lookup_cache(struct xnn_cache* cache, void* ptr, size_t size)
{
  const uint32_t hash = murmur_hash3(ptr, size, /*seed=*/XNN_CACHE_HASH_SEED);
  size_t bucket_idx;
  if (lookup(cache, ptr, size, hash, &bucket_idx)) {
    cache->hits++;
    return cache->buckets[bucket_idx].offset;
  }
  cache->misses++;
  return XNN_CACHE_NOT_FOUND;
}
size_t xnn_get_or_insert_cache(struct xnn_cache* cache, void* ptr, size_t size)
{
  const size_t found_offset = lookup_cache(cache, ptr, size);
  if (found_offset != XNN_CACHE_NOT_FOUND) {
    if (cache->type == xnn_cache_type_code) {
      // Found in the cache, rewind the buffer because code generators update buffer size.
      cache->code.size -= size;
    }
    return found_offset;
  }

  if (cache->type == xnn_cache_type_weights) {
    // Cache miss; weights packing functions don't update buffer size, so update it here.
    cache->weights.size += size;
  }

  const size_t offset = (uintptr_t) ptr - (uintptr_t) cache_start(cache);
  if (!insert(cache, ptr, size)) {
    return XNN_CACHE_NOT_FOUND;
  }
  return offset;
}
size_t xnn_get_or_insert_code_cache(struct xnn_code_cache* cache, void* ptr, size_t size)
{
  return xnn_get_or_insert_cache(&cache->cache, ptr, size);
}
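// Usage sketch (illustrative, not part of src/cache.c; the function name is
// hypothetical): deduplicating a freshly generated microkernel. The generator
// is assumed to have appended `size` bytes at `ptr`, the current end of the
// cache's code buffer, and to have bumped code.size; on a hit,
// xnn_get_or_insert_code_cache rewinds that bump and returns the offset of
// the identical, previously cached kernel.
static void example_deduplicate_kernel(struct xnn_code_cache* code_cache,
                                       void* ptr, size_t size) {
  const size_t offset = xnn_get_or_insert_code_cache(code_cache, ptr, size);
  if (offset != XNN_CACHE_NOT_FOUND) {
    void* kernel = (void*) ((uintptr_t) code_cache->cache.code.start + offset);
    (void) kernel;  // hand the deduplicated kernel pointer to the caller
  }
}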
enum xnn_status xnn_release_code_cache(struct xnn_code_cache* cache)
{
  if XNN_LIKELY(cache != NULL) {
    assert(cache->cache.type == xnn_cache_type_code);
    xnn_release_code_memory(&cache->cache.code);
    xnn_release_memory(cache->cache.buckets);
  }
  return xnn_status_success;
}
enum xnn_status xnn_internal_init_weights_cache(
  struct xnn_weights_cache* cache,
  size_t num_buckets,
  size_t buffer_size)
{
  memset(cache, 0, sizeof(struct xnn_weights_cache));

  enum xnn_status status = xnn_init_cache_with_size(&cache->cache, num_buckets, xnn_cache_type_weights);
  if (status != xnn_status_success) {
    goto error;
  }

  status = xnn_allocate_weights_memory(&cache->cache.weights, buffer_size);
  if (status != xnn_status_success) {
    goto error;
  }

  status = xnn_mutex_init(&cache->mutex);
  if (status != xnn_status_success) {
    goto error;
  }

  return xnn_status_success;

error:
  xnn_release_weights_cache(cache);
  return status;
}

enum xnn_status xnn_init_weights_cache_with_size(struct xnn_weights_cache* cache, size_t size)
{
  return xnn_internal_init_weights_cache(cache, XNN_CACHE_INITIAL_BUCKETS, size);
}

enum xnn_status xnn_init_weights_cache(struct xnn_weights_cache* cache)
{
  return xnn_init_weights_cache_with_size(cache, XNN_DEFAULT_WEIGHTS_BUFFER_SIZE);
}
enum xnn_status xnn_finalize_weights_cache(
  struct xnn_weights_cache* cache,
  enum xnn_weights_cache_finalization_kind finalization_kind)
{
  switch (cache->finalization_state) {
    case xnn_cache_state_hard_finalized:
    case xnn_cache_state_soft_finalized:
      xnn_log_error("failed to finalize an already finalized weights cache");
      return xnn_status_invalid_state;
    case xnn_cache_state_not_finalized: {
      enum xnn_status status;
      enum xnn_cache_state finalized_state;

      if (finalization_kind == xnn_weights_cache_finalization_kind_hard) {
        xnn_log_debug("hard finalizing weights cache");
        status = xnn_finalize_weights_memory(&cache->cache.weights);
        // Hard finalization discards the bucket table: lookups of new weights are no
        // longer possible, only previously returned offsets remain valid.
        xnn_release_memory(cache->cache.buckets);
        cache->cache.buckets = NULL;
        finalized_state = xnn_cache_state_hard_finalized;
      } else {
        assert(finalization_kind == xnn_weights_cache_finalization_kind_soft);
        xnn_log_debug("soft finalizing weights cache");
        // Finalize the weights cache by reserving sufficient space for the insertion of the
        // largest cached weights. This ensures that we have space to write packed weights to
        // check for cache hits without growing and moving the memory. The overhead can be as
        // large as the size of the largest cached weights, rounded up to page size.
        status = xnn_reserve_weights_memory(&cache->cache.weights, cache->max_weights_size);
        finalized_state = xnn_cache_state_soft_finalized;
      }
      if (status != xnn_status_success) {
        xnn_log_error("failed to finalize weights cache memory");
        return xnn_status_invalid_state;
      }

      cache->finalization_state = finalized_state;
      return xnn_status_success;
    }
  }
  return xnn_status_invalid_state;
}
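// Usage sketch (illustrative, not part of src/cache.c): choosing a
// finalization kind. Hard finalization compacts the buffer and frees the
// bucket table, so the cache can only serve already-created operators; soft
// finalization keeps max_weights_size spare bytes so later operators can
// still probe the cache for hits (though new insertions are rejected):
//
//   xnn_finalize_weights_cache(&weights_cache, xnn_weights_cache_finalization_kind_hard);
// or:
//   xnn_finalize_weights_cache(&weights_cache, xnn_weights_cache_finalization_kind_soft);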
enum xnn_status xnn_release_weights_cache(struct xnn_weights_cache* cache)
{
  if XNN_LIKELY(cache != NULL) {
    assert(cache->cache.type == xnn_cache_type_weights);
    xnn_release_weights_memory(&cache->cache.weights);
    if (cache->cache.buckets != NULL) {
      // NULL if the cache was hard finalized, which releases the bucket table early.
      xnn_release_memory(cache->cache.buckets);
    }
    const enum xnn_status status = xnn_mutex_destroy(&cache->mutex);
    if (status != xnn_status_success) {
      return status;
    }
  }
  return xnn_status_success;
}
static inline bool cache_has_space(struct xnn_weights_cache* cache, size_t n)
{
  const struct xnn_weights_buffer buf = cache->cache.weights;
  return buf.size + n <= buf.capacity;
}
void* xnn_reserve_space_in_weights_cache(struct xnn_weights_cache* cache, size_t n) {
  switch (cache->finalization_state) {
    case xnn_cache_state_hard_finalized:
      xnn_log_error("cannot reserve additional space in a finalized compact weights cache");
      return NULL;
    case xnn_cache_state_soft_finalized:
      if (!cache_has_space(cache, n)) {
        xnn_log_error("cannot reserve additional space in a finalized weights cache");
        return NULL;
      }
      // If the cache is finalized and has space for `n` bytes, we still want to lock the
      // mutex, because we can have multiple writers attempting to write to this space.
      break;
    case xnn_cache_state_not_finalized:
      break;
  }

  enum xnn_status status = xnn_mutex_lock(&cache->mutex);
  if (status != xnn_status_success) {
    return NULL;
  }

  struct xnn_weights_buffer* buffer = &cache->cache.weights;
  status = xnn_reserve_weights_memory(buffer, n);
  if (status != xnn_status_success) {
    xnn_mutex_unlock(&cache->mutex);
    return NULL;
  }

  // The mutex stays locked on success; xnn_get_or_insert_weights_cache unlocks it.
  return (void*) ((uintptr_t) buffer->start + buffer->size);
}
size_t xnn_get_or_insert_weights_cache(struct xnn_weights_cache* cache, void* ptr, size_t size)
{
  size_t offset = XNN_CACHE_NOT_FOUND;

  switch (cache->finalization_state) {
    case xnn_cache_state_hard_finalized:
      xnn_log_error("cannot insert into a finalized compact weights cache");
      return XNN_CACHE_NOT_FOUND;
    case xnn_cache_state_soft_finalized: {
      // Inserting into a finalized weights cache is okay as long as:
      // 1. there is sufficient space in the buffer (to write the incoming packed weights), and
      // 2. the incoming packed weights are already in the cache.
      if (!cache_has_space(cache, size)) {
        xnn_log_error("insufficient extra space in finalized weights cache buffer");
        return XNN_CACHE_NOT_FOUND;
      }

      const size_t found_offset = lookup_cache(&cache->cache, ptr, size);
      if (found_offset == XNN_CACHE_NOT_FOUND) {
        xnn_log_error("packed weights not found in finalized weights cache");
      }
      offset = found_offset;
      break;
    }
    case xnn_cache_state_not_finalized: {
      offset = xnn_get_or_insert_cache(&cache->cache, ptr, size);
      if (offset != XNN_CACHE_NOT_FOUND) {
        // Found or inserted packed weights; track the largest size seen. This is used when
        // soft-finalizing the weights cache, to ensure there is extra space at the end for
        // future cache checks.
        cache->max_weights_size = max(size, cache->max_weights_size);
      }
      break;
    }
  }

  // Mutex is locked in xnn_reserve_space_in_weights_cache when it returns non-NULL,
  // i.e. when the cache is not finalized, or is soft-finalized with sufficient space.
  const enum xnn_status status = xnn_mutex_unlock(&cache->mutex);
  (void) status;
  assert(status == xnn_status_success);
  return offset;
}
bool xnn_weights_cache_is_finalized(struct xnn_weights_cache* cache) {
  return cache->finalization_state != xnn_cache_state_not_finalized;
}
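// End-to-end usage sketch (illustrative, not part of src/cache.c; the
// function name and packed_size are hypothetical): the reserve/pack/insert
// protocol for the weights cache. Note the locking contract documented above:
// a non-NULL return from xnn_reserve_space_in_weights_cache leaves the mutex
// held, and the matching xnn_get_or_insert_weights_cache call releases it.
static void example_weights_cache_round_trip(void) {
  struct xnn_weights_cache weights_cache;
  if (xnn_init_weights_cache(&weights_cache) != xnn_status_success) {
    return;
  }

  const size_t packed_size = 1024;  // hypothetical packed-weights size
  void* packed = xnn_reserve_space_in_weights_cache(&weights_cache, packed_size);
  if (packed != NULL) {
    // ... write packed weights into `packed` (mutex is held here) ...
    const size_t offset = xnn_get_or_insert_weights_cache(&weights_cache, packed, packed_size);
    (void) offset;  // XNN_CACHE_NOT_FOUND on failure, else offset into the buffer
  }

  // Freeze the cache once every operator has been created, then tear down.
  xnn_finalize_weights_cache(&weights_cache, xnn_weights_cache_finalization_kind_soft);
  xnn_release_weights_cache(&weights_cache);
}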