#ifndef IOU_ALLOC_CACHE_H
#define IOU_ALLOC_CACHE_H

/*
 * Don't allow the cache to grow beyond this size.
 */
#define IO_ALLOC_CACHE_MAX	128

/*
 * Header embedded in every cached object; links the object into the
 * cache's intrusive singly-linked stack (struct io_wq_work_node list).
 */
struct io_cache_entry {
	struct io_wq_work_node node;
};

2022-07-07 20:20:54 +00:00
|
|
|
static inline bool io_alloc_cache_put(struct io_alloc_cache *cache,
|
2022-07-07 20:16:20 +00:00
|
|
|
struct io_cache_entry *entry)
|
|
|
|
{
|
2023-04-04 12:39:57 +00:00
|
|
|
if (cache->nr_cached < cache->max_cached) {
|
2022-07-07 20:20:54 +00:00
|
|
|
cache->nr_cached++;
|
2023-02-23 16:43:52 +00:00
|
|
|
wq_stack_add_head(&entry->node, &cache->list);
|
2023-12-19 22:28:45 +00:00
|
|
|
kasan_mempool_poison_object(entry);
|
2022-07-07 20:20:54 +00:00
|
|
|
return true;
|
|
|
|
}
|
|
|
|
return false;
|
2022-07-07 20:16:20 +00:00
|
|
|
}
|
|
|
|
|
2023-04-11 11:06:05 +00:00
|
|
|
static inline bool io_alloc_cache_empty(struct io_alloc_cache *cache)
|
|
|
|
{
|
|
|
|
return !cache->list.next;
|
|
|
|
}
|
|
|
|
|
2022-07-07 20:16:20 +00:00
|
|
|
static inline struct io_cache_entry *io_alloc_cache_get(struct io_alloc_cache *cache)
|
|
|
|
{
|
2023-02-23 16:43:52 +00:00
|
|
|
if (cache->list.next) {
|
|
|
|
struct io_cache_entry *entry;
|
2022-07-07 20:16:20 +00:00
|
|
|
|
2023-02-23 16:43:52 +00:00
|
|
|
entry = container_of(cache->list.next, struct io_cache_entry, node);
|
2023-12-19 22:29:05 +00:00
|
|
|
kasan_mempool_unpoison_object(entry, cache->elem_size);
|
2023-02-23 16:43:52 +00:00
|
|
|
cache->list.next = cache->list.next->next;
|
2023-03-30 12:52:38 +00:00
|
|
|
cache->nr_cached--;
|
2023-02-23 16:43:52 +00:00
|
|
|
return entry;
|
2022-07-07 20:16:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
return NULL;
|
|
|
|
}
|
|
|
|
|
2023-04-04 12:39:57 +00:00
|
|
|
static inline void io_alloc_cache_init(struct io_alloc_cache *cache,
|
|
|
|
unsigned max_nr, size_t size)
|
2022-07-07 20:16:20 +00:00
|
|
|
{
|
2023-02-23 16:43:52 +00:00
|
|
|
cache->list.next = NULL;
|
2022-07-07 20:20:54 +00:00
|
|
|
cache->nr_cached = 0;
|
2023-04-04 12:39:57 +00:00
|
|
|
cache->max_cached = max_nr;
|
2023-02-23 16:43:53 +00:00
|
|
|
cache->elem_size = size;
|
2022-07-07 20:16:20 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
static inline void io_alloc_cache_free(struct io_alloc_cache *cache,
|
|
|
|
void (*free)(struct io_cache_entry *))
|
|
|
|
{
|
2023-02-23 16:43:52 +00:00
|
|
|
while (1) {
|
|
|
|
struct io_cache_entry *entry = io_alloc_cache_get(cache);
|
2022-07-07 20:16:20 +00:00
|
|
|
|
2023-02-23 16:43:52 +00:00
|
|
|
if (!entry)
|
|
|
|
break;
|
|
|
|
free(entry);
|
2022-07-07 20:16:20 +00:00
|
|
|
}
|
2022-07-07 20:20:54 +00:00
|
|
|
cache->nr_cached = 0;
|
2022-07-07 20:16:20 +00:00
|
|
|
}

#endif