#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <pthread.h>

#include <linux/mempool.h>
#include <linux/poison.h>
#include <linux/slab.h>
#include <linux/radix-tree.h>

#include <urcu/uatomic.h>
25 void *mempool_alloc(mempool_t *pool, int gfp_mask)
27 return pool->alloc(gfp_mask, pool->data);
30 void mempool_free(void *element, mempool_t *pool)
32 pool->free(element, pool->data);
35 mempool_t *mempool_create(int min_nr, mempool_alloc_t *alloc_fn,
36 mempool_free_t *free_fn, void *pool_data)
38 mempool_t *ret = malloc(sizeof(*ret));
40 ret->alloc = alloc_fn;
42 ret->data = pool_data;
46 void *kmem_cache_alloc(struct kmem_cache *cachep, int flags)
48 struct radix_tree_node *node;
50 if (flags & __GFP_NOWARN)
53 pthread_mutex_lock(&cachep->lock);
54 if (cachep->nr_objs) {
57 cachep->objs = node->private_data;
58 pthread_mutex_unlock(&cachep->lock);
59 node->private_data = NULL;
61 pthread_mutex_unlock(&cachep->lock);
62 node = malloc(cachep->size);
67 uatomic_inc(&nr_allocated);
71 void kmem_cache_free(struct kmem_cache *cachep, void *objp)
74 uatomic_dec(&nr_allocated);
75 pthread_mutex_lock(&cachep->lock);
76 if (cachep->nr_objs > 10) {
77 memset(objp, POISON_FREE, cachep->size);
80 struct radix_tree_node *node = objp;
82 node->private_data = cachep->objs;
85 pthread_mutex_unlock(&cachep->lock);
88 void *kmalloc(size_t size, gfp_t gfp)
90 void *ret = malloc(size);
91 uatomic_inc(&nr_allocated);
99 uatomic_dec(&nr_allocated);
104 kmem_cache_create(const char *name, size_t size, size_t offset,
105 unsigned long flags, void (*ctor)(void *))
107 struct kmem_cache *ret = malloc(sizeof(*ret));
109 pthread_mutex_init(&ret->lock, NULL);