#include <linux/spinlock.h>
#include <linux/slab.h>
#include <linux/module.h>

static void *gl_mem_p;
static struct esp_mem_mgmt gl_mm;

int set_gl_mem_p(void *mem_p)
{
	gl_mem_p = mem_p;
	return 0;
}

void *get_gl_mem_p(void)
{
	return gl_mem_p;
}

static inline int esp_clz(u32 x)
{
	return __builtin_clz(x);
}

static inline int esp_ctz(u32 x)
{
	return __builtin_ctz(x);
}

static inline int esp_popcount(u32 x)
{
	/* portable SWAR popcount, used instead of the builtin kept below */
	x = x - ((x >> 1) & 0x55555555);
	x = (x & 0x33333333) + ((x >> 2) & 0x33333333);
	x = (x + (x >> 4)) & 0x0f0f0f0f;
	return (x * 0x01010101) >> 24;
	/* return __builtin_popcount(x); */
}
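
/*
 * Worked examples for the three helpers above (a compiled-out sanity
 * sketch; the expected values follow from the bit layout of the input):
 */
#if 0
static void esp_bit_helpers_example(void)
{
	/* 0x00010000 has only bit 16 set */
	BUG_ON(esp_clz(0x00010000) != 15);	/* bits 31..17 are zero */
	BUG_ON(esp_ctz(0x00010000) != 16);	/* bits 15..0 are zero */
	BUG_ON(esp_popcount(0x00010000) != 1);	/* one bit set */
}
#endif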

/* the bit at bit_no must already be 0 (empty) */
static int get_next_empty_num(u32 *bit_map, int bit_map_size, int bit_no)
{
	int i, offset, x;
	int num;

	i = (bit_no & 0xffffffe0) >> 5;	/* integer div 32 */
	offset = bit_no & 0x0000001f;	/* mod 32 */

	x = esp_ctz(bit_map[i] >> offset);
	if (x < 32 - offset) {
		return x;	/* the empty run ends inside this word */
	}

	num = 32 - offset;
	i++;
	while (i < bit_map_size) {
		x = esp_ctz(bit_map[i]);
		if (bit_map[i] != 0) {	/* run ends inside this word */
			num += x;
			return num;
		}
		num += 32;
		i++;
	}

	if (i == bit_map_size)
		return num;	/* empty run reaches the end of the map */
	return num;
}
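
/*
 * Example (illustrative values): if bit_map[0] == 0x0000ff0f, bits 0..3
 * and 8..15 are in use and bits 4..7 are free, so
 * get_next_empty_num(bit_map, 1, 4) returns 4: the empty run starting at
 * bit 4 covers bits 4..7 and stops at the set bit 8.
 */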

/* marked inline only to silence the unused-function warning */
static inline int get_prev_empty_num(u32 *bit_map, int bit_map_size, int bit_no)
{
	int i, offset, x;
	int num;

	i = (bit_no & 0xffffffe0) >> 5;	/* integer div 32 */
	offset = bit_no & 0x0000001f;	/* mod 32 */

	x = esp_clz(bit_map[i] << (32 - offset));
	if (x < offset) {
		return x;	/* the empty run ends inside this word */
	}

	num = offset;
	i--;
	while (i >= 0) {
		x = esp_clz(bit_map[i]);
		if (bit_map[i] != 0) {	/* run ends inside this word */
			num += x;
			return num;
		}
		num += 32;
		i--;
	}

	if (i < 0)
		return num;	/* empty run reaches the start of the map */
	return num;
}

static int find_1st_empty_pos(u32 *bit_map, int bit_map_size, int start_bit_no)
{
	int i, offset, x;

	i = (start_bit_no & 0xffffffe0) >> 5; /* integer div 32 */
	offset = start_bit_no & 0x0000001f; /* mod 32 */

	if (esp_popcount((u32)(bit_map[i] >> offset)) < 32 - offset) {
		/* at least one bit at or above offset is still 0 */
		while ((x = esp_ctz(bit_map[i] >> offset)) == 0) {
			offset++;
		}
		return (i << 5) + offset;
	}

	i++;
	while (i < bit_map_size) {
		if (esp_popcount(bit_map[i]) < 32) {
			offset = 0;
			while ((x = esp_ctz(bit_map[i] >> offset)) == 0) {
				offset++;
			}
			return (i << 5) + offset;
		}
		i++;
	}

	if (i == bit_map_size)
		return -1;	/* no empty bit left */
	return -1;
}
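
/*
 * Example (illustrative values): with bit_map[0] == 0xffffffff and
 * bit_map[1] == 0xffffff0f, find_1st_empty_pos(bit_map, 2, 0) skips the
 * completely full word 0 and returns 36, i.e. word 1, offset 4, the first
 * cleared bit.
 */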

/* all bits in [start_bit_no, start_bit_no + bit_count) must currently be 0 */
static void set_next_full(u32 *bit_map, int bit_map_size, int start_bit_no, int bit_count)
{
	int i, offset;

	i = (start_bit_no & 0xffffffe0) >> 5; /* integer div 32 */
	offset = start_bit_no & 0x0000001f; /* mod 32 */

	if (bit_count > 32 - offset) {
		bit_map[i] |= ~((1 << offset) - 1);	/* fill this word from offset upward */
		bit_count -= (32 - offset);
		while (bit_count > 0) {
			i++;
			if (bit_count >= 32) {
				bit_map[i] |= 0xffffffff;
			} else {
				bit_map[i] |= ((1 << bit_count) - 1);
			}
			bit_count -= 32;
		}
	} else {
		/* 1ULL avoids undefined behaviour when bit_count == 32 */
		bit_map[i] |= (u32)(((1ULL << bit_count) - 1) << offset);
	}
}

/* all bits in [start_bit_no, start_bit_no + bit_count) must currently be 1 */
static void set_next_empty(u32 *bit_map, int bit_map_size, int start_bit_no, int bit_count)
{
	int i, offset;

	i = (start_bit_no & 0xffffffe0) >> 5; /* integer div 32 */
	offset = start_bit_no & 0x0000001f; /* mod 32 */

	if (bit_count > 32 - offset) {
		bit_map[i] &= ((1 << offset) - 1);	/* clear this word from offset upward */
		bit_count -= (32 - offset);
		while (bit_count > 0) {
			i++;
			if (bit_count >= 32) {
				bit_map[i] &= 0x00000000;
			} else {
				bit_map[i] &= ~((1 << bit_count) - 1);
			}
			bit_count -= 32;
		}
	} else {
		/* 1ULL avoids undefined behaviour when bit_count == 32 */
		bit_map[i] &= ~(u32)(((1ULL << bit_count) - 1) << offset);
	}
}
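
/*
 * A compiled-out sketch of how the two setters pair up: marking bits
 * 30..33 busy touches two words, and clearing the same range restores
 * them (the map is assumed all-empty at the start):
 */
#if 0
static void esp_bitmap_setters_example(void)
{
	u32 map[2] = { 0, 0 };

	set_next_full(map, 2, 30, 4);	/* bits 30,31 of word 0; bits 0,1 of word 1 */
	BUG_ON(map[0] != 0xc0000000 || map[1] != 0x00000003);

	set_next_empty(map, 2, 30, 4);	/* give the same 4 bits back */
	BUG_ON(map[0] != 0 || map[1] != 0);
}
#endif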

static inline int bin_roundup(int n, unsigned int div) /* div must be a power of two */
{
	int x = esp_ctz(div);

	if (__builtin_popcount(div) > 1)
		return -1;	/* reject a div that is not a power of two */

	return (n & ((1 << x) - 1)) ? (((n >> x) + 1) << x) : n;
}
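
/*
 * Example: with div == 64 (so x == 6), bin_roundup(100, 64) == 128,
 * because 100 & 63 != 0 and ((100 >> 6) + 1) << 6 == 128; an already
 * aligned n passes through, so bin_roundup(128, 64) == 128.
 */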

/* TODO: a hash table would make this lookup faster */
static int set_access(struct esp_mem_mgmt_per *mmp, void *point, int bit_count)
{
	int i;

	for (i = 0; i < DEFAULT_MAX_ACCESSES_PER; i++) {
		if (mmp->macs[i].p == NULL) {
			mmp->macs[i].p = point;
			mmp->macs[i].pieces = bit_count;
			return 0;
		}
	}

	return -1;	/* access table is full */
}

static int get_clear_access(struct esp_mem_mgmt_per *mmp, void *point)
{
	int i;
	int pieces = -1;

	for (i = 0; i < DEFAULT_MAX_ACCESSES_PER; i++) {
		if (mmp->macs[i].p == point) {
			pieces = mmp->macs[i].pieces;
			mmp->macs[i].p = NULL;
			mmp->macs[i].pieces = 0;
			break;
		}
	}

	return pieces;	/* -1 when point is not in the table */
}
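
/*
 * The two helpers above form a tiny pointer -> piece-count table: on
 * allocation set_access() records how many bitmap pieces back a pointer,
 * and on free get_clear_access() looks that count up again and releases
 * the slot, so _esp_free() knows how many bits to clear without the
 * caller passing a size.
 */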

static inline void *_esp_malloc(size_t size)
{
	void *p = NULL;
	int pos = 0;
	int empty_num;
	int roundup_size;

	if (size >= LITTLE_LIMIT) { /* use the large-piece pool */
		spin_lock_irqsave(&gl_mm.large_mmp.spin_lock, gl_mm.large_mmp.lock_flags);

		while (pos < LARGE_PIECES_NUM) {
			if ((pos = find_1st_empty_pos(gl_mm.large_mmp.bit_map, LARGE_BIT_MAP_SIZE, pos)) >= 0) {
				logd("large find pos [%d]\n", pos);
				empty_num = get_next_empty_num(gl_mm.large_mmp.bit_map, LARGE_BIT_MAP_SIZE, pos);
				if ((empty_num << LARGE_PIECE_SIZE_SHIFT) >= size) {
					roundup_size = bin_roundup(size, LARGE_PIECE_SIZE);
					logd("large roundup_size [%d]\n", roundup_size);
					set_next_full(gl_mm.large_mmp.bit_map, LARGE_BIT_MAP_SIZE, pos, roundup_size >> LARGE_PIECE_SIZE_SHIFT);
					p = (void *)((pos << LARGE_PIECE_SIZE_SHIFT) + (u8 *)gl_mm.large_mmp.start_p);
					set_access(&gl_mm.large_mmp, p, roundup_size >> LARGE_PIECE_SIZE_SHIFT);
					spin_unlock_irqrestore(&gl_mm.large_mmp.spin_lock, gl_mm.large_mmp.lock_flags);
					return p;
				}
				pos += empty_num;	/* run too short, keep scanning */
			} else { /* not enough memory */
				spin_unlock_irqrestore(&gl_mm.large_mmp.spin_lock, gl_mm.large_mmp.lock_flags);
				return NULL;
			}
		}
		spin_unlock_irqrestore(&gl_mm.large_mmp.spin_lock, gl_mm.large_mmp.lock_flags);
	} else { /* use the little-piece pool */
		spin_lock_irqsave(&gl_mm.little_mmp.spin_lock, gl_mm.little_mmp.lock_flags);

		while (pos < LITTLE_PIECES_NUM) {
			if ((pos = find_1st_empty_pos(gl_mm.little_mmp.bit_map, LITTLE_BIT_MAP_SIZE, pos)) >= 0) {
				logd("little find pos [%d]\n", pos);
				empty_num = get_next_empty_num(gl_mm.little_mmp.bit_map, LITTLE_BIT_MAP_SIZE, pos);
				if ((empty_num << LITTLE_PIECE_SIZE_SHIFT) >= size) {
					roundup_size = bin_roundup(size, LITTLE_PIECE_SIZE);
					logd("little roundup_size [%d]\n", roundup_size);
					set_next_full(gl_mm.little_mmp.bit_map, LITTLE_BIT_MAP_SIZE, pos, roundup_size >> LITTLE_PIECE_SIZE_SHIFT);
					p = (void *)((pos << LITTLE_PIECE_SIZE_SHIFT) + (u8 *)gl_mm.little_mmp.start_p);
					set_access(&gl_mm.little_mmp, p, roundup_size >> LITTLE_PIECE_SIZE_SHIFT);
					spin_unlock_irqrestore(&gl_mm.little_mmp.spin_lock, gl_mm.little_mmp.lock_flags);
					return p;
				}
				pos += empty_num;	/* run too short, keep scanning */
			} else { /* not enough memory */
				spin_unlock_irqrestore(&gl_mm.little_mmp.spin_lock, gl_mm.little_mmp.lock_flags);
				return NULL;
			}
		}
		spin_unlock_irqrestore(&gl_mm.little_mmp.spin_lock, gl_mm.little_mmp.lock_flags);
	}

	return NULL;
}
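
/*
 * Pool selection in _esp_malloc() is a single size check: requests of
 * LITTLE_LIMIT bytes or more come from the large-piece region, smaller
 * ones from the little-piece region, and each pool does its bookkeeping
 * with one word-granular bitmap scan under its own spinlock.
 */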

static inline void _esp_free(void *p)
{
	struct esp_mem_mgmt_per *mmp;
	int pieces;
	int start_pos;

	mmp = &gl_mm.large_mmp;
	spin_lock_irqsave(&mmp->spin_lock, mmp->lock_flags);
	pieces = get_clear_access(&gl_mm.large_mmp, p);
	logd("%s large pieces %d\n", __func__, pieces);
	if (pieces > 0) {
		start_pos = ((p - mmp->start_p) >> LARGE_PIECE_SIZE_SHIFT);
		logd("%s large pos %d\n", __func__, start_pos);
		set_next_empty(mmp->bit_map, LARGE_BIT_MAP_SIZE, start_pos, pieces);
		spin_unlock_irqrestore(&mmp->spin_lock, mmp->lock_flags);
		return;
	}
	spin_unlock_irqrestore(&mmp->spin_lock, mmp->lock_flags);

	mmp = &gl_mm.little_mmp;
	spin_lock_irqsave(&mmp->spin_lock, mmp->lock_flags);
	pieces = get_clear_access(&gl_mm.little_mmp, p);
	logd("%s little pieces %d\n", __func__, pieces);
	if (pieces > 0) {
		start_pos = ((p - mmp->start_p) >> LITTLE_PIECE_SIZE_SHIFT);
		logd("%s little pos %d\n", __func__, start_pos);
		set_next_empty(mmp->bit_map, LITTLE_BIT_MAP_SIZE, start_pos, pieces);
		spin_unlock_irqrestore(&mmp->spin_lock, mmp->lock_flags);
		return;
	}
	spin_unlock_irqrestore(&mmp->spin_lock, mmp->lock_flags);

	loge("%s: pointer was not allocated by the esp prealloc allocator\n", __func__);
}

void show_bitmap(void)
{
	int i;

	logi("-----LARGE BIT MAP-----\n");
	for (i = 0; i < LARGE_BIT_MAP_SIZE; i++) {
		logi("%04d: 0x%08x\n", i, gl_mm.large_mmp.bit_map[i]);
	}

	logi("-----LITTLE BIT MAP-----\n");
	for (i = 0; i < LITTLE_BIT_MAP_SIZE; i++) {
		logi("%04d: 0x%08x\n", i, gl_mm.little_mmp.bit_map[i]);
	}
}

void *esp_malloc(size_t size)
{
#if 0	/* debug variant: dump the bitmaps on every allocation */
	void *p = _esp_malloc(size);

	show_bitmap();
	return p;
#else
	return _esp_malloc(size);
#endif
}
EXPORT_SYMBOL(esp_malloc);

void esp_free(void *p)
{
	_esp_free(p);
}
EXPORT_SYMBOL(esp_free);
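
/*
 * A minimal caller sketch (compiled out; esp_slab_init() must have run
 * first, and the 256-byte size below is arbitrary):
 */
#if 0
static int esp_slab_usage_example(void)
{
	void *buf = esp_malloc(256);

	if (buf == NULL)
		return -ENOMEM;

	memset(buf, 0, 256);	/* the pool memory is ordinary kernel RAM */
	esp_free(buf);
	return 0;
}
#endif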

void *esp_pre_malloc(void)
{
	int po;

	po = get_order(PREMALLOC_SIZE);
	gl_mem_p = (void *)__get_free_pages(GFP_ATOMIC, po);

	if (gl_mem_p == NULL) {
		loge("%s: no memory for premalloc!\n", __func__);
	}

	return gl_mem_p;
}

void esp_pre_free(void)
{
	int po;

	if (gl_mem_p == NULL) {
		loge("%s: nothing to free, the premalloc buffer is NULL\n", __func__);
		return;
	}

	po = get_order(PREMALLOC_SIZE);
	free_pages((unsigned long)gl_mem_p, po);
	gl_mem_p = NULL;
}

int esp_mm_init(void)
{
	memset(&gl_mm, 0x00, sizeof(struct esp_mem_mgmt));

	spin_lock_init(&gl_mm.large_mmp.spin_lock);
	spin_lock_init(&gl_mm.little_mmp.spin_lock);

	gl_mm.large_mmp.start_p = gl_mem_p; /* large pool sits at the start */
	gl_mm.little_mmp.start_p = gl_mem_p + PREMALLOC_LARGE_SIZE; /* little pool follows it */

	if ((gl_mm.large_mmp.bit_map = (u32 *)kzalloc(sizeof(u32) * LARGE_BIT_MAP_SIZE, GFP_KERNEL)) == NULL)
		return -ENOMEM;

	if ((gl_mm.little_mmp.bit_map = (u32 *)kzalloc(sizeof(u32) * LITTLE_BIT_MAP_SIZE, GFP_KERNEL)) == NULL) {
		kfree(gl_mm.large_mmp.bit_map);
		gl_mm.large_mmp.bit_map = NULL;
		return -ENOMEM;
	}

	return 0;
}

void esp_mm_deinit(void)
{
	if (gl_mm.large_mmp.bit_map)
		kfree(gl_mm.large_mmp.bit_map);

	if (gl_mm.little_mmp.bit_map)
		kfree(gl_mm.little_mmp.bit_map);
}

int esp_slab_init(void)
{
	if (esp_pre_malloc() == NULL)
		return -ENOMEM;

	/* TODO: manage additional memory regions through a list */
	return esp_mm_init();
}

void esp_slab_deinit(void)
{
	esp_mm_deinit();
	/* TODO: manage additional memory regions through a list */
	esp_pre_free();
}
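
/*
 * A sketch of how a module would wire these up (compiled out; the
 * esp_slab_* calls are the functions defined above, while the module
 * boilerplate itself is purely illustrative):
 */
#if 0
static int __init esp_slab_example_init(void)
{
	return esp_slab_init();	/* grabs the pages and builds both bitmaps */
}

static void __exit esp_slab_example_exit(void)
{
	esp_slab_deinit();	/* frees the bitmaps, then the pages */
}

module_init(esp_slab_example_init);
module_exit(esp_slab_example_exit);
#endif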

#endif /* ESP_SLAB */