Heiko Schocher | 0c06db5 | 2014-06-24 10:10:03 +0200 | [diff] [blame] | 1 | |
| 2 | #include <common.h> |
Masahiro Yamada | e3332e1 | 2018-08-24 19:30:15 +0900 | [diff] [blame] | 3 | #include <memalign.h> |
Heiko Schocher | 0c06db5 | 2014-06-24 10:10:03 +0200 | [diff] [blame] | 4 | #include <linux/compat.h> |
| 5 | |
/*
 * Minimal stand-in for the Linux "current task" pointer: U-Boot runs
 * single-threaded, so one static task descriptor with a fixed PID is
 * enough for code ported from the kernel that reads current->pid.
 */
struct p_current cur = {
	.pid = 1,
};
/* __maybe_unused: not every build references `current`. */
__maybe_unused struct p_current *current = &cur;
| 10 | |
| 11 | unsigned long copy_from_user(void *dest, const void *src, |
| 12 | unsigned long count) |
| 13 | { |
| 14 | memcpy((void *)dest, (void *)src, count); |
| 15 | return 0; |
| 16 | } |
| 17 | |
| 18 | void *kmalloc(size_t size, int flags) |
| 19 | { |
Masahiro Yamada | 6b9f9ea | 2015-07-13 13:17:07 +0900 | [diff] [blame] | 20 | void *p; |
Heiko Schocher | 0c06db5 | 2014-06-24 10:10:03 +0200 | [diff] [blame] | 21 | |
Masahiro Yamada | e3332e1 | 2018-08-24 19:30:15 +0900 | [diff] [blame] | 22 | p = malloc_cache_aligned(size); |
Masahiro Yamada | 6b9f9ea | 2015-07-13 13:17:07 +0900 | [diff] [blame] | 23 | if (flags & __GFP_ZERO) |
| 24 | memset(p, 0, size); |
| 25 | |
| 26 | return p; |
Heiko Schocher | 0c06db5 | 2014-06-24 10:10:03 +0200 | [diff] [blame] | 27 | } |
| 28 | |
Heiko Schocher | 0c06db5 | 2014-06-24 10:10:03 +0200 | [diff] [blame] | 29 | struct kmem_cache *get_mem(int element_sz) |
| 30 | { |
| 31 | struct kmem_cache *ret; |
| 32 | |
| 33 | ret = memalign(ARCH_DMA_MINALIGN, sizeof(struct kmem_cache)); |
| 34 | ret->sz = element_sz; |
| 35 | |
| 36 | return ret; |
| 37 | } |
| 38 | |
| 39 | void *kmem_cache_alloc(struct kmem_cache *obj, int flag) |
| 40 | { |
Masahiro Yamada | e3332e1 | 2018-08-24 19:30:15 +0900 | [diff] [blame] | 41 | return malloc_cache_aligned(obj->sz); |
Heiko Schocher | 0c06db5 | 2014-06-24 10:10:03 +0200 | [diff] [blame] | 42 | } |