Commit 1f0ce8b

Authored by dwmw2 (David Woodhouse), committed by Pekka Enberg
mm: Move ARCH_SLAB_MINALIGN and ARCH_KMALLOC_MINALIGN to <linux/slab_def.h>
Acked-by: Herbert Xu <[email protected]>
Signed-off-by: David Woodhouse <[email protected]>
Signed-off-by: Pekka Enberg <[email protected]>
1 parent e40152e commit 1f0ce8b
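
The two macros being moved are compile-time fallbacks: each is defined only if the architecture has not already provided its own value. Below is a minimal, self-contained sketch of that #ifndef pattern; the macro names match the commit, but the 64-byte override and the main() driver are illustrative only and do not come from any particular architecture.

#include <stdio.h>

/* Stand-in for an architecture header that wants stronger kmalloc
 * alignment; the value 64 is purely an example. */
#define ARCH_KMALLOC_MINALIGN 64

/* The fallbacks added to <linux/slab_def.h> by this commit only take
 * effect when no earlier definition exists. */
#ifndef ARCH_KMALLOC_MINALIGN
#define ARCH_KMALLOC_MINALIGN __alignof__(unsigned long long)
#endif

#ifndef ARCH_SLAB_MINALIGN
#define ARCH_SLAB_MINALIGN 0
#endif

int main(void)
{
	printf("kmalloc minimum alignment: %d\n", (int)ARCH_KMALLOC_MINALIGN);
	printf("slab minimum alignment:    %d\n", (int)ARCH_SLAB_MINALIGN);
	return 0;
}

Compiled and run as-is, this prints 64 and 0; drop the example override at the top and the kmalloc value falls back to __alignof__(unsigned long long).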

2 files changed: 24 additions, 24 deletions

include/linux/slab_def.h

Lines changed: 24 additions & 0 deletions
@@ -16,6 +16,30 @@
 #include <linux/compiler.h>
 #include <linux/kmemtrace.h>
 
+#ifndef ARCH_KMALLOC_MINALIGN
+/*
+ * Enforce a minimum alignment for the kmalloc caches.
+ * Usually, the kmalloc caches are cache_line_size() aligned, except when
+ * DEBUG and FORCED_DEBUG are enabled, then they are BYTES_PER_WORD aligned.
+ * Some archs want to perform DMA into kmalloc caches and need a guaranteed
+ * alignment larger than the alignment of a 64-bit integer.
+ * ARCH_KMALLOC_MINALIGN allows that.
+ * Note that increasing this value may disable some debug features.
+ */
+#define ARCH_KMALLOC_MINALIGN __alignof__(unsigned long long)
+#endif
+
+#ifndef ARCH_SLAB_MINALIGN
+/*
+ * Enforce a minimum alignment for all caches.
+ * Intended for archs that get misalignment faults even for BYTES_PER_WORD
+ * aligned buffers. Includes ARCH_KMALLOC_MINALIGN.
+ * If possible: Do not enable this flag for CONFIG_DEBUG_SLAB, it disables
+ * some debug features.
+ */
+#define ARCH_SLAB_MINALIGN 0
+#endif
+
 /*
  * struct kmem_cache
  *
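
The comment in the hunk above explains the motivation: some architectures DMA directly into kmalloc()ed buffers and therefore need a guarantee stronger than 64-bit alignment. For illustration only (this is not quoted from any real architecture), such an arch would define the macro in its own cache header, which is seen before the #ifndef fallback above:

/* Hypothetical arch/<arch>/include/asm/cache.h fragment; the names
 * L1_CACHE_SHIFT/L1_CACHE_BYTES follow the usual kernel convention,
 * and 32-byte cache lines are an assumption for the example. */
#define L1_CACHE_SHIFT		5
#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

/*
 * kmalloc() buffers are used as DMA targets on this (imaginary)
 * machine, so force cache-line alignment; the #ifndef fallback in
 * <linux/slab_def.h> then leaves this definition untouched.
 */
#define ARCH_KMALLOC_MINALIGN	L1_CACHE_BYTES

As the hunk also notes, raising the value this way can disable some slab debugging features, so an override should be no larger than the architecture actually requires.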

mm/slab.c

Lines changed: 0 additions & 24 deletions
@@ -144,30 +144,6 @@
 #define BYTES_PER_WORD sizeof(void *)
 #define REDZONE_ALIGN max(BYTES_PER_WORD, __alignof__(unsigned long long))
 
-#ifndef ARCH_KMALLOC_MINALIGN
-/*
- * Enforce a minimum alignment for the kmalloc caches.
- * Usually, the kmalloc caches are cache_line_size() aligned, except when
- * DEBUG and FORCED_DEBUG are enabled, then they are BYTES_PER_WORD aligned.
- * Some archs want to perform DMA into kmalloc caches and need a guaranteed
- * alignment larger than the alignment of a 64-bit integer.
- * ARCH_KMALLOC_MINALIGN allows that.
- * Note that increasing this value may disable some debug features.
- */
-#define ARCH_KMALLOC_MINALIGN __alignof__(unsigned long long)
-#endif
-
-#ifndef ARCH_SLAB_MINALIGN
-/*
- * Enforce a minimum alignment for all caches.
- * Intended for archs that get misalignment faults even for BYTES_PER_WORD
- * aligned buffers. Includes ARCH_KMALLOC_MINALIGN.
- * If possible: Do not enable this flag for CONFIG_DEBUG_SLAB, it disables
- * some debug features.
- */
-#define ARCH_SLAB_MINALIGN 0
-#endif
-
 #ifndef ARCH_KMALLOC_FLAGS
 #define ARCH_KMALLOC_FLAGS SLAB_HWCACHE_ALIGN
 #endif
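
With the definitions gone from mm/slab.c, any code that includes <linux/slab.h> (which pulls in <linux/slab_def.h> when the SLAB allocator is configured) now sees the same guaranteed values. A hedged sketch of an out-of-mm user follows; it is ordinary kernel module code written for illustration, not anything contained in this commit, and report_minalign() would be called from a module's init path in practice.

#include <linux/kernel.h>
#include <linux/slab.h>

/* Sketch only: report the alignment that kmalloc() is guaranteed to
 * honour on this configuration. */
static void report_minalign(void)
{
	pr_info("kmalloc minimum alignment: %zu bytes\n",
		(size_t)ARCH_KMALLOC_MINALIGN);
	pr_info("slab minimum alignment:    %zu bytes\n",
		(size_t)ARCH_SLAB_MINALIGN);
}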
