x86, um: initial part of asm-um move
Signed-off-by: Al Viro <viro@zeniv.linux.org.uk> Signed-off-by: H. Peter Anvin <hpa@zytor.com>
This commit is contained in:
17
arch/um/include/asm/cache.h
Normal file
17
arch/um/include/asm/cache.h
Normal file
@@ -0,0 +1,17 @@
#ifndef __UM_CACHE_H
#define __UM_CACHE_H

#if defined(CONFIG_UML_X86) && !defined(CONFIG_64BIT)
# define L1_CACHE_SHIFT (CONFIG_X86_L1_CACHE_SHIFT)
#elif defined(CONFIG_UML_X86) /* 64-bit */
# define L1_CACHE_SHIFT 6 /* Should be 7 on Intel */
#else
/* XXX: this was taken from x86, now it's completely random. Luckily only
 * affects SMP padding. */
# define L1_CACHE_SHIFT 5
#endif

#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)

#endif
Reference in New Issue
Block a user