x86, um: initial part of asm-um move

Signed-off-by: Al Viro <viro@zeniv.linux.org.uk>
Signed-off-by: H. Peter Anvin <hpa@zytor.com>
Author:     Al Viro
Date:       2008-08-17 19:13:17 -04:00
Committed:  H. Peter Anvin
Parent:     8569c9140b
Commit:     8ede0bdb63
145 changed files with 9 additions and 8 deletions


@@ -0,0 +1,17 @@
#ifndef __UM_CACHE_H
#define __UM_CACHE_H


#if defined(CONFIG_UML_X86) && !defined(CONFIG_64BIT)
# define L1_CACHE_SHIFT		(CONFIG_X86_L1_CACHE_SHIFT)
#elif defined(CONFIG_UML_X86) /* 64-bit */
# define L1_CACHE_SHIFT		6 /* Should be 7 on Intel */
#else
/* XXX: this was taken from x86, now it's completely random. Luckily only
 * affects SMP padding. */
# define L1_CACHE_SHIFT		5
#endif

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

#endif
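
For context (not part of the commit): L1_CACHE_BYTES is the value generic kernel code uses when it pads shared data out to cache-line boundaries, which is why the XXX comment notes that a wrong guess only hurts SMP padding. Below is a minimal user-space sketch of that idea; the struct name and the hard-coded shift of 6 (the 64-bit UML x86 branch above) are illustrative assumptions, not code from this commit.

#include <stdio.h>

/* Stand-ins for the kernel macros defined in the header above;
 * 1 << 6 = 64 bytes mirrors the 64-bit UML x86 case. */
#define L1_CACHE_SHIFT	6
#define L1_CACHE_BYTES	(1 << L1_CACHE_SHIFT)

/* Hypothetical per-CPU counter padded to a full cache line so two
 * CPUs updating adjacent counters do not false-share one line. */
struct padded_counter {
	unsigned long count;
} __attribute__((aligned(L1_CACHE_BYTES)));

int main(void)
{
	/* With aligned(64), sizeof(struct padded_counter) rounds up to 64. */
	printf("line size %d, sizeof(struct padded_counter) = %zu\n",
	       L1_CACHE_BYTES, sizeof(struct padded_counter));
	return 0;
}

If L1_CACHE_SHIFT is smaller than the real cache line, structures padded this way can still end up sharing a line, which costs performance but not correctness; that is the trade-off the header's fallback value of 5 accepts.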