[S390] Inline assembly cleanup.

Major cleanup of all s390 inline assemblies. They now have a common
coding style. Quite a few have been shortened, mainly by using register
asm variables. Use of the EX_TABLE macro helps as well. The atomic ops,
bit ops and locking inlines now use the Q-constraint if a newer gcc
is used. That results in slightly better code.
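
For illustration only (not part of the patch): the two hypothetical helpers
below contrast the old explicit-address form with the "Q"-constraint form of
a system-mask update, which is the kind of rewrite the diff below performs.
The function names are made up for this sketch.

	/* Old style: the address of the output is loaded into an address
	 * register via the "a" operand and written out as 0(%1). */
	static inline unsigned long stnsm_old_style(void)
	{
		unsigned long mask;

		asm volatile("stnsm 0(%1),0xfc"
			     : "=m" (mask) : "a" (&mask) : "memory");
		return mask;
	}

	/* New style: with gcc > 3.2 the "Q" constraint lets the compiler
	 * emit a single base+displacement memory operand, so the address
	 * register and the extra operand disappear. */
	static inline unsigned long stnsm_new_style(void)
	{
		unsigned long mask;

		asm volatile("stnsm %0,0xfc"
			     : "=Q" (mask) : : "memory");
		return mask;
	}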

Thanks to Christian Borntraeger for proof reading the changes.

Signed-off-by: Martin Schwidefsky <schwidefsky@de.ibm.com>
Author: Martin Schwidefsky <schwidefsky@de.ibm.com>
Date:   2006-09-28 16:56:43 +02:00
Commit: 94c12cc7d1 (parent: 25d83cbfaa)
51 changed files with 1758 additions and 2268 deletions

include/asm-s390/irqflags.h

@@ -10,43 +10,93 @@
 #ifdef __KERNEL__
 
-/* interrupt control.. */
-#define raw_local_irq_enable() ({ \
-	unsigned long __dummy; \
-	__asm__ __volatile__ ( \
-		"stosm 0(%1),0x03" \
-		: "=m" (__dummy) : "a" (&__dummy) : "memory" ); \
-	})
-
-#define raw_local_irq_disable() ({ \
-	unsigned long __flags; \
-	__asm__ __volatile__ ( \
-		"stnsm 0(%1),0xfc" : "=m" (__flags) : "a" (&__flags) ); \
-	__flags; \
-	})
-
-#define raw_local_save_flags(x) \
-do { \
-	typecheck(unsigned long, x); \
-	__asm__ __volatile__("stosm 0(%1),0" : "=m" (x) : "a" (&x), "m" (x) ); \
-} while (0)
-
-#define raw_local_irq_restore(x) \
-do { \
-	typecheck(unsigned long, x); \
-	__asm__ __volatile__("ssm 0(%0)" : : "a" (&x), "m" (x) : "memory"); \
-} while (0)
-
-#define raw_irqs_disabled() \
-({ \
-	unsigned long flags; \
-	raw_local_save_flags(flags); \
-	!((flags >> __FLAG_SHIFT) & 3); \
-})
+#if __GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 2)
+
+/* store then or system mask. */
+#define __raw_local_irq_stosm(__or) \
+({ \
+	unsigned long __mask; \
+	asm volatile( \
+		"	stosm	%0,%1" \
+		: "=Q" (__mask) : "i" (__or) : "memory"); \
+	__mask; \
+})
+
+/* store then and system mask. */
+#define __raw_local_irq_stnsm(__and) \
+({ \
+	unsigned long __mask; \
+	asm volatile( \
+		"	stnsm	%0,%1" \
+		: "=Q" (__mask) : "i" (__and) : "memory"); \
+	__mask; \
+})
+
+/* set system mask. */
+#define __raw_local_irq_ssm(__mask) \
+({ \
+	asm volatile("ssm %0" : : "Q" (__mask) : "memory"); \
+})
+
+#else /* __GNUC__ */
+
+/* store then or system mask. */
+#define __raw_local_irq_stosm(__or) \
+({ \
+	unsigned long __mask; \
+	asm volatile( \
+		"	stosm	0(%1),%2" \
+		: "=m" (__mask) \
+		: "a" (&__mask), "i" (__or) : "memory"); \
+	__mask; \
+})
+
+/* store then and system mask. */
+#define __raw_local_irq_stnsm(__and) \
+({ \
+	unsigned long __mask; \
+	asm volatile( \
+		"	stnsm	0(%1),%2" \
+		: "=m" (__mask) \
+		: "a" (&__mask), "i" (__and) : "memory"); \
+	__mask; \
+})
+
+/* set system mask. */
+#define __raw_local_irq_ssm(__mask) \
+({ \
+	asm volatile( \
+		"	ssm	0(%0)" \
+		: : "a" (&__mask), "m" (__mask) : "memory"); \
+})
+
+#endif /* __GNUC__ */
+
+/* interrupt control.. */
+static inline unsigned long raw_local_irq_enable(void)
+{
+	return __raw_local_irq_stosm(0x03);
+}
+
+static inline unsigned long raw_local_irq_disable(void)
+{
+	return __raw_local_irq_stnsm(0xfc);
+}
+
+#define raw_local_save_flags(x) \
+do { \
+	typecheck(unsigned long, x); \
+	(x) = __raw_local_irq_stosm(0x00); \
+} while (0)
+
+static inline void raw_local_irq_restore(unsigned long flags)
+{
+	__raw_local_irq_ssm(flags);
+}
+
 static inline int raw_irqs_disabled_flags(unsigned long flags)
 {
-	return !((flags >> __FLAG_SHIFT) & 3);
+	return !(flags & (3UL << (BITS_PER_LONG - 8)));
 }
 
 /* For spinlocks etc */
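
The last change above replaces the shift-based interrupt test with a mask
test. A small stand-alone sketch (not kernel code) of why the two are
equivalent, assuming __FLAG_SHIFT equals BITS_PER_LONG - 8: stosm stores the
PSW system-mask byte into the most significant byte of the flags word, and
bits 6 and 7 of that byte are the I/O and external interrupt masks.

	#include <assert.h>

	#define BITS_PER_LONG	(8 * (int)sizeof(unsigned long))
	#define __FLAG_SHIFT	(BITS_PER_LONG - 8)	/* assumption, see above */

	/* test from the removed line */
	static int old_irqs_disabled(unsigned long flags)
	{
		return !((flags >> __FLAG_SHIFT) & 3);
	}

	/* test from the added line */
	static int new_irqs_disabled(unsigned long flags)
	{
		return !(flags & (3UL << (BITS_PER_LONG - 8)));
	}

	int main(void)
	{
		unsigned long byte;

		/* walk all values of the system-mask byte; both tests agree */
		for (byte = 0; byte < 256; byte++) {
			unsigned long flags = byte << (BITS_PER_LONG - 8);
			assert(old_irqs_disabled(flags) == new_irqs_disabled(flags));
		}
		return 0;
	}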