/* arch/x86/include/asm/qspinlock_paravirt.h */
  1. /* SPDX-License-Identifier: GPL-2.0 */
  2. #ifndef __ASM_QSPINLOCK_PARAVIRT_H
  3. #define __ASM_QSPINLOCK_PARAVIRT_H
  4. #include <asm/ibt.h>
  5. /*
  6. * For x86-64, PV_CALLEE_SAVE_REGS_THUNK() saves and restores 8 64-bit
  7. * registers. For i386, however, only 1 32-bit register needs to be saved
  8. * and restored. So an optimized version of __pv_queued_spin_unlock() is
  9. * hand-coded for 64-bit, but it isn't worthwhile to do it for 32-bit.
  10. */
  11. #ifdef CONFIG_64BIT
  12. __PV_CALLEE_SAVE_REGS_THUNK(__pv_queued_spin_unlock_slowpath, ".spinlock.text");
  13. #define __pv_queued_spin_unlock __pv_queued_spin_unlock
  14. #define PV_UNLOCK "__raw_callee_save___pv_queued_spin_unlock"
  15. #define PV_UNLOCK_SLOWPATH "__raw_callee_save___pv_queued_spin_unlock_slowpath"
  16. /*
  17. * Optimized assembly version of __raw_callee_save___pv_queued_spin_unlock
  18. * which combines the registers saving trunk and the body of the following
  19. * C code. Note that it puts the code in the .spinlock.text section which
  20. * is equivalent to adding __lockfunc in the C code:
  21. *
  22. * void __lockfunc __pv_queued_spin_unlock(struct qspinlock *lock)
  23. * {
  24. * u8 lockval = cmpxchg(&lock->locked, _Q_LOCKED_VAL, 0);
  25. *
  26. * if (likely(lockval == _Q_LOCKED_VAL))
  27. * return;
  28. * pv_queued_spin_unlock_slowpath(lock, lockval);
  29. * }
  30. *
  31. * For x86-64,
  32. * rdi = lock (first argument)
  33. * rsi = lockval (second argument)
  34. * rdx = internal variable (set to 0)
  35. */
  36. asm (".pushsection .spinlock.text, \"ax\";"
  37. ".globl " PV_UNLOCK ";"
  38. ".type " PV_UNLOCK ", @function;"
  39. ".align 4,0x90;"
  40. PV_UNLOCK ": "
  41. ASM_ENDBR
  42. FRAME_BEGIN
  43. "push %rdx;"
  44. "mov $0x1,%eax;"
  45. "xor %edx,%edx;"
  46. LOCK_PREFIX "cmpxchg %dl,(%rdi);"
  47. "cmp $0x1,%al;"
  48. "jne .slowpath;"
  49. "pop %rdx;"
  50. FRAME_END
  51. ASM_RET
  52. ".slowpath: "
  53. "push %rsi;"
  54. "movzbl %al,%esi;"
  55. "call " PV_UNLOCK_SLOWPATH ";"
  56. "pop %rsi;"
  57. "pop %rdx;"
  58. FRAME_END
  59. ASM_RET
  60. ".size " PV_UNLOCK ", .-" PV_UNLOCK ";"
  61. ".popsection");
  62. #else /* CONFIG_64BIT */
  63. extern void __lockfunc __pv_queued_spin_unlock(struct qspinlock *lock);
  64. __PV_CALLEE_SAVE_REGS_THUNK(__pv_queued_spin_unlock, ".spinlock.text");
  65. #endif /* CONFIG_64BIT */
  66. #endif