preempt.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_PREEMPT_H
#define __ASM_PREEMPT_H

#include <asm/current.h>
#include <linux/thread_info.h>
#include <asm/atomic_ops.h>

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES

/* We use the MSB mostly because it's available */
#define PREEMPT_NEED_RESCHED	0x80000000
#define PREEMPT_ENABLED		(0 + PREEMPT_NEED_RESCHED)
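
/*
 * Note on the encoding: PREEMPT_NEED_RESCHED is stored inverted.  The
 * bit is *set* while no reschedule is pending, so "preemption enabled,
 * nothing to do" is the raw value 0x80000000 and "count zero, resched
 * pending" is raw 0.  Folding the flag into the count this way lets a
 * single compare against zero/one test both conditions at once, which
 * __preempt_count_dec_and_test() below relies on.
 */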

static inline int preempt_count(void)
{
	return READ_ONCE(S390_lowcore.preempt_count) & ~PREEMPT_NEED_RESCHED;
}

static inline void preempt_count_set(int pc)
{
	int old, new;

	do {
		old = READ_ONCE(S390_lowcore.preempt_count);
		new = (old & PREEMPT_NEED_RESCHED) |
		      (pc & ~PREEMPT_NEED_RESCHED);
	} while (__atomic_cmpxchg(&S390_lowcore.preempt_count,
				  old, new) != old);
}
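
/*
 * The cmpxchg loop above deliberately carries the PREEMPT_NEED_RESCHED
 * bit over from the old value: setting the count must not lose a
 * reschedule request posted (or retracted) concurrently, e.g. from an
 * interrupt.
 */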

static inline void set_preempt_need_resched(void)
{
	__atomic_and(~PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);
}

static inline void clear_preempt_need_resched(void)
{
	__atomic_or(PREEMPT_NEED_RESCHED, &S390_lowcore.preempt_count);
}

static inline bool test_preempt_need_resched(void)
{
	return !(READ_ONCE(S390_lowcore.preempt_count) & PREEMPT_NEED_RESCHED);
}
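
/*
 * The three helpers above look backwards only because the flag itself
 * is inverted: a pending reschedule is "set" by ANDing the bit away,
 * "cleared" by ORing it back in, and tested by checking that the bit
 * is absent.  Each maps to a single interlocked load-and-and/or
 * operation on z196 and newer, so no cmpxchg loop is needed here.
 */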

static inline void __preempt_count_add(int val)
{
	/*
	 * With some obscure config options and CONFIG_PROFILE_ALL_BRANCHES
	 * enabled, gcc 12 fails to handle __builtin_constant_p().
	 */
	if (!IS_ENABLED(CONFIG_PROFILE_ALL_BRANCHES)) {
		if (__builtin_constant_p(val) && (val >= -128) && (val <= 127)) {
			__atomic_add_const(val, &S390_lowcore.preempt_count);
			return;
		}
	}
	__atomic_add(val, &S390_lowcore.preempt_count);
}
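
/*
 * The -128..127 window matches a signed 8-bit immediate, which
 * __atomic_add_const() is presumably able to encode directly in an
 * add-immediate instruction (asi/agsi); constant deltas in that range
 * avoid loading the operand into a register first.
 */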

static inline void __preempt_count_sub(int val)
{
	__preempt_count_add(-val);
}

static inline bool __preempt_count_dec_and_test(void)
{
	return __atomic_add(-1, &S390_lowcore.preempt_count) == 1;
}
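
/*
 * __atomic_add() returns the *old* value, so reading 1 here means the
 * new raw value is 0: the preempt count dropped to zero and the
 * (inverted) PREEMPT_NEED_RESCHED bit is clear, i.e. a reschedule is
 * pending.  A single decrement thus answers "may we preempt now?".
 */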

static inline bool should_resched(int preempt_offset)
{
	/*
	 * Equality only holds when the NEED_RESCHED bit is clear (resched
	 * pending) and the count equals the caller's expected offset.
	 */
	return unlikely(READ_ONCE(S390_lowcore.preempt_count) ==
			preempt_offset);
}

#else /* CONFIG_HAVE_MARCH_Z196_FEATURES */
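
/*
 * Fallback for machines without the z196 interlocked-access
 * instructions: the resched flag cannot be folded into the count
 * atomically, so it stays in TIF_NEED_RESCHED and the flag helpers
 * below become no-ops.  Plain read-modify-write on the per-CPU lowcore
 * should be sufficient here, since interrupt paths leave the count
 * balanced before returning.
 */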

#define PREEMPT_ENABLED	(0)

static inline int preempt_count(void)
{
	return READ_ONCE(S390_lowcore.preempt_count);
}

static inline void preempt_count_set(int pc)
{
	S390_lowcore.preempt_count = pc;
}

static inline void set_preempt_need_resched(void)
{
}

static inline void clear_preempt_need_resched(void)
{
}

static inline bool test_preempt_need_resched(void)
{
	return false;
}

static inline void __preempt_count_add(int val)
{
	S390_lowcore.preempt_count += val;
}

static inline void __preempt_count_sub(int val)
{
	S390_lowcore.preempt_count -= val;
}

static inline bool __preempt_count_dec_and_test(void)
{
	return !--S390_lowcore.preempt_count && tif_need_resched();
}

static inline bool should_resched(int preempt_offset)
{
	return unlikely(preempt_count() == preempt_offset &&
			tif_need_resched());
}

#endif /* CONFIG_HAVE_MARCH_Z196_FEATURES */

#define init_task_preempt_count(p)	do { } while (0)
/* Deferred to CPU bringup time */
#define init_idle_preempt_count(p, cpu)	do { } while (0)

#ifdef CONFIG_PREEMPTION

extern void preempt_schedule(void);
#define __preempt_schedule() preempt_schedule()
extern void preempt_schedule_notrace(void);
#define __preempt_schedule_notrace() preempt_schedule_notrace()
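
/*
 * Usage sketch (an assumption, following the generic <linux/preempt.h>
 * wiring rather than anything defined in this file): the core
 * preempt_enable() expands to roughly
 *
 *	barrier();
 *	if (unlikely(preempt_count_dec_and_test()))
 *		__preempt_schedule();
 *
 * so the two hooks above are the arch entry points invoked whenever
 * dropping the count uncovers a pending reschedule.
 */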

#endif /* CONFIG_PREEMPTION */

#endif /* __ASM_PREEMPT_H */