atomic64_cx8_32.S

/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * atomic64_t for 586+
 *
 * Copyright © 2010 Luca Barbieri
 */

#include <linux/linkage.h>
#include <asm/alternative.h>
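
/*
 * Note: these helpers use a non-standard, register-based calling
 * convention; the C wrappers (see <asm/atomic64_32.h>) pass the
 * atomic64_t pointer and the 64-bit operands in fixed registers,
 * described in the comment above each routine below.  All routines
 * require a CPU with the CMPXCHG8B instruction (586 and newer).
 */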

.macro read64 reg
	movl %ebx, %eax
	movl %ecx, %edx
/*
 * The expected value (%edx:%eax) is loaded with the new value (%ecx:%ebx)
 * above, so memory is left unchanged either way and %edx:%eax ends up
 * holding the current 64-bit value.  We need LOCK_PREFIX since cmpxchg8b
 * always does the write (it rewrites the old value when the comparison
 * fails); without it a concurrent store could be clobbered by stale data.
 */
	LOCK_PREFIX
	cmpxchg8b (\reg)
.endm
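
/*
 * Read: %ecx holds the atomic64_t pointer; the value is returned
 * in %edx:%eax.
 */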
SYM_FUNC_START(atomic64_read_cx8)
	read64 %ecx
	RET
SYM_FUNC_END(atomic64_read_cx8)
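
/*
 * Set: %esi holds the atomic64_t pointer, %ecx:%ebx the new value.
 * The cmpxchg8b loop retries until the store goes through.
 */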
SYM_FUNC_START(atomic64_set_cx8)
1:
/* we don't need LOCK_PREFIX since aligned 64-bit writes
 * are atomic on 586 and newer */
	cmpxchg8b (%esi)
	jne 1b

	RET
SYM_FUNC_END(atomic64_set_cx8)
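
/*
 * Exchange: %esi holds the atomic64_t pointer, %ecx:%ebx the new
 * value; the old value is returned in %edx:%eax.
 */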
SYM_FUNC_START(atomic64_xchg_cx8)
1:
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	RET
SYM_FUNC_END(atomic64_xchg_cx8)
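
/*
 * add_return/sub_return: %edx:%eax holds the operand, %ecx the
 * atomic64_t pointer; the new value is returned in %edx:%eax.
 * \ins and \insc name the low- and high-word instructions
 * (add/adc or sub/sbb).
 */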
.macro addsub_return func ins insc
SYM_FUNC_START(atomic64_\func\()_return_cx8)
	pushl %ebp
	pushl %ebx
	pushl %esi
	pushl %edi

	movl %eax, %esi
	movl %edx, %edi
	movl %ecx, %ebp

	read64 %ecx
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l %esi, %ebx
	\insc\()l %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%ebp)
	jne 1b
10:
	movl %ebx, %eax
	movl %ecx, %edx
	popl %edi
	popl %esi
	popl %ebx
	popl %ebp
	RET
SYM_FUNC_END(atomic64_\func\()_return_cx8)
.endm

addsub_return add add adc
addsub_return sub sub sbb
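
/*
 * inc_return/dec_return: %esi holds the atomic64_t pointer; the new
 * value is returned in %edx:%eax.
 */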
.macro incdec_return func ins insc
SYM_FUNC_START(atomic64_\func\()_return_cx8)
	pushl %ebx

	read64 %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	\ins\()l $1, %ebx
	\insc\()l $0, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b
10:
	movl %ebx, %eax
	movl %ecx, %edx
	popl %ebx
	RET
SYM_FUNC_END(atomic64_\func\()_return_cx8)
.endm

incdec_return inc add adc
incdec_return dec sub sbb
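
/*
 * dec_if_positive: %esi holds the atomic64_t pointer; the decremented
 * value is returned in %edx:%eax, but it is stored back only when the
 * result is not negative.
 */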
SYM_FUNC_START(atomic64_dec_if_positive_cx8)
	pushl %ebx

	read64 %esi
1:
	movl %eax, %ebx
	movl %edx, %ecx
	subl $1, %ebx
	sbbl $0, %ecx
	js 2f
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b
2:
	movl %ebx, %eax
	movl %ecx, %edx
	popl %ebx
	RET
SYM_FUNC_END(atomic64_dec_if_positive_cx8)
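
/*
 * add_unless: %esi holds the atomic64_t pointer, %eax/%edx the low/high
 * words of the addend, and %ecx/%edi the low/high words of the value to
 * compare against.  Returns 1 in %eax if the addition was performed,
 * 0 if the atomic64_t already held the compare value.
 */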
SYM_FUNC_START(atomic64_add_unless_cx8)
	pushl %ebp
	pushl %ebx
/* these just push the compare value on the stack:
 * low word %ecx at 0(%esp), high word %edi at 4(%esp) */
	pushl %edi
	pushl %ecx

	movl %eax, %ebp
	movl %edx, %edi

	read64 %esi
1:
	cmpl %eax, 0(%esp)
	je 4f
2:
	movl %eax, %ebx
	movl %edx, %ecx
	addl %ebp, %ebx
	adcl %edi, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	movl $1, %eax
3:
	addl $8, %esp
	popl %ebx
	popl %ebp
	RET
4:
	cmpl %edx, 4(%esp)
	jne 2b
	xorl %eax, %eax
	jmp 3b
SYM_FUNC_END(atomic64_add_unless_cx8)
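
/*
 * inc_not_zero: %esi holds the atomic64_t pointer.  Returns 1 in %eax
 * if the value was non-zero and has been incremented, 0 if it was zero.
 */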
SYM_FUNC_START(atomic64_inc_not_zero_cx8)
	pushl %ebx

	read64 %esi
1:
	movl %eax, %ecx
	orl %edx, %ecx
	jz 3f
	movl %eax, %ebx
	xorl %ecx, %ecx
	addl $1, %ebx
	adcl %edx, %ecx
	LOCK_PREFIX
	cmpxchg8b (%esi)
	jne 1b

	movl $1, %eax
3:
	popl %ebx
	RET
SYM_FUNC_END(atomic64_inc_not_zero_cx8)