atomic-arch-fallback.h

// SPDX-License-Identifier: GPL-2.0
// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>

#ifndef arch_xchg_relaxed
#define arch_xchg_acquire arch_xchg
#define arch_xchg_release arch_xchg
#define arch_xchg_relaxed arch_xchg
#else /* arch_xchg_relaxed */
#ifndef arch_xchg_acquire
#define arch_xchg_acquire(...) \
        __atomic_op_acquire(arch_xchg, __VA_ARGS__)
#endif
#ifndef arch_xchg_release
#define arch_xchg_release(...) \
        __atomic_op_release(arch_xchg, __VA_ARGS__)
#endif
#ifndef arch_xchg
#define arch_xchg(...) \
        __atomic_op_fence(arch_xchg, __VA_ARGS__)
#endif
#endif /* arch_xchg_relaxed */
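
/*
 * Note on the wrappers used above: __atomic_op_acquire(), __atomic_op_release()
 * and __atomic_op_fence() are not defined in this file; they are supplied by
 * <linux/atomic.h>. As a rough, illustrative sketch (not the authoritative
 * definitions), they build the ordered variants out of the _relaxed op along
 * these lines:
 *
 *	#define __atomic_op_acquire(op, args...)			\
 *	({								\
 *		typeof(op##_relaxed(args)) __ret = op##_relaxed(args);	\
 *		__atomic_acquire_fence();				\
 *		__ret;							\
 *	})
 *
 *	#define __atomic_op_release(op, args...)			\
 *	({								\
 *		__atomic_release_fence();				\
 *		op##_relaxed(args);					\
 *	})
 *
 *	#define __atomic_op_fence(op, args...)				\
 *	({								\
 *		typeof(op##_relaxed(args)) __ret;			\
 *		__atomic_pre_full_fence();				\
 *		__ret = op##_relaxed(args);				\
 *		__atomic_post_full_fence();				\
 *		__ret;							\
 *	})
 */
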
#ifndef arch_cmpxchg_relaxed
#define arch_cmpxchg_acquire arch_cmpxchg
#define arch_cmpxchg_release arch_cmpxchg
#define arch_cmpxchg_relaxed arch_cmpxchg
#else /* arch_cmpxchg_relaxed */
#ifndef arch_cmpxchg_acquire
#define arch_cmpxchg_acquire(...) \
        __atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
#endif
#ifndef arch_cmpxchg_release
#define arch_cmpxchg_release(...) \
        __atomic_op_release(arch_cmpxchg, __VA_ARGS__)
#endif
#ifndef arch_cmpxchg
#define arch_cmpxchg(...) \
        __atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
#endif
#endif /* arch_cmpxchg_relaxed */

#ifndef arch_cmpxchg64_relaxed
#define arch_cmpxchg64_acquire arch_cmpxchg64
#define arch_cmpxchg64_release arch_cmpxchg64
#define arch_cmpxchg64_relaxed arch_cmpxchg64
#else /* arch_cmpxchg64_relaxed */
#ifndef arch_cmpxchg64_acquire
#define arch_cmpxchg64_acquire(...) \
        __atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
#endif
#ifndef arch_cmpxchg64_release
#define arch_cmpxchg64_release(...) \
        __atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
#endif
#ifndef arch_cmpxchg64
#define arch_cmpxchg64(...) \
        __atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
#endif
#endif /* arch_cmpxchg64_relaxed */

#ifndef arch_try_cmpxchg_relaxed
#ifdef arch_try_cmpxchg
#define arch_try_cmpxchg_acquire arch_try_cmpxchg
#define arch_try_cmpxchg_release arch_try_cmpxchg
#define arch_try_cmpxchg_relaxed arch_try_cmpxchg
#endif /* arch_try_cmpxchg */
#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(_ptr, _oldp, _new) \
({ \
        typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
        ___r = arch_cmpxchg((_ptr), ___o, (_new)); \
        if (unlikely(___r != ___o)) \
                *___op = ___r; \
        likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg */
#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
({ \
        typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
        ___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
        if (unlikely(___r != ___o)) \
                *___op = ___r; \
        likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_acquire */
#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
({ \
        typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
        ___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
        if (unlikely(___r != ___o)) \
                *___op = ___r; \
        likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_release */
#ifndef arch_try_cmpxchg_relaxed
#define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
({ \
        typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
        ___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
        if (unlikely(___r != ___o)) \
                *___op = ___r; \
        likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg_relaxed */
#else /* arch_try_cmpxchg_relaxed */
#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(...) \
        __atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
#endif
#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(...) \
        __atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
#endif
#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(...) \
        __atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
#endif
#endif /* arch_try_cmpxchg_relaxed */
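
/*
 * Illustrative usage (not part of this header): try_cmpxchg() is intended for
 * compare-and-swap loops. On failure it writes the value actually found back
 * into the caller's expected value, so the loop does not need to re-read the
 * variable itself. A hypothetical helper setting a flag bit might look like:
 *
 *	static void set_flag(unsigned long *ptr, unsigned long flag)
 *	{
 *		unsigned long old = READ_ONCE(*ptr);
 *
 *		do {
 *			// retry with the refreshed 'old' until the CAS wins
 *		} while (!arch_try_cmpxchg(ptr, &old, old | flag));
 *	}
 *
 * Most kernel code would use the instrumented try_cmpxchg() wrapper rather
 * than the arch_ prefixed form directly.
 */
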
#ifndef arch_try_cmpxchg64_relaxed
#ifdef arch_try_cmpxchg64
#define arch_try_cmpxchg64_acquire arch_try_cmpxchg64
#define arch_try_cmpxchg64_release arch_try_cmpxchg64
#define arch_try_cmpxchg64_relaxed arch_try_cmpxchg64
#endif /* arch_try_cmpxchg64 */
#ifndef arch_try_cmpxchg64
#define arch_try_cmpxchg64(_ptr, _oldp, _new) \
({ \
        typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
        ___r = arch_cmpxchg64((_ptr), ___o, (_new)); \
        if (unlikely(___r != ___o)) \
                *___op = ___r; \
        likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64 */
#ifndef arch_try_cmpxchg64_acquire
#define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \
({ \
        typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
        ___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \
        if (unlikely(___r != ___o)) \
                *___op = ___r; \
        likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_acquire */
#ifndef arch_try_cmpxchg64_release
#define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \
({ \
        typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
        ___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \
        if (unlikely(___r != ___o)) \
                *___op = ___r; \
        likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_release */
#ifndef arch_try_cmpxchg64_relaxed
#define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \
({ \
        typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
        ___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \
        if (unlikely(___r != ___o)) \
                *___op = ___r; \
        likely(___r == ___o); \
})
#endif /* arch_try_cmpxchg64_relaxed */
#else /* arch_try_cmpxchg64_relaxed */
#ifndef arch_try_cmpxchg64_acquire
#define arch_try_cmpxchg64_acquire(...) \
        __atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__)
#endif
#ifndef arch_try_cmpxchg64_release
#define arch_try_cmpxchg64_release(...) \
        __atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__)
#endif
#ifndef arch_try_cmpxchg64
#define arch_try_cmpxchg64(...) \
        __atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__)
#endif
#endif /* arch_try_cmpxchg64_relaxed */

#ifndef arch_atomic_read_acquire
static __always_inline int
arch_atomic_read_acquire(const atomic_t *v)
{
        int ret;
        if (__native_word(atomic_t)) {
                ret = smp_load_acquire(&(v)->counter);
        } else {
                ret = arch_atomic_read(v);
                __atomic_acquire_fence();
        }
        return ret;
}
#define arch_atomic_read_acquire arch_atomic_read_acquire
#endif

#ifndef arch_atomic_set_release
static __always_inline void
arch_atomic_set_release(atomic_t *v, int i)
{
        if (__native_word(atomic_t)) {
                smp_store_release(&(v)->counter, i);
        } else {
                __atomic_release_fence();
                arch_atomic_set(v, i);
        }
}
#define arch_atomic_set_release arch_atomic_set_release
#endif

#ifndef arch_atomic_add_return_relaxed
#define arch_atomic_add_return_acquire arch_atomic_add_return
#define arch_atomic_add_return_release arch_atomic_add_return
#define arch_atomic_add_return_relaxed arch_atomic_add_return
#else /* arch_atomic_add_return_relaxed */
#ifndef arch_atomic_add_return_acquire
static __always_inline int
arch_atomic_add_return_acquire(int i, atomic_t *v)
{
        int ret = arch_atomic_add_return_relaxed(i, v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
#endif
#ifndef arch_atomic_add_return_release
static __always_inline int
arch_atomic_add_return_release(int i, atomic_t *v)
{
        __atomic_release_fence();
        return arch_atomic_add_return_relaxed(i, v);
}
#define arch_atomic_add_return_release arch_atomic_add_return_release
#endif
#ifndef arch_atomic_add_return
static __always_inline int
arch_atomic_add_return(int i, atomic_t *v)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_add_return_relaxed(i, v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_add_return arch_atomic_add_return
#endif
#endif /* arch_atomic_add_return_relaxed */
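
/*
 * Note: the fences used by the fallbacks above and below
 * (__atomic_acquire_fence(), __atomic_release_fence(),
 * __atomic_pre_full_fence(), __atomic_post_full_fence()) are supplied by
 * <linux/atomic.h>, where an architecture may override them. As a rough
 * sketch of the usual defaults (illustrative, not authoritative):
 *
 *	#ifndef __atomic_acquire_fence
 *	#define __atomic_acquire_fence		smp_mb__after_atomic
 *	#endif
 *
 *	#ifndef __atomic_release_fence
 *	#define __atomic_release_fence		smp_mb__before_atomic
 *	#endif
 *
 *	#ifndef __atomic_pre_full_fence
 *	#define __atomic_pre_full_fence	smp_mb__before_atomic
 *	#endif
 *
 *	#ifndef __atomic_post_full_fence
 *	#define __atomic_post_full_fence	smp_mb
 *	#endif
 */
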
#ifndef arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
#define arch_atomic_fetch_add_release arch_atomic_fetch_add
#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
#else /* arch_atomic_fetch_add_relaxed */
#ifndef arch_atomic_fetch_add_acquire
static __always_inline int
arch_atomic_fetch_add_acquire(int i, atomic_t *v)
{
        int ret = arch_atomic_fetch_add_relaxed(i, v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
#endif
#ifndef arch_atomic_fetch_add_release
static __always_inline int
arch_atomic_fetch_add_release(int i, atomic_t *v)
{
        __atomic_release_fence();
        return arch_atomic_fetch_add_relaxed(i, v);
}
#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
#endif
#ifndef arch_atomic_fetch_add
static __always_inline int
arch_atomic_fetch_add(int i, atomic_t *v)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_fetch_add_relaxed(i, v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_fetch_add arch_atomic_fetch_add
#endif
#endif /* arch_atomic_fetch_add_relaxed */

#ifndef arch_atomic_sub_return_relaxed
#define arch_atomic_sub_return_acquire arch_atomic_sub_return
#define arch_atomic_sub_return_release arch_atomic_sub_return
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return
#else /* arch_atomic_sub_return_relaxed */
#ifndef arch_atomic_sub_return_acquire
static __always_inline int
arch_atomic_sub_return_acquire(int i, atomic_t *v)
{
        int ret = arch_atomic_sub_return_relaxed(i, v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
#endif
#ifndef arch_atomic_sub_return_release
static __always_inline int
arch_atomic_sub_return_release(int i, atomic_t *v)
{
        __atomic_release_fence();
        return arch_atomic_sub_return_relaxed(i, v);
}
#define arch_atomic_sub_return_release arch_atomic_sub_return_release
#endif
#ifndef arch_atomic_sub_return
static __always_inline int
arch_atomic_sub_return(int i, atomic_t *v)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_sub_return_relaxed(i, v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_sub_return arch_atomic_sub_return
#endif
#endif /* arch_atomic_sub_return_relaxed */

#ifndef arch_atomic_fetch_sub_relaxed
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
#else /* arch_atomic_fetch_sub_relaxed */
#ifndef arch_atomic_fetch_sub_acquire
static __always_inline int
arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
{
        int ret = arch_atomic_fetch_sub_relaxed(i, v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
#endif
#ifndef arch_atomic_fetch_sub_release
static __always_inline int
arch_atomic_fetch_sub_release(int i, atomic_t *v)
{
        __atomic_release_fence();
        return arch_atomic_fetch_sub_relaxed(i, v);
}
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
#endif
#ifndef arch_atomic_fetch_sub
static __always_inline int
arch_atomic_fetch_sub(int i, atomic_t *v)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_fetch_sub_relaxed(i, v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_fetch_sub arch_atomic_fetch_sub
#endif
#endif /* arch_atomic_fetch_sub_relaxed */

#ifndef arch_atomic_inc
static __always_inline void
arch_atomic_inc(atomic_t *v)
{
        arch_atomic_add(1, v);
}
#define arch_atomic_inc arch_atomic_inc
#endif

#ifndef arch_atomic_inc_return_relaxed
#ifdef arch_atomic_inc_return
#define arch_atomic_inc_return_acquire arch_atomic_inc_return
#define arch_atomic_inc_return_release arch_atomic_inc_return
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return
#endif /* arch_atomic_inc_return */
#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
        return arch_atomic_add_return(1, v);
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif
#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
        return arch_atomic_add_return_acquire(1, v);
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif
#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
        return arch_atomic_add_return_release(1, v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif
#ifndef arch_atomic_inc_return_relaxed
static __always_inline int
arch_atomic_inc_return_relaxed(atomic_t *v)
{
        return arch_atomic_add_return_relaxed(1, v);
}
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
#endif
#else /* arch_atomic_inc_return_relaxed */
#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
        int ret = arch_atomic_inc_return_relaxed(v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif
#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
        __atomic_release_fence();
        return arch_atomic_inc_return_relaxed(v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif
#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_inc_return_relaxed(v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif
#endif /* arch_atomic_inc_return_relaxed */

#ifndef arch_atomic_fetch_inc_relaxed
#ifdef arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
#endif /* arch_atomic_fetch_inc */
#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
        return arch_atomic_fetch_add(1, v);
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif
#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
        return arch_atomic_fetch_add_acquire(1, v);
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif
#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
        return arch_atomic_fetch_add_release(1, v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif
#ifndef arch_atomic_fetch_inc_relaxed
static __always_inline int
arch_atomic_fetch_inc_relaxed(atomic_t *v)
{
        return arch_atomic_fetch_add_relaxed(1, v);
}
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
#endif
#else /* arch_atomic_fetch_inc_relaxed */
#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
        int ret = arch_atomic_fetch_inc_relaxed(v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif
#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
        __atomic_release_fence();
        return arch_atomic_fetch_inc_relaxed(v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif
#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_fetch_inc_relaxed(v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif
#endif /* arch_atomic_fetch_inc_relaxed */

#ifndef arch_atomic_dec
static __always_inline void
arch_atomic_dec(atomic_t *v)
{
        arch_atomic_sub(1, v);
}
#define arch_atomic_dec arch_atomic_dec
#endif

#ifndef arch_atomic_dec_return_relaxed
#ifdef arch_atomic_dec_return
#define arch_atomic_dec_return_acquire arch_atomic_dec_return
#define arch_atomic_dec_return_release arch_atomic_dec_return
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return
#endif /* arch_atomic_dec_return */
#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
        return arch_atomic_sub_return(1, v);
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif
#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
        return arch_atomic_sub_return_acquire(1, v);
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif
#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
        return arch_atomic_sub_return_release(1, v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif
#ifndef arch_atomic_dec_return_relaxed
static __always_inline int
arch_atomic_dec_return_relaxed(atomic_t *v)
{
        return arch_atomic_sub_return_relaxed(1, v);
}
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
#endif
#else /* arch_atomic_dec_return_relaxed */
#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
        int ret = arch_atomic_dec_return_relaxed(v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif
#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
        __atomic_release_fence();
        return arch_atomic_dec_return_relaxed(v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif
#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_dec_return_relaxed(v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif
#endif /* arch_atomic_dec_return_relaxed */

#ifndef arch_atomic_fetch_dec_relaxed
#ifdef arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
#endif /* arch_atomic_fetch_dec */
#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
        return arch_atomic_fetch_sub(1, v);
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif
#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
        return arch_atomic_fetch_sub_acquire(1, v);
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif
#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
        return arch_atomic_fetch_sub_release(1, v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif
#ifndef arch_atomic_fetch_dec_relaxed
static __always_inline int
arch_atomic_fetch_dec_relaxed(atomic_t *v)
{
        return arch_atomic_fetch_sub_relaxed(1, v);
}
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
#endif
#else /* arch_atomic_fetch_dec_relaxed */
#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
        int ret = arch_atomic_fetch_dec_relaxed(v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif
#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
        __atomic_release_fence();
        return arch_atomic_fetch_dec_relaxed(v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif
#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_fetch_dec_relaxed(v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif
#endif /* arch_atomic_fetch_dec_relaxed */

#ifndef arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
#define arch_atomic_fetch_and_release arch_atomic_fetch_and
#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
#else /* arch_atomic_fetch_and_relaxed */
#ifndef arch_atomic_fetch_and_acquire
static __always_inline int
arch_atomic_fetch_and_acquire(int i, atomic_t *v)
{
        int ret = arch_atomic_fetch_and_relaxed(i, v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
#endif
#ifndef arch_atomic_fetch_and_release
static __always_inline int
arch_atomic_fetch_and_release(int i, atomic_t *v)
{
        __atomic_release_fence();
        return arch_atomic_fetch_and_relaxed(i, v);
}
#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
#endif
#ifndef arch_atomic_fetch_and
static __always_inline int
arch_atomic_fetch_and(int i, atomic_t *v)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_fetch_and_relaxed(i, v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_fetch_and arch_atomic_fetch_and
#endif
#endif /* arch_atomic_fetch_and_relaxed */

#ifndef arch_atomic_andnot
static __always_inline void
arch_atomic_andnot(int i, atomic_t *v)
{
        arch_atomic_and(~i, v);
}
#define arch_atomic_andnot arch_atomic_andnot
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
#ifdef arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot
#endif /* arch_atomic_fetch_andnot */
#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
        return arch_atomic_fetch_and(~i, v);
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif
#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
        return arch_atomic_fetch_and_acquire(~i, v);
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif
#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
        return arch_atomic_fetch_and_release(~i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif
#ifndef arch_atomic_fetch_andnot_relaxed
static __always_inline int
arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
        return arch_atomic_fetch_and_relaxed(~i, v);
}
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
#endif
#else /* arch_atomic_fetch_andnot_relaxed */
#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
        int ret = arch_atomic_fetch_andnot_relaxed(i, v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif
#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
        __atomic_release_fence();
        return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif
#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_fetch_andnot_relaxed(i, v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif
#endif /* arch_atomic_fetch_andnot_relaxed */

#ifndef arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
#define arch_atomic_fetch_or_release arch_atomic_fetch_or
#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or
#else /* arch_atomic_fetch_or_relaxed */
#ifndef arch_atomic_fetch_or_acquire
static __always_inline int
arch_atomic_fetch_or_acquire(int i, atomic_t *v)
{
        int ret = arch_atomic_fetch_or_relaxed(i, v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
#endif
#ifndef arch_atomic_fetch_or_release
static __always_inline int
arch_atomic_fetch_or_release(int i, atomic_t *v)
{
        __atomic_release_fence();
        return arch_atomic_fetch_or_relaxed(i, v);
}
#define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
#endif
#ifndef arch_atomic_fetch_or
static __always_inline int
arch_atomic_fetch_or(int i, atomic_t *v)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_fetch_or_relaxed(i, v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_fetch_or arch_atomic_fetch_or
#endif
#endif /* arch_atomic_fetch_or_relaxed */

#ifndef arch_atomic_fetch_xor_relaxed
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor
#else /* arch_atomic_fetch_xor_relaxed */
#ifndef arch_atomic_fetch_xor_acquire
static __always_inline int
arch_atomic_fetch_xor_acquire(int i, atomic_t *v)
{
        int ret = arch_atomic_fetch_xor_relaxed(i, v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
#endif
#ifndef arch_atomic_fetch_xor_release
static __always_inline int
arch_atomic_fetch_xor_release(int i, atomic_t *v)
{
        __atomic_release_fence();
        return arch_atomic_fetch_xor_relaxed(i, v);
}
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
#endif
#ifndef arch_atomic_fetch_xor
static __always_inline int
arch_atomic_fetch_xor(int i, atomic_t *v)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_fetch_xor_relaxed(i, v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
#endif
#endif /* arch_atomic_fetch_xor_relaxed */

#ifndef arch_atomic_xchg_relaxed
#define arch_atomic_xchg_acquire arch_atomic_xchg
#define arch_atomic_xchg_release arch_atomic_xchg
#define arch_atomic_xchg_relaxed arch_atomic_xchg
#else /* arch_atomic_xchg_relaxed */
#ifndef arch_atomic_xchg_acquire
static __always_inline int
arch_atomic_xchg_acquire(atomic_t *v, int i)
{
        int ret = arch_atomic_xchg_relaxed(v, i);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
#endif
#ifndef arch_atomic_xchg_release
static __always_inline int
arch_atomic_xchg_release(atomic_t *v, int i)
{
        __atomic_release_fence();
        return arch_atomic_xchg_relaxed(v, i);
}
#define arch_atomic_xchg_release arch_atomic_xchg_release
#endif
#ifndef arch_atomic_xchg
static __always_inline int
arch_atomic_xchg(atomic_t *v, int i)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_xchg_relaxed(v, i);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_xchg arch_atomic_xchg
#endif
#endif /* arch_atomic_xchg_relaxed */

#ifndef arch_atomic_cmpxchg_relaxed
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
#else /* arch_atomic_cmpxchg_relaxed */
#ifndef arch_atomic_cmpxchg_acquire
static __always_inline int
arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
        int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
#endif
#ifndef arch_atomic_cmpxchg_release
static __always_inline int
arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
        __atomic_release_fence();
        return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
#endif
#ifndef arch_atomic_cmpxchg
static __always_inline int
arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
        int ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_cmpxchg_relaxed(v, old, new);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
#endif
#endif /* arch_atomic_cmpxchg_relaxed */

#ifndef arch_atomic_try_cmpxchg_relaxed
#ifdef arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg
#endif /* arch_atomic_try_cmpxchg */
#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
        int r, o = *old;
        r = arch_atomic_cmpxchg(v, o, new);
        if (unlikely(r != o))
                *old = r;
        return likely(r == o);
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif
#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
        int r, o = *old;
        r = arch_atomic_cmpxchg_acquire(v, o, new);
        if (unlikely(r != o))
                *old = r;
        return likely(r == o);
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif
#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
        int r, o = *old;
        r = arch_atomic_cmpxchg_release(v, o, new);
        if (unlikely(r != o))
                *old = r;
        return likely(r == o);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif
#ifndef arch_atomic_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
        int r, o = *old;
        r = arch_atomic_cmpxchg_relaxed(v, o, new);
        if (unlikely(r != o))
                *old = r;
        return likely(r == o);
}
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed
#endif
#else /* arch_atomic_try_cmpxchg_relaxed */
#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
        bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif
#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
        __atomic_release_fence();
        return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif
#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
        bool ret;
        __atomic_pre_full_fence();
        ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif
#endif /* arch_atomic_try_cmpxchg_relaxed */

#ifndef arch_atomic_sub_and_test
/**
 * arch_atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic_sub_and_test(int i, atomic_t *v)
{
        return arch_atomic_sub_return(i, v) == 0;
}
#define arch_atomic_sub_and_test arch_atomic_sub_and_test
#endif
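
/*
 * Illustrative usage (hypothetical helper, not part of this header): drop a
 * reference and free the object once the count reaches zero.
 *
 *	static void obj_put(struct obj *obj)
 *	{
 *		if (arch_atomic_sub_and_test(1, &obj->refcnt))
 *			kfree(obj);
 *	}
 *
 * Real kernel code would normally use the instrumented atomic_sub_and_test()
 * or, for reference counts, refcount_t rather than the raw arch_ ops.
 */
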
#ifndef arch_atomic_dec_and_test
/**
 * arch_atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
arch_atomic_dec_and_test(atomic_t *v)
{
        return arch_atomic_dec_return(v) == 0;
}
#define arch_atomic_dec_and_test arch_atomic_dec_and_test
#endif

#ifndef arch_atomic_inc_and_test
/**
 * arch_atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic_inc_and_test(atomic_t *v)
{
        return arch_atomic_inc_return(v) == 0;
}
#define arch_atomic_inc_and_test arch_atomic_inc_and_test
#endif

#ifndef arch_atomic_add_negative
/**
 * arch_atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
        return arch_atomic_add_return(i, v) < 0;
}
#define arch_atomic_add_negative arch_atomic_add_negative
#endif

#ifndef arch_atomic_fetch_add_unless
/**
 * arch_atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static __always_inline int
arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
        int c = arch_atomic_read(v);
        do {
                if (unlikely(c == u))
                        break;
        } while (!arch_atomic_try_cmpxchg(v, &c, c + a));
        return c;
}
#define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
#endif

#ifndef arch_atomic_add_unless
/**
 * arch_atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
arch_atomic_add_unless(atomic_t *v, int a, int u)
{
        return arch_atomic_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic_add_unless arch_atomic_add_unless
#endif

#ifndef arch_atomic_inc_not_zero
/**
 * arch_atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
arch_atomic_inc_not_zero(atomic_t *v)
{
        return arch_atomic_add_unless(v, 1, 0);
}
#define arch_atomic_inc_not_zero arch_atomic_inc_not_zero
#endif

#ifndef arch_atomic_inc_unless_negative
static __always_inline bool
arch_atomic_inc_unless_negative(atomic_t *v)
{
        int c = arch_atomic_read(v);
        do {
                if (unlikely(c < 0))
                        return false;
        } while (!arch_atomic_try_cmpxchg(v, &c, c + 1));
        return true;
}
#define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
#endif

#ifndef arch_atomic_dec_unless_positive
static __always_inline bool
arch_atomic_dec_unless_positive(atomic_t *v)
{
        int c = arch_atomic_read(v);
        do {
                if (unlikely(c > 0))
                        return false;
        } while (!arch_atomic_try_cmpxchg(v, &c, c - 1));
        return true;
}
#define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
#endif

#ifndef arch_atomic_dec_if_positive
static __always_inline int
arch_atomic_dec_if_positive(atomic_t *v)
{
        int dec, c = arch_atomic_read(v);
        do {
                dec = c - 1;
                if (unlikely(dec < 0))
                        break;
        } while (!arch_atomic_try_cmpxchg(v, &c, dec));
        return dec;
}
#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
#endif
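
/*
 * Illustrative usage (hypothetical helper, not part of this header):
 * arch_atomic_dec_if_positive() returns the decremented value, and a
 * negative return means the counter was left unchanged. Consuming one
 * unit of an available-resource counter might look like:
 *
 *	static bool take_token(atomic_t *avail)
 *	{
 *		return arch_atomic_dec_if_positive(avail) >= 0;
 *	}
 */
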
  1168. #ifdef CONFIG_GENERIC_ATOMIC64
  1169. #include <asm-generic/atomic64.h>
  1170. #endif
  1171. #ifndef arch_atomic64_read_acquire
  1172. static __always_inline s64
  1173. arch_atomic64_read_acquire(const atomic64_t *v)
  1174. {
  1175. s64 ret;
  1176. if (__native_word(atomic64_t)) {
  1177. ret = smp_load_acquire(&(v)->counter);
  1178. } else {
  1179. ret = arch_atomic64_read(v);
  1180. __atomic_acquire_fence();
  1181. }
  1182. return ret;
  1183. }
  1184. #define arch_atomic64_read_acquire arch_atomic64_read_acquire
  1185. #endif
  1186. #ifndef arch_atomic64_set_release
  1187. static __always_inline void
  1188. arch_atomic64_set_release(atomic64_t *v, s64 i)
  1189. {
  1190. if (__native_word(atomic64_t)) {
  1191. smp_store_release(&(v)->counter, i);
  1192. } else {
  1193. __atomic_release_fence();
  1194. arch_atomic64_set(v, i);
  1195. }
  1196. }
  1197. #define arch_atomic64_set_release arch_atomic64_set_release
  1198. #endif
  1199. #ifndef arch_atomic64_add_return_relaxed
  1200. #define arch_atomic64_add_return_acquire arch_atomic64_add_return
  1201. #define arch_atomic64_add_return_release arch_atomic64_add_return
  1202. #define arch_atomic64_add_return_relaxed arch_atomic64_add_return
  1203. #else /* arch_atomic64_add_return_relaxed */
  1204. #ifndef arch_atomic64_add_return_acquire
  1205. static __always_inline s64
  1206. arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
  1207. {
  1208. s64 ret = arch_atomic64_add_return_relaxed(i, v);
  1209. __atomic_acquire_fence();
  1210. return ret;
  1211. }
  1212. #define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
  1213. #endif
  1214. #ifndef arch_atomic64_add_return_release
  1215. static __always_inline s64
  1216. arch_atomic64_add_return_release(s64 i, atomic64_t *v)
  1217. {
  1218. __atomic_release_fence();
  1219. return arch_atomic64_add_return_relaxed(i, v);
  1220. }
  1221. #define arch_atomic64_add_return_release arch_atomic64_add_return_release
  1222. #endif
  1223. #ifndef arch_atomic64_add_return
  1224. static __always_inline s64
  1225. arch_atomic64_add_return(s64 i, atomic64_t *v)
  1226. {
  1227. s64 ret;
  1228. __atomic_pre_full_fence();
  1229. ret = arch_atomic64_add_return_relaxed(i, v);
  1230. __atomic_post_full_fence();
  1231. return ret;
  1232. }
  1233. #define arch_atomic64_add_return arch_atomic64_add_return
  1234. #endif
  1235. #endif /* arch_atomic64_add_return_relaxed */
#ifndef arch_atomic64_fetch_add_relaxed
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
#define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add
#else /* arch_atomic64_fetch_add_relaxed */

#ifndef arch_atomic64_fetch_add_acquire
static __always_inline s64
arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
#endif

#ifndef arch_atomic64_fetch_add_release
static __always_inline s64
arch_atomic64_fetch_add_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_add_relaxed(i, v);
}
#define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
#endif

#ifndef arch_atomic64_fetch_add
static __always_inline s64
arch_atomic64_fetch_add(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_add arch_atomic64_fetch_add
#endif

#endif /* arch_atomic64_fetch_add_relaxed */

#ifndef arch_atomic64_sub_return_relaxed
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
#define arch_atomic64_sub_return_release arch_atomic64_sub_return
#define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return
#else /* arch_atomic64_sub_return_relaxed */

#ifndef arch_atomic64_sub_return_acquire
static __always_inline s64
arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
#endif

#ifndef arch_atomic64_sub_return_release
static __always_inline s64
arch_atomic64_sub_return_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_sub_return_relaxed(i, v);
}
#define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
#endif

#ifndef arch_atomic64_sub_return
static __always_inline s64
arch_atomic64_sub_return(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_sub_return arch_atomic64_sub_return
#endif

#endif /* arch_atomic64_sub_return_relaxed */

#ifndef arch_atomic64_fetch_sub_relaxed
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
#define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub
#else /* arch_atomic64_fetch_sub_relaxed */

#ifndef arch_atomic64_fetch_sub_acquire
static __always_inline s64
arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
#endif

#ifndef arch_atomic64_fetch_sub_release
static __always_inline s64
arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
#define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
#endif

#ifndef arch_atomic64_fetch_sub
static __always_inline s64
arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
#endif

#endif /* arch_atomic64_fetch_sub_relaxed */
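
/*
 * The inc/dec families below are derived from add/sub of 1 whenever the
 * architecture does not supply them directly.
 */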
#ifndef arch_atomic64_inc
static __always_inline void
arch_atomic64_inc(atomic64_t *v)
{
	arch_atomic64_add(1, v);
}
#define arch_atomic64_inc arch_atomic64_inc
#endif

#ifndef arch_atomic64_inc_return_relaxed
#ifdef arch_atomic64_inc_return
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return
#define arch_atomic64_inc_return_release arch_atomic64_inc_return
#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return
#endif /* arch_atomic64_inc_return */

#ifndef arch_atomic64_inc_return
static __always_inline s64
arch_atomic64_inc_return(atomic64_t *v)
{
	return arch_atomic64_add_return(1, v);
}
#define arch_atomic64_inc_return arch_atomic64_inc_return
#endif

#ifndef arch_atomic64_inc_return_acquire
static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t *v)
{
	return arch_atomic64_add_return_acquire(1, v);
}
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
#endif

#ifndef arch_atomic64_inc_return_release
static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t *v)
{
	return arch_atomic64_add_return_release(1, v);
}
#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
#endif

#ifndef arch_atomic64_inc_return_relaxed
static __always_inline s64
arch_atomic64_inc_return_relaxed(atomic64_t *v)
{
	return arch_atomic64_add_return_relaxed(1, v);
}
#define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed
#endif

#else /* arch_atomic64_inc_return_relaxed */

#ifndef arch_atomic64_inc_return_acquire
static __always_inline s64
arch_atomic64_inc_return_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
#endif

#ifndef arch_atomic64_inc_return_release
static __always_inline s64
arch_atomic64_inc_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_inc_return_relaxed(v);
}
#define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
#endif

#ifndef arch_atomic64_inc_return
static __always_inline s64
arch_atomic64_inc_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_inc_return arch_atomic64_inc_return
#endif

#endif /* arch_atomic64_inc_return_relaxed */

#ifndef arch_atomic64_fetch_inc_relaxed
#ifdef arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc
#endif /* arch_atomic64_fetch_inc */

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	return arch_atomic64_fetch_add(1, v);
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_add_acquire(1, v);
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	return arch_atomic64_fetch_add_release(1, v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc_relaxed
static __always_inline s64
arch_atomic64_fetch_inc_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_add_relaxed(1, v);
}
#define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed
#endif

#else /* arch_atomic64_fetch_inc_relaxed */

#ifndef arch_atomic64_fetch_inc_acquire
static __always_inline s64
arch_atomic64_fetch_inc_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
#endif

#ifndef arch_atomic64_fetch_inc_release
static __always_inline s64
arch_atomic64_fetch_inc_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_inc_relaxed(v);
}
#define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
#endif

#ifndef arch_atomic64_fetch_inc
static __always_inline s64
arch_atomic64_fetch_inc(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
#endif

#endif /* arch_atomic64_fetch_inc_relaxed */

#ifndef arch_atomic64_dec
static __always_inline void
arch_atomic64_dec(atomic64_t *v)
{
	arch_atomic64_sub(1, v);
}
#define arch_atomic64_dec arch_atomic64_dec
#endif

#ifndef arch_atomic64_dec_return_relaxed
#ifdef arch_atomic64_dec_return
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
#define arch_atomic64_dec_return_release arch_atomic64_dec_return
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
#endif /* arch_atomic64_dec_return */

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	return arch_atomic64_sub_return(1, v);
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	return arch_atomic64_sub_return_acquire(1, v);
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	return arch_atomic64_sub_return_release(1, v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return_relaxed
static __always_inline s64
arch_atomic64_dec_return_relaxed(atomic64_t *v)
{
	return arch_atomic64_sub_return_relaxed(1, v);
}
#define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed
#endif

#else /* arch_atomic64_dec_return_relaxed */

#ifndef arch_atomic64_dec_return_acquire
static __always_inline s64
arch_atomic64_dec_return_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
#endif

#ifndef arch_atomic64_dec_return_release
static __always_inline s64
arch_atomic64_dec_return_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_dec_return_relaxed(v);
}
#define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
#endif

#ifndef arch_atomic64_dec_return
static __always_inline s64
arch_atomic64_dec_return(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_dec_return arch_atomic64_dec_return
#endif

#endif /* arch_atomic64_dec_return_relaxed */

#ifndef arch_atomic64_fetch_dec_relaxed
#ifdef arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec
#endif /* arch_atomic64_fetch_dec */

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	return arch_atomic64_fetch_sub(1, v);
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_acquire(1, v);
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_release(1, v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec_relaxed
static __always_inline s64
arch_atomic64_fetch_dec_relaxed(atomic64_t *v)
{
	return arch_atomic64_fetch_sub_relaxed(1, v);
}
#define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed
#endif

#else /* arch_atomic64_fetch_dec_relaxed */

#ifndef arch_atomic64_fetch_dec_acquire
static __always_inline s64
arch_atomic64_fetch_dec_acquire(atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
#endif

#ifndef arch_atomic64_fetch_dec_release
static __always_inline s64
arch_atomic64_fetch_dec_release(atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_dec_relaxed(v);
}
#define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
#endif

#ifndef arch_atomic64_fetch_dec
static __always_inline s64
arch_atomic64_fetch_dec(atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
#endif

#endif /* arch_atomic64_fetch_dec_relaxed */

#ifndef arch_atomic64_fetch_and_relaxed
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
#define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and
#else /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_fetch_and_acquire
static __always_inline s64
arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
#endif

#ifndef arch_atomic64_fetch_and_release
static __always_inline s64
arch_atomic64_fetch_and_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_and_relaxed(i, v);
}
#define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
#endif

#ifndef arch_atomic64_fetch_and
static __always_inline s64
arch_atomic64_fetch_and(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_and arch_atomic64_fetch_and
#endif

#endif /* arch_atomic64_fetch_and_relaxed */

#ifndef arch_atomic64_andnot
static __always_inline void
arch_atomic64_andnot(s64 i, atomic64_t *v)
{
	arch_atomic64_and(~i, v);
}
#define arch_atomic64_andnot arch_atomic64_andnot
#endif
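
/*
 * The andnot fallbacks clear in @v the bits set in @i by reusing the
 * corresponding and ops with the complemented operand (~i).
 */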
#ifndef arch_atomic64_fetch_andnot_relaxed
#ifdef arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot
#endif /* arch_atomic64_fetch_andnot */

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and(~i, v);
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_acquire(~i, v);
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_release(~i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot_relaxed
static __always_inline s64
arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
	return arch_atomic64_fetch_and_relaxed(~i, v);
}
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
#endif

#else /* arch_atomic64_fetch_andnot_relaxed */

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#endif /* arch_atomic64_fetch_andnot_relaxed */

#ifndef arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or
#else /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_or_acquire
static __always_inline s64
arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
#endif

#ifndef arch_atomic64_fetch_or_release
static __always_inline s64
arch_atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_or_relaxed(i, v);
}
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
#endif

#ifndef arch_atomic64_fetch_or
static __always_inline s64
arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#endif

#endif /* arch_atomic64_fetch_or_relaxed */

#ifndef arch_atomic64_fetch_xor_relaxed
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor
#else /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_fetch_xor_acquire
static __always_inline s64
arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
	s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
#endif

#ifndef arch_atomic64_fetch_xor_release
static __always_inline s64
arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
#endif

#ifndef arch_atomic64_fetch_xor
static __always_inline s64
arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
#endif

#endif /* arch_atomic64_fetch_xor_relaxed */

#ifndef arch_atomic64_xchg_relaxed
#define arch_atomic64_xchg_acquire arch_atomic64_xchg
#define arch_atomic64_xchg_release arch_atomic64_xchg
#define arch_atomic64_xchg_relaxed arch_atomic64_xchg
#else /* arch_atomic64_xchg_relaxed */

#ifndef arch_atomic64_xchg_acquire
static __always_inline s64
arch_atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
	s64 ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
#endif

#ifndef arch_atomic64_xchg_release
static __always_inline s64
arch_atomic64_xchg_release(atomic64_t *v, s64 i)
{
	__atomic_release_fence();
	return arch_atomic64_xchg_relaxed(v, i);
}
#define arch_atomic64_xchg_release arch_atomic64_xchg_release
#endif

#ifndef arch_atomic64_xchg
static __always_inline s64
arch_atomic64_xchg(atomic64_t *v, s64 i)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_xchg arch_atomic64_xchg
#endif

#endif /* arch_atomic64_xchg_relaxed */

#ifndef arch_atomic64_cmpxchg_relaxed
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
#else /* arch_atomic64_cmpxchg_relaxed */

#ifndef arch_atomic64_cmpxchg_acquire
static __always_inline s64
arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
	s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
#endif

#ifndef arch_atomic64_cmpxchg_release
static __always_inline s64
arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
#endif

#ifndef arch_atomic64_cmpxchg
static __always_inline s64
arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
	s64 ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
#endif

#endif /* arch_atomic64_cmpxchg_relaxed */
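
/*
 * try_cmpxchg differs from cmpxchg in that it returns a success boolean and,
 * on failure, writes the value actually observed back into *old. When the
 * architecture provides no try_cmpxchg at all, it is emulated below on top
 * of the corresponding cmpxchg.
 */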
#ifndef arch_atomic64_try_cmpxchg_relaxed
#ifdef arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg
#endif /* arch_atomic64_try_cmpxchg */

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
	s64 r, o = *old;
	r = arch_atomic64_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed
#endif

#else /* arch_atomic64_try_cmpxchg_relaxed */

#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
	bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
	__atomic_release_fence();
	return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#endif /* arch_atomic64_try_cmpxchg_relaxed */
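
/*
 * Illustrative use only, not part of this header: because the failure path
 * of try_cmpxchg refreshes the expected value, a typical caller loops until
 * the update succeeds, e.g. for a hypothetical saturating increment:
 *
 *	s64 old = arch_atomic64_read(&counter);
 *	do {
 *		if (old == S64_MAX)
 *			break;
 *	} while (!arch_atomic64_try_cmpxchg(&counter, &old, old + 1));
 */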
#ifndef arch_atomic64_sub_and_test
/**
 * arch_atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns true if the result is zero,
 * or false for all other cases.
 */
static __always_inline bool
arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
	return arch_atomic64_sub_return(i, v) == 0;
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
#endif

#ifndef arch_atomic64_dec_and_test
/**
 * arch_atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and returns true if the result is 0,
 * or false for all other cases.
 */
static __always_inline bool
arch_atomic64_dec_and_test(atomic64_t *v)
{
	return arch_atomic64_dec_return(v) == 0;
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
#endif

#ifndef arch_atomic64_inc_and_test
/**
 * arch_atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1 and returns true if the result is zero,
 * or false for all other cases.
 */
static __always_inline bool
arch_atomic64_inc_and_test(atomic64_t *v)
{
	return arch_atomic64_inc_return(v) == 0;
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
#endif

#ifndef arch_atomic64_add_negative
/**
 * arch_atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true if the result is negative,
 * or false when the result is greater than or equal to zero.
 */
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return(i, v) < 0;
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
#endif
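
/*
 * The *_and_test and add_negative fallbacks above compare the result of the
 * fully ordered *_return ops against zero, so when these fallbacks are used
 * the test is fully ordered as well.
 */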
#ifndef arch_atomic64_fetch_add_unless
/**
 * arch_atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns the original value of @v.
 */
static __always_inline s64
arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + a));

	return c;
}
#define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
#endif
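
/*
 * The fetch_add_unless fallback above reads @v once and then retries via
 * try_cmpxchg, which refreshes @c with the current value on each failed
 * attempt; the loop ends on success or once @v is observed to equal @u.
 */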
#ifndef arch_atomic64_add_unless
/**
 * arch_atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
	return arch_atomic64_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless
#endif

#ifndef arch_atomic64_inc_not_zero
/**
 * arch_atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
arch_atomic64_inc_not_zero(atomic64_t *v)
{
	return arch_atomic64_add_unless(v, 1, 0);
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
#endif
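
/*
 * The remaining conditional ops below (inc_unless_negative,
 * dec_unless_positive, dec_if_positive) use the same read-then-try_cmpxchg
 * retry pattern as fetch_add_unless above.
 */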
#ifndef arch_atomic64_inc_unless_negative
static __always_inline bool
arch_atomic64_inc_unless_negative(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
#endif

#ifndef arch_atomic64_dec_unless_positive
static __always_inline bool
arch_atomic64_dec_unless_positive(atomic64_t *v)
{
	s64 c = arch_atomic64_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
#endif

#ifndef arch_atomic64_dec_if_positive
static __always_inline s64
arch_atomic64_dec_if_positive(atomic64_t *v)
{
	s64 dec, c = arch_atomic64_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic64_try_cmpxchg(v, &c, dec));

	return dec;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
#endif

#endif /* _LINUX_ATOMIC_FALLBACK_H */

// b5e87bdd5ede61470c29f7a7e4de781af3770f09