atomic-long.h
// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-long.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_LONG_H
#define _LINUX_ATOMIC_LONG_H

#include <linux/compiler.h>
#include <asm/types.h>

#ifdef CONFIG_64BIT
typedef atomic64_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC64_INIT(i)
#define atomic_long_cond_read_acquire	atomic64_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic64_cond_read_relaxed
#else
typedef atomic_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)		ATOMIC_INIT(i)
#define atomic_long_cond_read_acquire	atomic_cond_read_acquire
#define atomic_long_cond_read_relaxed	atomic_cond_read_relaxed
#endif
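
/*
 * Illustrative usage (editorial sketch, not part of the generated
 * header): atomic_long_t behaves like atomic64_t on 64-bit kernels and
 * like atomic_t on 32-bit kernels, so a native-word-sized counter can
 * be declared and used identically on both.  "nr_events" is a
 * hypothetical name used only for this example:
 *
 *	static atomic_long_t nr_events = ATOMIC_LONG_INIT(0);
 *
 *	atomic_long_inc(&nr_events);		// nr_events++
 *	long n = atomic_long_read(&nr_events);	// current value
 */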

#ifdef CONFIG_64BIT
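
/*
 * On 64-bit kernels each arch_atomic_long_*() op is a trivial wrapper
 * that forwards to the corresponding arch_atomic64_*() op; long and
 * s64 are both 64 bits wide here, so the conversions are value
 * preserving.
 */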

static __always_inline long
arch_atomic_long_read(const atomic_long_t *v)
{
	return arch_atomic64_read(v);
}

static __always_inline long
arch_atomic_long_read_acquire(const atomic_long_t *v)
{
	return arch_atomic64_read_acquire(v);
}

static __always_inline void
arch_atomic_long_set(atomic_long_t *v, long i)
{
	arch_atomic64_set(v, i);
}

static __always_inline void
arch_atomic_long_set_release(atomic_long_t *v, long i)
{
	arch_atomic64_set_release(v, i);
}

static __always_inline void
arch_atomic_long_add(long i, atomic_long_t *v)
{
	arch_atomic64_add(i, v);
}
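
/*
 * Value-returning ops come in four ordering flavours: fully ordered
 * (no suffix), _acquire, _release and _relaxed.  The *_return ops
 * return the new value; the fetch_* ops return the value the variable
 * held before the operation.
 */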

static __always_inline long
arch_atomic_long_add_return(long i, atomic_long_t *v)
{
	return arch_atomic64_add_return(i, v);
}

static __always_inline long
arch_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_add_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_add_return_release(long i, atomic_long_t *v)
{
	return arch_atomic64_add_return_release(i, v);
}

static __always_inline long
arch_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_add_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_add(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_add_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_add_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_add_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_sub(long i, atomic_long_t *v)
{
	arch_atomic64_sub(i, v);
}

static __always_inline long
arch_atomic_long_sub_return(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_return(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_return_release(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_sub(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_sub_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_sub_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_sub_relaxed(i, v);
}
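
/*
 * inc()/dec() and their variants are shorthand for add(1, v) and
 * sub(1, v), with the same return-value and ordering conventions.
 */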

static __always_inline void
arch_atomic_long_inc(atomic_long_t *v)
{
	arch_atomic64_inc(v);
}

static __always_inline long
arch_atomic_long_inc_return(atomic_long_t *v)
{
	return arch_atomic64_inc_return(v);
}

static __always_inline long
arch_atomic_long_inc_return_acquire(atomic_long_t *v)
{
	return arch_atomic64_inc_return_acquire(v);
}

static __always_inline long
arch_atomic_long_inc_return_release(atomic_long_t *v)
{
	return arch_atomic64_inc_return_release(v);
}

static __always_inline long
arch_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
	return arch_atomic64_inc_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_inc(atomic_long_t *v)
{
	return arch_atomic64_fetch_inc(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
	return arch_atomic64_fetch_inc_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_release(atomic_long_t *v)
{
	return arch_atomic64_fetch_inc_release(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
	return arch_atomic64_fetch_inc_relaxed(v);
}

static __always_inline void
arch_atomic_long_dec(atomic_long_t *v)
{
	arch_atomic64_dec(v);
}

static __always_inline long
arch_atomic_long_dec_return(atomic_long_t *v)
{
	return arch_atomic64_dec_return(v);
}

static __always_inline long
arch_atomic_long_dec_return_acquire(atomic_long_t *v)
{
	return arch_atomic64_dec_return_acquire(v);
}

static __always_inline long
arch_atomic_long_dec_return_release(atomic_long_t *v)
{
	return arch_atomic64_dec_return_release(v);
}

static __always_inline long
arch_atomic_long_dec_return_relaxed(atomic_long_t *v)
{
	return arch_atomic64_dec_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_dec(atomic_long_t *v)
{
	return arch_atomic64_fetch_dec(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
	return arch_atomic64_fetch_dec_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_release(atomic_long_t *v)
{
	return arch_atomic64_fetch_dec_release(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
	return arch_atomic64_fetch_dec_relaxed(v);
}
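
/*
 * Bitwise ops: and/or/xor apply the obvious operation; andnot(i, v)
 * clears the bits of @i in @v, i.e. *v &= ~i.
 */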

static __always_inline void
arch_atomic_long_and(long i, atomic_long_t *v)
{
	arch_atomic64_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_and_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_and_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_and_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_andnot(long i, atomic_long_t *v)
{
	arch_atomic64_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_andnot_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_andnot_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_andnot_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_or(long i, atomic_long_t *v)
{
	arch_atomic64_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_or_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_or_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_or_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_xor(long i, atomic_long_t *v)
{
	arch_atomic64_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_xor_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_xor_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_fetch_xor_relaxed(i, v);
}
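
/*
 * xchg() unconditionally installs @i and returns the previous value.
 * cmpxchg() installs @new only if the current value equals @old, and
 * always returns the value observed, so the caller can detect success
 * by comparing the return value against @old.
 */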

static __always_inline long
arch_atomic_long_xchg(atomic_long_t *v, long i)
{
	return arch_atomic64_xchg(v, i);
}

static __always_inline long
arch_atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
	return arch_atomic64_xchg_acquire(v, i);
}

static __always_inline long
arch_atomic_long_xchg_release(atomic_long_t *v, long i)
{
	return arch_atomic64_xchg_release(v, i);
}

static __always_inline long
arch_atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
	return arch_atomic64_xchg_relaxed(v, i);
}

static __always_inline long
arch_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	return arch_atomic64_cmpxchg(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
	return arch_atomic64_cmpxchg_acquire(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
	return arch_atomic64_cmpxchg_release(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
	return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
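
/*
 * try_cmpxchg() returns true and installs @new if *v == *old;
 * otherwise it writes the current value back into *old and returns
 * false.  The (s64 *) cast is safe because long and s64 have the same
 * size and representation on 64-bit kernels.
 *
 * Typical lock-free update loop (editorial sketch; compute() is a
 * hypothetical helper):
 *
 *	long old = atomic_long_read(&v);
 *	long new;
 *	do {
 *		new = compute(old);
 *	} while (!atomic_long_try_cmpxchg(&v, &old, new));
 */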

static __always_inline bool
arch_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
	return arch_atomic64_try_cmpxchg(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
	return arch_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
	return arch_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
	return arch_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
}
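
/*
 * The *_and_test ops return true iff the result of the operation is
 * zero; add_negative() returns true iff the result is negative.
 */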

static __always_inline bool
arch_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
	return arch_atomic64_sub_and_test(i, v);
}

static __always_inline bool
arch_atomic_long_dec_and_test(atomic_long_t *v)
{
	return arch_atomic64_dec_and_test(v);
}

static __always_inline bool
arch_atomic_long_inc_and_test(atomic_long_t *v)
{
	return arch_atomic64_inc_and_test(v);
}

static __always_inline bool
arch_atomic_long_add_negative(long i, atomic_long_t *v)
{
	return arch_atomic64_add_negative(i, v);
}
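
/*
 * Conditional ops: fetch_add_unless(v, a, u) adds @a unless the value
 * is @u and returns the old value; add_unless() returns true iff the
 * add was performed; inc_not_zero(v) is add_unless(v, 1, 0).
 */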

static __always_inline long
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
	return arch_atomic64_fetch_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	return arch_atomic64_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_inc_not_zero(atomic_long_t *v)
{
	return arch_atomic64_inc_not_zero(v);
}

static __always_inline bool
arch_atomic_long_inc_unless_negative(atomic_long_t *v)
{
	return arch_atomic64_inc_unless_negative(v);
}

static __always_inline bool
arch_atomic_long_dec_unless_positive(atomic_long_t *v)
{
	return arch_atomic64_dec_unless_positive(v);
}
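
/*
 * dec_if_positive() decrements @v only if the result would be
 * non-negative, and returns the result of the decrement; a negative
 * return value means no decrement took place.
 */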

static __always_inline long
arch_atomic_long_dec_if_positive(atomic_long_t *v)
{
	return arch_atomic64_dec_if_positive(v);
}

#else /* CONFIG_64BIT */
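
/*
 * On 32-bit kernels atomic_long_t is atomic_t, and the same ops
 * forward to arch_atomic_*(); the semantics and ordering variants
 * mirror the 64-bit branch above.
 */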

static __always_inline long
arch_atomic_long_read(const atomic_long_t *v)
{
	return arch_atomic_read(v);
}

static __always_inline long
arch_atomic_long_read_acquire(const atomic_long_t *v)
{
	return arch_atomic_read_acquire(v);
}

static __always_inline void
arch_atomic_long_set(atomic_long_t *v, long i)
{
	arch_atomic_set(v, i);
}

static __always_inline void
arch_atomic_long_set_release(atomic_long_t *v, long i)
{
	arch_atomic_set_release(v, i);
}

static __always_inline void
arch_atomic_long_add(long i, atomic_long_t *v)
{
	arch_atomic_add(i, v);
}

static __always_inline long
arch_atomic_long_add_return(long i, atomic_long_t *v)
{
	return arch_atomic_add_return(i, v);
}

static __always_inline long
arch_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_add_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_add_return_release(long i, atomic_long_t *v)
{
	return arch_atomic_add_return_release(i, v);
}

static __always_inline long
arch_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_add_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_add(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_add_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_add_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_add_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_sub(long i, atomic_long_t *v)
{
	arch_atomic_sub(i, v);
}

static __always_inline long
arch_atomic_long_sub_return(long i, atomic_long_t *v)
{
	return arch_atomic_sub_return(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_sub_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
	return arch_atomic_sub_return_release(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_sub_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_sub(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_sub_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_sub_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_sub_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_inc(atomic_long_t *v)
{
	arch_atomic_inc(v);
}

static __always_inline long
arch_atomic_long_inc_return(atomic_long_t *v)
{
	return arch_atomic_inc_return(v);
}

static __always_inline long
arch_atomic_long_inc_return_acquire(atomic_long_t *v)
{
	return arch_atomic_inc_return_acquire(v);
}

static __always_inline long
arch_atomic_long_inc_return_release(atomic_long_t *v)
{
	return arch_atomic_inc_return_release(v);
}

static __always_inline long
arch_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
	return arch_atomic_inc_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_inc(atomic_long_t *v)
{
	return arch_atomic_fetch_inc(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
	return arch_atomic_fetch_inc_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_release(atomic_long_t *v)
{
	return arch_atomic_fetch_inc_release(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
	return arch_atomic_fetch_inc_relaxed(v);
}

static __always_inline void
arch_atomic_long_dec(atomic_long_t *v)
{
	arch_atomic_dec(v);
}

static __always_inline long
arch_atomic_long_dec_return(atomic_long_t *v)
{
	return arch_atomic_dec_return(v);
}

static __always_inline long
arch_atomic_long_dec_return_acquire(atomic_long_t *v)
{
	return arch_atomic_dec_return_acquire(v);
}

static __always_inline long
arch_atomic_long_dec_return_release(atomic_long_t *v)
{
	return arch_atomic_dec_return_release(v);
}

static __always_inline long
arch_atomic_long_dec_return_relaxed(atomic_long_t *v)
{
	return arch_atomic_dec_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_dec(atomic_long_t *v)
{
	return arch_atomic_fetch_dec(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
	return arch_atomic_fetch_dec_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_release(atomic_long_t *v)
{
	return arch_atomic_fetch_dec_release(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
	return arch_atomic_fetch_dec_relaxed(v);
}

static __always_inline void
arch_atomic_long_and(long i, atomic_long_t *v)
{
	arch_atomic_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_and_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_and_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_and_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_andnot(long i, atomic_long_t *v)
{
	arch_atomic_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_andnot_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_andnot_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_andnot_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_or(long i, atomic_long_t *v)
{
	arch_atomic_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_or_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_or_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_or_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_xor(long i, atomic_long_t *v)
{
	arch_atomic_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_xor_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_xor_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_fetch_xor_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_xchg(atomic_long_t *v, long i)
{
	return arch_atomic_xchg(v, i);
}

static __always_inline long
arch_atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
	return arch_atomic_xchg_acquire(v, i);
}

static __always_inline long
arch_atomic_long_xchg_release(atomic_long_t *v, long i)
{
	return arch_atomic_xchg_release(v, i);
}

static __always_inline long
arch_atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
	return arch_atomic_xchg_relaxed(v, i);
}

static __always_inline long
arch_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
	return arch_atomic_cmpxchg_acquire(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
	return arch_atomic_cmpxchg_release(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
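
/*
 * As in the 64-bit branch, try_cmpxchg() updates *old on failure; the
 * (int *) cast relies on long and int having the same size on 32-bit
 * kernels.
 */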

static __always_inline bool
arch_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
	return arch_atomic_try_cmpxchg(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
	return arch_atomic_try_cmpxchg_acquire(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
	return arch_atomic_try_cmpxchg_release(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
	return arch_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
	return arch_atomic_sub_and_test(i, v);
}

static __always_inline bool
arch_atomic_long_dec_and_test(atomic_long_t *v)
{
	return arch_atomic_dec_and_test(v);
}

static __always_inline bool
arch_atomic_long_inc_and_test(atomic_long_t *v)
{
	return arch_atomic_inc_and_test(v);
}

static __always_inline bool
arch_atomic_long_add_negative(long i, atomic_long_t *v)
{
	return arch_atomic_add_negative(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
	return arch_atomic_fetch_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	return arch_atomic_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_inc_not_zero(atomic_long_t *v)
{
	return arch_atomic_inc_not_zero(v);
}

static __always_inline bool
arch_atomic_long_inc_unless_negative(atomic_long_t *v)
{
	return arch_atomic_inc_unless_negative(v);
}

static __always_inline bool
arch_atomic_long_dec_unless_positive(atomic_long_t *v)
{
	return arch_atomic_dec_unless_positive(v);
}

static __always_inline long
arch_atomic_long_dec_if_positive(atomic_long_t *v)
{
	return arch_atomic_dec_if_positive(v);
}

#endif /* CONFIG_64BIT */
#endif /* _LINUX_ATOMIC_LONG_H */

// e8f0e08ff072b74d180eabe2ad001282b38c2c88