/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_PERCPU_H_
#define _ASM_GENERIC_PERCPU_H_

#include <linux/compiler.h>
#include <linux/threads.h>
#include <linux/percpu-defs.h>

#ifdef CONFIG_SMP

/*
 * per_cpu_offset() is the offset that has to be added to a
 * percpu variable to get to the instance for a certain processor.
 *
 * Most arches use the __per_cpu_offset array for those offsets but
 * some arches have their own ways of determining the offset (x86_64, s390).
 */
#ifndef __per_cpu_offset
extern unsigned long __per_cpu_offset[NR_CPUS];

#define per_cpu_offset(x) (__per_cpu_offset[x])
#endif
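
/*
 * Illustrative sketch (added commentary, not part of the upstream
 * header): with the generic __per_cpu_offset array, CPU n's instance
 * of a percpu variable sits at the variable's link-time address
 * shifted by per_cpu_offset(n), which is what SHIFT_PERCPU_PTR() and
 * the per_cpu_ptr() accessor built on top of it compute.  The variable
 * name "hypothetical_counter" is made up for the example:
 *
 *	DEFINE_PER_CPU(int, hypothetical_counter);
 *
 *	int *p = SHIFT_PERCPU_PTR(&hypothetical_counter,
 *				  per_cpu_offset(cpu));
 *	// equivalent to: p = per_cpu_ptr(&hypothetical_counter, cpu);
 */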

/*
 * Determine the offset for the currently active processor.
 * An arch may define __my_cpu_offset to provide a more effective
 * means of obtaining the offset to the per cpu variables of the
 * current processor.
 */
#ifndef __my_cpu_offset
#define __my_cpu_offset per_cpu_offset(raw_smp_processor_id())
#endif
#ifdef CONFIG_DEBUG_PREEMPT
#define my_cpu_offset per_cpu_offset(smp_processor_id())
#else
#define my_cpu_offset __my_cpu_offset
#endif

/*
 * Arch may define arch_raw_cpu_ptr() to provide more efficient address
 * translations for raw_cpu_ptr().
 */
#ifndef arch_raw_cpu_ptr
#define arch_raw_cpu_ptr(ptr)	SHIFT_PERCPU_PTR(ptr, __my_cpu_offset)
#endif
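
/*
 * Usage sketch (added commentary): raw_cpu_ptr() resolves through
 * arch_raw_cpu_ptr() without verifying that the caller cannot migrate,
 * so the caller must already be pinned to a CPU.  Under
 * CONFIG_DEBUG_PREEMPT, my_cpu_offset uses smp_processor_id(), which
 * adds a debug-time check that the context is non-preemptible.
 * Reusing the hypothetical_counter example from above:
 *
 *	int *p;
 *
 *	preempt_disable();
 *	p = raw_cpu_ptr(&hypothetical_counter);
 *	(*p)++;			// safely updates this CPU's copy
 *	preempt_enable();
 */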

#ifdef CONFIG_HAVE_SETUP_PER_CPU_AREA
extern void setup_per_cpu_areas(void);
#endif

#endif	/* SMP */

#ifndef PER_CPU_BASE_SECTION
#ifdef CONFIG_SMP
#define PER_CPU_BASE_SECTION ".data..percpu"
#else
#define PER_CPU_BASE_SECTION ".data"
#endif
#endif

#ifndef PER_CPU_ATTRIBUTES
#define PER_CPU_ATTRIBUTES
#endif

#define raw_cpu_generic_read(pcp)					\
({									\
	*raw_cpu_ptr(&(pcp));						\
})

#define raw_cpu_generic_to_op(pcp, val, op)				\
do {									\
	*raw_cpu_ptr(&(pcp)) op val;					\
} while (0)

#define raw_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
									\
	*__p += val;							\
	*__p;								\
})

#define raw_cpu_generic_xchg(pcp, nval)					\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	*__p = nval;							\
	__ret;								\
})

#define raw_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) *__p = raw_cpu_ptr(&(pcp));				\
	typeof(pcp) __ret;						\
	__ret = *__p;							\
	if (__ret == (oval))						\
		*__p = nval;						\
	__ret;								\
})

#define raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	typeof(pcp1) *__p1 = raw_cpu_ptr(&(pcp1));			\
	typeof(pcp2) *__p2 = raw_cpu_ptr(&(pcp2));			\
	int __ret = 0;							\
	if (*__p1 == (oval1) && *__p2 == (oval2)) {			\
		*__p1 = nval1;						\
		*__p2 = nval2;						\
		__ret = 1;						\
	}								\
	(__ret);							\
})
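
/*
 * Added commentary: the generic double compare-and-exchange above
 * succeeds, returning 1, only when both percpu words match their
 * expected values; otherwise nothing is written and 0 is returned.
 * A hedged sketch with a made-up pair of fields (architectures with a
 * native double-word cmpxchg typically require the two words to be
 * contiguous and suitably aligned, hence the struct):
 *
 *	struct hyp_pair {
 *		unsigned long lo;
 *		unsigned long hi;
 *	};
 *	DEFINE_PER_CPU_ALIGNED(struct hyp_pair, hyp_pair);
 *
 *	if (raw_cpu_cmpxchg_double(hyp_pair.lo, hyp_pair.hi,
 *				   0UL, 0UL, 1UL, 2UL))
 *		;	// both words were 0 and were updated together
 */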

#define __this_cpu_generic_read_nopreempt(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	preempt_disable_notrace();					\
	___ret = READ_ONCE(*raw_cpu_ptr(&(pcp)));			\
	preempt_enable_notrace();					\
	___ret;								\
})

#define __this_cpu_generic_read_noirq(pcp)				\
({									\
	typeof(pcp) ___ret;						\
	unsigned long ___flags;						\
	raw_local_irq_save(___flags);					\
	___ret = raw_cpu_generic_read(pcp);				\
	raw_local_irq_restore(___flags);				\
	___ret;								\
})

#define this_cpu_generic_read(pcp)					\
({									\
	typeof(pcp) __ret;						\
	if (__native_word(pcp))						\
		__ret = __this_cpu_generic_read_nopreempt(pcp);		\
	else								\
		__ret = __this_cpu_generic_read_noirq(pcp);		\
	__ret;								\
})
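
/*
 * Added commentary: this_cpu_generic_read() picks a protection scheme
 * based on __native_word(), which is true for scalar types whose size
 * matches char, short, int or long.  Such values are loaded in a
 * single access, so READ_ONCE() under preempt_disable_notrace() is
 * sufficient.  Other types may be read in several pieces, so interrupts
 * are disabled to keep an IRQ handler on this CPU from updating the
 * variable between the partial loads.
 */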

#define this_cpu_generic_to_op(pcp, val, op)				\
do {									\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	raw_cpu_generic_to_op(pcp, val, op);				\
	raw_local_irq_restore(__flags);					\
} while (0)

#define this_cpu_generic_add_return(pcp, val)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_add_return(pcp, val);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_xchg(pcp, nval)				\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_xchg(pcp, nval);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg(pcp, oval, nval)			\
({									\
	typeof(pcp) __ret;						\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg(pcp, oval, nval);		\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})

#define this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({									\
	int __ret;							\
	unsigned long __flags;						\
	raw_local_irq_save(__flags);					\
	__ret = raw_cpu_generic_cmpxchg_double(pcp1, pcp2,		\
			oval1, oval2, nval1, nval2);			\
	raw_local_irq_restore(__flags);					\
	__ret;								\
})
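
/*
 * Added commentary: the this_cpu_generic_* wrappers make each
 * read-modify-write operation atomic with respect to interrupts on the
 * local CPU by bracketing the corresponding raw_cpu_generic_* body
 * with raw_local_irq_save()/raw_local_irq_restore().  They provide no
 * atomicity against other CPUs; percpu data is meant to be touched
 * only by its owning CPU.  A hedged sketch with a made-up variable:
 *
 *	DEFINE_PER_CPU(unsigned long, hyp_stat);
 *
 *	this_cpu_add(hyp_stat, 1);	// safe even if an IRQ also updates it
 *	raw_cpu_add(hyp_stat, 1);	// caller must exclude IRQ updates itself
 */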

#ifndef raw_cpu_read_1
#define raw_cpu_read_1(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_2
#define raw_cpu_read_2(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_4
#define raw_cpu_read_4(pcp)		raw_cpu_generic_read(pcp)
#endif
#ifndef raw_cpu_read_8
#define raw_cpu_read_8(pcp)		raw_cpu_generic_read(pcp)
#endif

#ifndef raw_cpu_write_1
#define raw_cpu_write_1(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_2
#define raw_cpu_write_2(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_4
#define raw_cpu_write_4(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef raw_cpu_write_8
#define raw_cpu_write_8(pcp, val)	raw_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef raw_cpu_add_1
#define raw_cpu_add_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_2
#define raw_cpu_add_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_4
#define raw_cpu_add_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef raw_cpu_add_8
#define raw_cpu_add_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef raw_cpu_and_1
#define raw_cpu_and_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_2
#define raw_cpu_and_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_4
#define raw_cpu_and_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef raw_cpu_and_8
#define raw_cpu_and_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef raw_cpu_or_1
#define raw_cpu_or_1(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_2
#define raw_cpu_or_2(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_4
#define raw_cpu_or_4(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef raw_cpu_or_8
#define raw_cpu_or_8(pcp, val)		raw_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef raw_cpu_add_return_1
#define raw_cpu_add_return_1(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_2
#define raw_cpu_add_return_2(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_4
#define raw_cpu_add_return_4(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif
#ifndef raw_cpu_add_return_8
#define raw_cpu_add_return_8(pcp, val)	raw_cpu_generic_add_return(pcp, val)
#endif

#ifndef raw_cpu_xchg_1
#define raw_cpu_xchg_1(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_2
#define raw_cpu_xchg_2(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_4
#define raw_cpu_xchg_4(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif
#ifndef raw_cpu_xchg_8
#define raw_cpu_xchg_8(pcp, nval)	raw_cpu_generic_xchg(pcp, nval)
#endif

#ifndef raw_cpu_cmpxchg_1
#define raw_cpu_cmpxchg_1(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_2
#define raw_cpu_cmpxchg_2(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_4
#define raw_cpu_cmpxchg_4(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef raw_cpu_cmpxchg_8
#define raw_cpu_cmpxchg_8(pcp, oval, nval) \
	raw_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef raw_cpu_cmpxchg_double_1
#define raw_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_2
#define raw_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_4
#define raw_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef raw_cpu_cmpxchg_double_8
#define raw_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	raw_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

#ifndef this_cpu_read_1
#define this_cpu_read_1(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_2
#define this_cpu_read_2(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_4
#define this_cpu_read_4(pcp)		this_cpu_generic_read(pcp)
#endif
#ifndef this_cpu_read_8
#define this_cpu_read_8(pcp)		this_cpu_generic_read(pcp)
#endif

#ifndef this_cpu_write_1
#define this_cpu_write_1(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_2
#define this_cpu_write_2(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_4
#define this_cpu_write_4(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif
#ifndef this_cpu_write_8
#define this_cpu_write_8(pcp, val)	this_cpu_generic_to_op(pcp, val, =)
#endif

#ifndef this_cpu_add_1
#define this_cpu_add_1(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_2
#define this_cpu_add_2(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_4
#define this_cpu_add_4(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif
#ifndef this_cpu_add_8
#define this_cpu_add_8(pcp, val)	this_cpu_generic_to_op(pcp, val, +=)
#endif

#ifndef this_cpu_and_1
#define this_cpu_and_1(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_2
#define this_cpu_and_2(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_4
#define this_cpu_and_4(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif
#ifndef this_cpu_and_8
#define this_cpu_and_8(pcp, val)	this_cpu_generic_to_op(pcp, val, &=)
#endif

#ifndef this_cpu_or_1
#define this_cpu_or_1(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_2
#define this_cpu_or_2(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_4
#define this_cpu_or_4(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif
#ifndef this_cpu_or_8
#define this_cpu_or_8(pcp, val)		this_cpu_generic_to_op(pcp, val, |=)
#endif

#ifndef this_cpu_add_return_1
#define this_cpu_add_return_1(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_2
#define this_cpu_add_return_2(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_4
#define this_cpu_add_return_4(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif
#ifndef this_cpu_add_return_8
#define this_cpu_add_return_8(pcp, val)	this_cpu_generic_add_return(pcp, val)
#endif

#ifndef this_cpu_xchg_1
#define this_cpu_xchg_1(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_2
#define this_cpu_xchg_2(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_4
#define this_cpu_xchg_4(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif
#ifndef this_cpu_xchg_8
#define this_cpu_xchg_8(pcp, nval)	this_cpu_generic_xchg(pcp, nval)
#endif

#ifndef this_cpu_cmpxchg_1
#define this_cpu_cmpxchg_1(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_2
#define this_cpu_cmpxchg_2(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_4
#define this_cpu_cmpxchg_4(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif
#ifndef this_cpu_cmpxchg_8
#define this_cpu_cmpxchg_8(pcp, oval, nval) \
	this_cpu_generic_cmpxchg(pcp, oval, nval)
#endif

#ifndef this_cpu_cmpxchg_double_1
#define this_cpu_cmpxchg_double_1(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_2
#define this_cpu_cmpxchg_double_2(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_4
#define this_cpu_cmpxchg_double_4(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif
#ifndef this_cpu_cmpxchg_double_8
#define this_cpu_cmpxchg_double_8(pcp1, pcp2, oval1, oval2, nval1, nval2) \
	this_cpu_generic_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2)
#endif

#endif /* _ASM_GENERIC_PERCPU_H_ */