/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * (c) Copyright 2006 Benjamin Herrenschmidt, IBM Corp.
 *                    <benh@kernel.crashing.org>
 */
#ifndef _ASM_POWERPC_DCR_NATIVE_H
#define _ASM_POWERPC_DCR_NATIVE_H
#ifdef __KERNEL__
#ifndef __ASSEMBLY__

#include <linux/spinlock.h>
#include <asm/cputable.h>
#include <asm/cpu_has_feature.h>
#include <linux/stringify.h>

typedef struct {
        unsigned int base;
} dcr_host_native_t;

static inline bool dcr_map_ok_native(dcr_host_native_t host)
{
        return true;
}

#define dcr_map_native(dev, dcr_n, dcr_c) \
        ((dcr_host_native_t){ .base = (dcr_n) })
#define dcr_unmap_native(host, dcr_c)           do {} while (0)
#define dcr_read_native(host, dcr_n)            mfdcr((dcr_n) + (host).base)
#define dcr_write_native(host, dcr_n, value)    mtdcr((dcr_n) + (host).base, value)
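
/*
 * Usage sketch (illustrative only; "MY_DCR_BASE" is a hypothetical
 * DCR number, not something defined by this header):
 *
 *        dcr_host_native_t host = dcr_map_native(NULL, MY_DCR_BASE, 2);
 *        unsigned int status;
 *
 *        if (dcr_map_ok_native(host)) {
 *                status = dcr_read_native(host, 0);
 *                dcr_write_native(host, 1, status | 0x1);
 *        }
 */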

/* Table based DCR accessors */
extern void __mtdcr(unsigned int reg, unsigned int val);
extern unsigned int __mfdcr(unsigned int reg);

/* mfdcrx/mtdcrx instruction based accessors. We hand code
 * the opcodes in order not to depend on newer binutils
 */
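/*
 * The hand-coded words are standard X-form encodings: primary opcode
 * 31 in bits 0-5 gives the 0x7c000000 prefix, and the extended opcode
 * sits in bits 21-30 (259 << 1 = 0x206 for mfdcrx, 387 << 1 = 0x306
 * for mtdcrx). The asm below then ORs the GPR numbers into the RT/RS
 * field (bits 6-10, i.e. << 21) and the RA field (bits 11-15,
 * i.e. << 16) of the instruction word.
 */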

static inline unsigned int mfdcrx(unsigned int reg)
{
        unsigned int ret;

        asm volatile(".long 0x7c000206 | (%0 << 21) | (%1 << 16)"
                     : "=r" (ret) : "r" (reg));
        return ret;
}

static inline void mtdcrx(unsigned int reg, unsigned int val)
{
        asm volatile(".long 0x7c000306 | (%0 << 21) | (%1 << 16)"
                     : : "r" (val), "r" (reg));
}

#define mfdcr(rn)                                               \
        ({unsigned int rval;                                    \
        if (__builtin_constant_p(rn) && (rn) < 1024)            \
                asm volatile("mfdcr %0, %1" : "=r" (rval)       \
                              : "n" (rn));                      \
        else if (likely(cpu_has_feature(CPU_FTR_INDEXED_DCR)))  \
                rval = mfdcrx(rn);                              \
        else                                                    \
                rval = __mfdcr(rn);                             \
        rval; })

#define mtdcr(rn, v)                                            \
do {                                                            \
        if (__builtin_constant_p(rn) && (rn) < 1024)            \
                asm volatile("mtdcr %0, %1"                     \
                              : : "n" (rn), "r" (v));           \
        else if (likely(cpu_has_feature(CPU_FTR_INDEXED_DCR)))  \
                mtdcrx(rn, v);                                  \
        else                                                    \
                __mtdcr(rn, v);                                 \
} while (0)
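
/*
 * Dispatch summary (illustrative; "my_dcr_base" is a made-up variable):
 * a constant DCR number below 1024 folds the test above at compile
 * time and emits one immediate-form mfdcr/mtdcr; a run-time number
 * uses mfdcrx/mtdcrx on CPU_FTR_INDEXED_DCR parts and the table-based
 * __mfdcr/__mtdcr otherwise.
 *
 *        unsigned int v = mfdcr(0x00c);                 // immediate form
 *        unsigned int w = mfdcr(my_dcr_base + 3);       // indexed or table
 */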

/* R/W of indirect DCRs make use of standard naming conventions for DCRs */
extern spinlock_t dcr_ind_lock;

static inline unsigned __mfdcri(int base_addr, int base_data, int reg)
{
        unsigned long flags;
        unsigned int val;

        spin_lock_irqsave(&dcr_ind_lock, flags);
        if (cpu_has_feature(CPU_FTR_INDEXED_DCR)) {
                mtdcrx(base_addr, reg);
                val = mfdcrx(base_data);
        } else {
                __mtdcr(base_addr, reg);
                val = __mfdcr(base_data);
        }
        spin_unlock_irqrestore(&dcr_ind_lock, flags);
        return val;
}

static inline void __mtdcri(int base_addr, int base_data, int reg,
                            unsigned val)
{
        unsigned long flags;

        spin_lock_irqsave(&dcr_ind_lock, flags);
        if (cpu_has_feature(CPU_FTR_INDEXED_DCR)) {
                mtdcrx(base_addr, reg);
                mtdcrx(base_data, val);
        } else {
                __mtdcr(base_addr, reg);
                __mtdcr(base_data, val);
        }
        spin_unlock_irqrestore(&dcr_ind_lock, flags);
}

static inline void __dcri_clrset(int base_addr, int base_data, int reg,
                                 unsigned clr, unsigned set)
{
        unsigned long flags;
        unsigned int val;

        spin_lock_irqsave(&dcr_ind_lock, flags);
        if (cpu_has_feature(CPU_FTR_INDEXED_DCR)) {
                mtdcrx(base_addr, reg);
                val = (mfdcrx(base_data) & ~clr) | set;
                mtdcrx(base_data, val);
        } else {
                __mtdcr(base_addr, reg);
                val = (__mfdcr(base_data) & ~clr) | set;
                __mtdcr(base_data, val);
        }
        spin_unlock_irqrestore(&dcr_ind_lock, flags);
}

#define mfdcri(base, reg)       __mfdcri(DCRN_ ## base ## _CONFIG_ADDR, \
                                         DCRN_ ## base ## _CONFIG_DATA, \
                                         reg)

#define mtdcri(base, reg, data) __mtdcri(DCRN_ ## base ## _CONFIG_ADDR, \
                                         DCRN_ ## base ## _CONFIG_DATA, \
                                         reg, data)

#define dcri_clrset(base, reg, clr, set)                                \
                           __dcri_clrset(DCRN_ ## base ## _CONFIG_ADDR, \
                                         DCRN_ ## base ## _CONFIG_DATA, \
                                         reg, clr, set)
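
/*
 * Usage sketch for the indirect accessors ("SDR0" stands for any block
 * whose platform code defines matching DCRN_<block>_CONFIG_ADDR /
 * DCRN_<block>_CONFIG_DATA constants; the register number 0x20 is
 * made up for the example):
 *
 *        unsigned int cfg = mfdcri(SDR0, 0x20);
 *
 *        mtdcri(SDR0, 0x20, cfg | 0x1);          // set bit 0
 *        dcri_clrset(SDR0, 0x20, 0xf, 0x2);      // clear low nibble, set bit 1
 */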

#endif /* __ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_DCR_NATIVE_H */