/* arch/mips/include/asm/msa.h */
  1. /* SPDX-License-Identifier: GPL-2.0-or-later */
  2. /*
  3. * Copyright (C) 2013 Imagination Technologies
  4. * Author: Paul Burton <[email protected]>
  5. */
  6. #ifndef _ASM_MSA_H
  7. #define _ASM_MSA_H
  8. #include <asm/mipsregs.h>
  9. #ifndef __ASSEMBLY__
  10. #include <asm/inst.h>
  11. extern void _save_msa(struct task_struct *);
  12. extern void _restore_msa(struct task_struct *);
  13. extern void _init_msa_upper(void);
  14. extern void read_msa_wr_b(unsigned idx, union fpureg *to);
  15. extern void read_msa_wr_h(unsigned idx, union fpureg *to);
  16. extern void read_msa_wr_w(unsigned idx, union fpureg *to);
  17. extern void read_msa_wr_d(unsigned idx, union fpureg *to);
  18. /**
  19. * read_msa_wr() - Read a single MSA vector register
  20. * @idx: The index of the vector register to read
  21. * @to: The FPU register union to store the registers value in
  22. * @fmt: The format of the data in the vector register
  23. *
  24. * Read the value of MSA vector register idx into the FPU register
  25. * union to, using the format fmt.
  26. */
  27. static inline void read_msa_wr(unsigned idx, union fpureg *to,
  28. enum msa_2b_fmt fmt)
  29. {
  30. switch (fmt) {
  31. case msa_fmt_b:
  32. read_msa_wr_b(idx, to);
  33. break;
  34. case msa_fmt_h:
  35. read_msa_wr_h(idx, to);
  36. break;
  37. case msa_fmt_w:
  38. read_msa_wr_w(idx, to);
  39. break;
  40. case msa_fmt_d:
  41. read_msa_wr_d(idx, to);
  42. break;
  43. default:
  44. BUG();
  45. }
  46. }
  47. extern void write_msa_wr_b(unsigned idx, union fpureg *from);
  48. extern void write_msa_wr_h(unsigned idx, union fpureg *from);
  49. extern void write_msa_wr_w(unsigned idx, union fpureg *from);
  50. extern void write_msa_wr_d(unsigned idx, union fpureg *from);
  51. /**
  52. * write_msa_wr() - Write a single MSA vector register
  53. * @idx: The index of the vector register to write
  54. * @from: The FPU register union to take the registers value from
  55. * @fmt: The format of the data in the vector register
  56. *
  57. * Write the value from the FPU register union from into MSA vector
  58. * register idx, using the format fmt.
  59. */
  60. static inline void write_msa_wr(unsigned idx, union fpureg *from,
  61. enum msa_2b_fmt fmt)
  62. {
  63. switch (fmt) {
  64. case msa_fmt_b:
  65. write_msa_wr_b(idx, from);
  66. break;
  67. case msa_fmt_h:
  68. write_msa_wr_h(idx, from);
  69. break;
  70. case msa_fmt_w:
  71. write_msa_wr_w(idx, from);
  72. break;
  73. case msa_fmt_d:
  74. write_msa_wr_d(idx, from);
  75. break;
  76. default:
  77. BUG();
  78. }
  79. }
  80. static inline void enable_msa(void)
  81. {
  82. if (cpu_has_msa) {
  83. set_c0_config5(MIPS_CONF5_MSAEN);
  84. enable_fpu_hazard();
  85. }
  86. }
  87. static inline void disable_msa(void)
  88. {
  89. if (cpu_has_msa) {
  90. clear_c0_config5(MIPS_CONF5_MSAEN);
  91. disable_fpu_hazard();
  92. }
  93. }
  94. static inline int is_msa_enabled(void)
  95. {
  96. if (!cpu_has_msa)
  97. return 0;
  98. return read_c0_config5() & MIPS_CONF5_MSAEN;
  99. }
  100. static inline int thread_msa_context_live(void)
  101. {
  102. /*
  103. * Check cpu_has_msa only if it's a constant. This will allow the
  104. * compiler to optimise out code for CPUs without MSA without adding
  105. * an extra redundant check for CPUs with MSA.
  106. */
  107. if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
  108. return 0;
  109. return test_thread_flag(TIF_MSA_CTX_LIVE);
  110. }
  111. static inline void save_msa(struct task_struct *t)
  112. {
  113. if (cpu_has_msa)
  114. _save_msa(t);
  115. }
  116. static inline void restore_msa(struct task_struct *t)
  117. {
  118. if (cpu_has_msa)
  119. _restore_msa(t);
  120. }
  121. static inline void init_msa_upper(void)
  122. {
  123. /*
  124. * Check cpu_has_msa only if it's a constant. This will allow the
  125. * compiler to optimise out code for CPUs without MSA without adding
  126. * an extra redundant check for CPUs with MSA.
  127. */
  128. if (__builtin_constant_p(cpu_has_msa) && !cpu_has_msa)
  129. return;
  130. _init_msa_upper();
  131. }
#ifndef TOOLCHAIN_SUPPORTS_MSA
/*
 * Define assembler macros using .word for the c[ft]cmsa instructions in order
 * to allow compilation with toolchains that do not support MSA. Once all
 * toolchains in use support MSA these can be removed.
 *
 * The hex words are the raw cfcmsa/ctcmsa opcodes (classic MIPS and
 * microMIPS encodings) with the register operands OR'd into bits 11 and 6.
 */
#define _ASM_SET_CFCMSA							\
		_ASM_MACRO_2R(cfcmsa, rd, cs,				\
			      _ASM_INSN_IF_MIPS(0x787e0019 | __cs << 11 | __rd << 6)	\
			      _ASM_INSN32_IF_MM(0x587e0016 | __cs << 11 | __rd << 6))
#define _ASM_UNSET_CFCMSA	".purgem cfcmsa\n\t"
#define _ASM_SET_CTCMSA							\
		_ASM_MACRO_2R(ctcmsa, cd, rs,				\
			      _ASM_INSN_IF_MIPS(0x783e0019 | __rs << 11 | __cd << 6)	\
			      _ASM_INSN32_IF_MM(0x583e0016 | __rs << 11 | __cd << 6))
#define _ASM_UNSET_CTCMSA	".purgem ctcmsa\n\t"
#else /* TOOLCHAIN_SUPPORTS_MSA */
/*
 * The toolchain understands MSA natively: just enable the fp=64/msa
 * assembler modes around the instruction; nothing to undo beyond the
 * enclosing .set push/pop.
 */
#define _ASM_SET_CFCMSA		\
		".set\tfp=64\n\t"	\
		".set\tmsa\n\t"
#define _ASM_UNSET_CFCMSA
#define _ASM_SET_CTCMSA		\
		".set\tfp=64\n\t"	\
		".set\tmsa\n\t"
#define _ASM_UNSET_CTCMSA
#endif
/*
 * __BUILD_MSA_CTL_REG() - generate read_msa_<name>()/write_msa_<name>()
 * accessors for MSA control register number @cs, using cfcmsa/ctcmsa
 * wrapped in the toolchain-compatibility macros defined above.
 */
#define __BUILD_MSA_CTL_REG(name, cs)				\
static inline unsigned int read_msa_##name(void)		\
{								\
	unsigned int reg;					\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	_ASM_SET_CFCMSA						\
	"	cfcmsa	%0, $" #cs "\n"				\
	_ASM_UNSET_CFCMSA					\
	"	.set	pop\n"					\
	: "=r"(reg));						\
	return reg;						\
}								\
								\
static inline void write_msa_##name(unsigned int val)		\
{								\
	__asm__ __volatile__(					\
	"	.set	push\n"					\
	_ASM_SET_CTCMSA						\
	"	ctcmsa	$" #cs ", %0\n"				\
	_ASM_UNSET_CTCMSA					\
	"	.set	pop\n"					\
	: : "r"(val));						\
}

/* Accessors for MSA control registers $0..$7. */
__BUILD_MSA_CTL_REG(ir, 0)
__BUILD_MSA_CTL_REG(csr, 1)
__BUILD_MSA_CTL_REG(access, 2)
__BUILD_MSA_CTL_REG(save, 3)
__BUILD_MSA_CTL_REG(modify, 4)
__BUILD_MSA_CTL_REG(request, 5)
__BUILD_MSA_CTL_REG(map, 6)
__BUILD_MSA_CTL_REG(unmap, 7)
#endif /* !__ASSEMBLY__ */

/* MSA control register indices, usable from assembly as well as C. */
#define MSA_IR		0
#define MSA_CSR		1
#define MSA_ACCESS	2
#define MSA_SAVE	3
#define MSA_MODIFY	4
#define MSA_REQUEST	5
#define MSA_MAP		6
#define MSA_UNMAP	7

/*
 * Bitfields below follow the file's *B/*F convention: the B constant is
 * the field's starting bit position, the F constant its mask.
 */

/* MSA Implementation Register (MSAIR) */
#define MSA_IR_REVB		0
#define MSA_IR_REVF		(_ULCAST_(0xff) << MSA_IR_REVB)
#define MSA_IR_PROCB		8
#define MSA_IR_PROCF		(_ULCAST_(0xff) << MSA_IR_PROCB)
#define MSA_IR_WRPB		16
#define MSA_IR_WRPF		(_ULCAST_(0x1) << MSA_IR_WRPB)

/* MSA Control & Status Register (MSACSR) */
#define MSA_CSR_RMB		0
#define MSA_CSR_RMF		(_ULCAST_(0x3) << MSA_CSR_RMB)
#define MSA_CSR_RM_NEAREST	0
#define MSA_CSR_RM_TO_ZERO	1
#define MSA_CSR_RM_TO_POS	2
#define MSA_CSR_RM_TO_NEG	3
/* Sticky exception flags: Inexact, Underflow, Overflow, div-by-Zero, inValid */
#define MSA_CSR_FLAGSB		2
#define MSA_CSR_FLAGSF		(_ULCAST_(0x1f) << MSA_CSR_FLAGSB)
#define MSA_CSR_FLAGS_IB	2
#define MSA_CSR_FLAGS_IF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_IB)
#define MSA_CSR_FLAGS_UB	3
#define MSA_CSR_FLAGS_UF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_UB)
#define MSA_CSR_FLAGS_OB	4
#define MSA_CSR_FLAGS_OF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_OB)
#define MSA_CSR_FLAGS_ZB	5
#define MSA_CSR_FLAGS_ZF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_ZB)
#define MSA_CSR_FLAGS_VB	6
#define MSA_CSR_FLAGS_VF	(_ULCAST_(0x1) << MSA_CSR_FLAGS_VB)
/* Per-exception enable bits, same I/U/O/Z/V ordering as the flags */
#define MSA_CSR_ENABLESB	7
#define MSA_CSR_ENABLESF	(_ULCAST_(0x1f) << MSA_CSR_ENABLESB)
#define MSA_CSR_ENABLES_IB	7
#define MSA_CSR_ENABLES_IF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_IB)
#define MSA_CSR_ENABLES_UB	8
#define MSA_CSR_ENABLES_UF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_UB)
#define MSA_CSR_ENABLES_OB	9
#define MSA_CSR_ENABLES_OF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_OB)
#define MSA_CSR_ENABLES_ZB	10
#define MSA_CSR_ENABLES_ZF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_ZB)
#define MSA_CSR_ENABLES_VB	11
#define MSA_CSR_ENABLES_VF	(_ULCAST_(0x1) << MSA_CSR_ENABLES_VB)
/* Cause bits: I/U/O/Z/V plus E (unimplemented-operation) */
#define MSA_CSR_CAUSEB		12
#define MSA_CSR_CAUSEF		(_ULCAST_(0x3f) << MSA_CSR_CAUSEB)
#define MSA_CSR_CAUSE_IB	12
#define MSA_CSR_CAUSE_IF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_IB)
#define MSA_CSR_CAUSE_UB	13
#define MSA_CSR_CAUSE_UF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_UB)
#define MSA_CSR_CAUSE_OB	14
#define MSA_CSR_CAUSE_OF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_OB)
#define MSA_CSR_CAUSE_ZB	15
#define MSA_CSR_CAUSE_ZF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_ZB)
#define MSA_CSR_CAUSE_VB	16
#define MSA_CSR_CAUSE_VF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_VB)
#define MSA_CSR_CAUSE_EB	17
#define MSA_CSR_CAUSE_EF	(_ULCAST_(0x1) << MSA_CSR_CAUSE_EB)
#define MSA_CSR_NXB		18
#define MSA_CSR_NXF		(_ULCAST_(0x1) << MSA_CSR_NXB)
#define MSA_CSR_FSB		24
#define MSA_CSR_FSF		(_ULCAST_(0x1) << MSA_CSR_FSB)

#endif /* _ASM_MSA_H */