r4k_fpu.S

/*
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 98, 99, 2000, 01 Ralf Baechle
 *
 * Multi-arch abstraction and asm macros for easier reading:
 * Copyright (C) 1996 David S. Miller ([email protected])
 *
 * Carsten Langgaard, [email protected]
 * Copyright (C) 2000 MIPS Technologies, Inc.
 * Copyright (C) 1999, 2001 Silicon Graphics, Inc.
 */
#include <asm/asm.h>
#include <asm/asmmacro.h>
#include <asm/errno.h>
#include <asm/export.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>

/* preprocessor replaces the fp in ".set fp=64" with $30 otherwise */
#undef fp
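
/*
 * The EX macro wraps a single memory access so that a fault while
 * touching the (possibly unmapped) user buffer is recoverable: the
 * access is emitted under a local label and a matching entry is added
 * to __ex_table pointing at the fault handler at the bottom of this
 * file, which returns -EFAULT to the caller.
 */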
	.macro	EX insn, reg, src
	.set	push
	SET_HARDFLOAT
	.set	nomacro
.ex\@:	\insn	\reg, \src
	.set	pop
	.section __ex_table,"a"
	PTR_WD	.ex\@, fault
	.previous
	.endm

/*
 * Save a thread's fp context.
 */
LEAF(_save_fp)
EXPORT_SYMBOL(_save_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPSR2) || \
    defined(CONFIG_CPU_MIPSR5) || defined(CONFIG_CPU_MIPSR6)
	mfc0	t0, CP0_STATUS
#endif
	fpu_save_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_save_fp)

/*
 * Restore a thread's fp context.
 */
LEAF(_restore_fp)
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPSR2) || \
    defined(CONFIG_CPU_MIPSR5) || defined(CONFIG_CPU_MIPSR6)
	mfc0	t0, CP0_STATUS
#endif
	fpu_restore_double a0 t0 t1		# clobbers t1
	jr	ra
	END(_restore_fp)
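
/*
 * Both entry points above take a single pointer argument in a0 (the
 * thread whose FP state is saved or restored; fpu_save_double and
 * fpu_restore_double index off it using the THREAD_FPR* offsets).
 * From C they look roughly like the prototypes below - these are an
 * assumption for illustration, not defined in this file:
 *
 *	extern void _save_fp(struct task_struct *);
 *	extern void _restore_fp(struct task_struct *);
 */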

#ifdef CONFIG_CPU_HAS_MSA

/*
 * Save a thread's MSA vector context.
 */
LEAF(_save_msa)
EXPORT_SYMBOL(_save_msa)
	msa_save_all	a0
	jr	ra
	END(_save_msa)

/*
 * Restore a thread's MSA vector context.
 */
LEAF(_restore_msa)
	msa_restore_all	a0
	jr	ra
	END(_restore_msa)

LEAF(_init_msa_upper)
	msa_init_all_upper
	jr	ra
	END(_init_msa_upper)

#endif

	.set	noreorder

/**
 * _save_fp_context() - save FP context from the FPU
 * @a0 - pointer to fpregs field of sigcontext
 * @a1 - pointer to fpc_csr field of sigcontext
 *
 * Save FP context, including the 32 FP data registers and the FP
 * control & status register, from the FPU to signal context.
 */
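/*
 * Returns 0 in v0 on success, or -EFAULT if one of the EX-protected
 * accesses to the user sigcontext faults.  A plausible C-side view of
 * this routine and its _restore_fp_context counterpart (assumed here
 * for illustration, not defined in this file) is:
 *
 *	extern int _save_fp_context(void __user *fpregs, void __user *csr);
 *	extern int _restore_fp_context(void __user *fpregs, void __user *csr);
 */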
LEAF(_save_fp_context)
	.set	push
	SET_HARDFLOAT
	cfc1	t1, fcr31
	.set	pop
#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPSR2) || \
    defined(CONFIG_CPU_MIPSR5) || defined(CONFIG_CPU_MIPSR6)
	.set	push
	SET_HARDFLOAT
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR5)
	.set	mips32r2
	.set	fp=64
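	/*
	 * Status.FR (bit 26) selects 64-bit FPR mode.  Shifting the
	 * status word left by 5 moves FR into the sign bit, so the bgez
	 * below skips the odd-numbered registers when FR=0: in that mode
	 * the odd registers are not independent doubles, only the 16
	 * even ones need storing.
	 */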
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip storing odd if FR=0
	nop
#endif
	/* Store the 16 odd double precision registers */
	EX	sdc1 $f1, 8(a0)
	EX	sdc1 $f3, 24(a0)
	EX	sdc1 $f5, 40(a0)
	EX	sdc1 $f7, 56(a0)
	EX	sdc1 $f9, 72(a0)
	EX	sdc1 $f11, 88(a0)
	EX	sdc1 $f13, 104(a0)
	EX	sdc1 $f15, 120(a0)
	EX	sdc1 $f17, 136(a0)
	EX	sdc1 $f19, 152(a0)
	EX	sdc1 $f21, 168(a0)
	EX	sdc1 $f23, 184(a0)
	EX	sdc1 $f25, 200(a0)
	EX	sdc1 $f27, 216(a0)
	EX	sdc1 $f29, 232(a0)
	EX	sdc1 $f31, 248(a0)
1:	.set	pop
#endif

	.set	push
	SET_HARDFLOAT
	/* Store the 16 even double precision registers */
	EX	sdc1 $f0, 0(a0)
	EX	sdc1 $f2, 16(a0)
	EX	sdc1 $f4, 32(a0)
	EX	sdc1 $f6, 48(a0)
	EX	sdc1 $f8, 64(a0)
	EX	sdc1 $f10, 80(a0)
	EX	sdc1 $f12, 96(a0)
	EX	sdc1 $f14, 112(a0)
	EX	sdc1 $f16, 128(a0)
	EX	sdc1 $f18, 144(a0)
	EX	sdc1 $f20, 160(a0)
	EX	sdc1 $f22, 176(a0)
	EX	sdc1 $f24, 192(a0)
	EX	sdc1 $f26, 208(a0)
	EX	sdc1 $f28, 224(a0)
	EX	sdc1 $f30, 240(a0)
	EX	sw t1, 0(a1)
	jr	ra
	li	v0, 0					# success
	.set	pop
	END(_save_fp_context)

/**
 * _restore_fp_context() - restore FP context to the FPU
 * @a0 - pointer to fpregs field of sigcontext
 * @a1 - pointer to fpc_csr field of sigcontext
 *
 * Restore FP context, including the 32 FP data registers and the FP
 * control & status register, from signal context to the FPU.
 */
LEAF(_restore_fp_context)
	EX	lw t1, 0(a1)

#if defined(CONFIG_64BIT) || defined(CONFIG_CPU_MIPSR2) || \
    defined(CONFIG_CPU_MIPSR5) || defined(CONFIG_CPU_MIPSR6)
	.set	push
	SET_HARDFLOAT
#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR5)
	.set	mips32r2
	.set	fp=64
	mfc0	t0, CP0_STATUS
	sll	t0, t0, 5
	bgez	t0, 1f			# skip loading odd if FR=0
	nop
#endif
	EX	ldc1 $f1, 8(a0)
	EX	ldc1 $f3, 24(a0)
	EX	ldc1 $f5, 40(a0)
	EX	ldc1 $f7, 56(a0)
	EX	ldc1 $f9, 72(a0)
	EX	ldc1 $f11, 88(a0)
	EX	ldc1 $f13, 104(a0)
	EX	ldc1 $f15, 120(a0)
	EX	ldc1 $f17, 136(a0)
	EX	ldc1 $f19, 152(a0)
	EX	ldc1 $f21, 168(a0)
	EX	ldc1 $f23, 184(a0)
	EX	ldc1 $f25, 200(a0)
	EX	ldc1 $f27, 216(a0)
	EX	ldc1 $f29, 232(a0)
	EX	ldc1 $f31, 248(a0)
1:	.set	pop
#endif
	.set	push
	SET_HARDFLOAT
	EX	ldc1 $f0, 0(a0)
	EX	ldc1 $f2, 16(a0)
	EX	ldc1 $f4, 32(a0)
	EX	ldc1 $f6, 48(a0)
	EX	ldc1 $f8, 64(a0)
	EX	ldc1 $f10, 80(a0)
	EX	ldc1 $f12, 96(a0)
	EX	ldc1 $f14, 112(a0)
	EX	ldc1 $f16, 128(a0)
	EX	ldc1 $f18, 144(a0)
	EX	ldc1 $f20, 160(a0)
	EX	ldc1 $f22, 176(a0)
	EX	ldc1 $f24, 192(a0)
	EX	ldc1 $f26, 208(a0)
	EX	ldc1 $f28, 224(a0)
	EX	ldc1 $f30, 240(a0)
	ctc1	t1, fcr31
	.set	pop
	jr	ra
	li	v0, 0					# success
	END(_restore_fp_context)

#ifdef CONFIG_CPU_HAS_MSA
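
/*
 * op_msa_wr builds a computed-goto accessor for a single MSA vector
 * register: each op_one_wr entry is padded to 16 bytes (.align 4), so
 * "sll t0, a0, 4" turns the register index in a0 into a byte offset
 * into the table, and the jr dispatches to the entry that performs the
 * load or store against the buffer pointed to by a1, then returns.
 */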
	.macro	op_one_wr	op, idx, base
	.align	4
\idx:	\op	\idx, 0, \base
	jr	ra
	nop
	.endm

	.macro	op_msa_wr	name, op
LEAF(\name)
	.set		push
	.set		noreorder
	sll		t0, a0, 4
	PTR_LA		t1, 0f
	PTR_ADDU	t0, t0, t1
	jr		t0
	nop
	op_one_wr	\op, 0, a1
	op_one_wr	\op, 1, a1
	op_one_wr	\op, 2, a1
	op_one_wr	\op, 3, a1
	op_one_wr	\op, 4, a1
	op_one_wr	\op, 5, a1
	op_one_wr	\op, 6, a1
	op_one_wr	\op, 7, a1
	op_one_wr	\op, 8, a1
	op_one_wr	\op, 9, a1
	op_one_wr	\op, 10, a1
	op_one_wr	\op, 11, a1
	op_one_wr	\op, 12, a1
	op_one_wr	\op, 13, a1
	op_one_wr	\op, 14, a1
	op_one_wr	\op, 15, a1
	op_one_wr	\op, 16, a1
	op_one_wr	\op, 17, a1
	op_one_wr	\op, 18, a1
	op_one_wr	\op, 19, a1
	op_one_wr	\op, 20, a1
	op_one_wr	\op, 21, a1
	op_one_wr	\op, 22, a1
	op_one_wr	\op, 23, a1
	op_one_wr	\op, 24, a1
	op_one_wr	\op, 25, a1
	op_one_wr	\op, 26, a1
	op_one_wr	\op, 27, a1
	op_one_wr	\op, 28, a1
	op_one_wr	\op, 29, a1
	op_one_wr	\op, 30, a1
	op_one_wr	\op, 31, a1
	.set		pop
	END(\name)
	.endm

	op_msa_wr	read_msa_wr_b, st_b
	op_msa_wr	read_msa_wr_h, st_h
	op_msa_wr	read_msa_wr_w, st_w
	op_msa_wr	read_msa_wr_d, st_d

	op_msa_wr	write_msa_wr_b, ld_b
	op_msa_wr	write_msa_wr_h, ld_h
	op_msa_wr	write_msa_wr_w, ld_w
	op_msa_wr	write_msa_wr_d, ld_d

#endif /* CONFIG_CPU_HAS_MSA */

#ifdef CONFIG_CPU_HAS_MSA
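
/*
 * save_msa_upper copies the upper 64 bits of one MSA vector register
 * out through $1 (hence .set noat) and stores them to the save area:
 * one 64-bit store on 64-bit kernels, or two 32-bit element copies on
 * 32-bit kernels, ordered to match the memory layout of a doubleword
 * on the configured endianness.  restore_msa_upper below is the exact
 * inverse.
 */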
	.macro	save_msa_upper	wr, off, base
	.set	push
	.set	noat
#ifdef CONFIG_64BIT
	copy_s_d \wr, 1
	EX sd	$1, \off(\base)
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	copy_s_w \wr, 2
	EX sw	$1, \off(\base)
	copy_s_w \wr, 3
	EX sw	$1, (\off+4)(\base)
#else /* CONFIG_CPU_BIG_ENDIAN */
	copy_s_w \wr, 2
	EX sw	$1, (\off+4)(\base)
	copy_s_w \wr, 3
	EX sw	$1, \off(\base)
#endif
	.set	pop
	.endm

LEAF(_save_msa_all_upper)
	save_msa_upper	0, 0x00, a0
	save_msa_upper	1, 0x08, a0
	save_msa_upper	2, 0x10, a0
	save_msa_upper	3, 0x18, a0
	save_msa_upper	4, 0x20, a0
	save_msa_upper	5, 0x28, a0
	save_msa_upper	6, 0x30, a0
	save_msa_upper	7, 0x38, a0
	save_msa_upper	8, 0x40, a0
	save_msa_upper	9, 0x48, a0
	save_msa_upper	10, 0x50, a0
	save_msa_upper	11, 0x58, a0
	save_msa_upper	12, 0x60, a0
	save_msa_upper	13, 0x68, a0
	save_msa_upper	14, 0x70, a0
	save_msa_upper	15, 0x78, a0
	save_msa_upper	16, 0x80, a0
	save_msa_upper	17, 0x88, a0
	save_msa_upper	18, 0x90, a0
	save_msa_upper	19, 0x98, a0
	save_msa_upper	20, 0xa0, a0
	save_msa_upper	21, 0xa8, a0
	save_msa_upper	22, 0xb0, a0
	save_msa_upper	23, 0xb8, a0
	save_msa_upper	24, 0xc0, a0
	save_msa_upper	25, 0xc8, a0
	save_msa_upper	26, 0xd0, a0
	save_msa_upper	27, 0xd8, a0
	save_msa_upper	28, 0xe0, a0
	save_msa_upper	29, 0xe8, a0
	save_msa_upper	30, 0xf0, a0
	save_msa_upper	31, 0xf8, a0
	jr	ra
	li	v0, 0
	END(_save_msa_all_upper)

	.macro	restore_msa_upper	wr, off, base
	.set	push
	.set	noat
#ifdef CONFIG_64BIT
	EX ld	$1, \off(\base)
	insert_d \wr, 1
#elif defined(CONFIG_CPU_LITTLE_ENDIAN)
	EX lw	$1, \off(\base)
	insert_w \wr, 2
	EX lw	$1, (\off+4)(\base)
	insert_w \wr, 3
#else /* CONFIG_CPU_BIG_ENDIAN */
	EX lw	$1, (\off+4)(\base)
	insert_w \wr, 2
	EX lw	$1, \off(\base)
	insert_w \wr, 3
#endif
	.set	pop
	.endm

LEAF(_restore_msa_all_upper)
	restore_msa_upper	0, 0x00, a0
	restore_msa_upper	1, 0x08, a0
	restore_msa_upper	2, 0x10, a0
	restore_msa_upper	3, 0x18, a0
	restore_msa_upper	4, 0x20, a0
	restore_msa_upper	5, 0x28, a0
	restore_msa_upper	6, 0x30, a0
	restore_msa_upper	7, 0x38, a0
	restore_msa_upper	8, 0x40, a0
	restore_msa_upper	9, 0x48, a0
	restore_msa_upper	10, 0x50, a0
	restore_msa_upper	11, 0x58, a0
	restore_msa_upper	12, 0x60, a0
	restore_msa_upper	13, 0x68, a0
	restore_msa_upper	14, 0x70, a0
	restore_msa_upper	15, 0x78, a0
	restore_msa_upper	16, 0x80, a0
	restore_msa_upper	17, 0x88, a0
	restore_msa_upper	18, 0x90, a0
	restore_msa_upper	19, 0x98, a0
	restore_msa_upper	20, 0xa0, a0
	restore_msa_upper	21, 0xa8, a0
	restore_msa_upper	22, 0xb0, a0
	restore_msa_upper	23, 0xb8, a0
	restore_msa_upper	24, 0xc0, a0
	restore_msa_upper	25, 0xc8, a0
	restore_msa_upper	26, 0xd0, a0
	restore_msa_upper	27, 0xd8, a0
	restore_msa_upper	28, 0xe0, a0
	restore_msa_upper	29, 0xe8, a0
	restore_msa_upper	30, 0xf0, a0
	restore_msa_upper	31, 0xf8, a0
	jr	ra
	li	v0, 0
	END(_restore_msa_all_upper)

#endif /* CONFIG_CPU_HAS_MSA */

	.set	reorder
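
/*
 * Landing pad for the __ex_table entries generated by the EX macro
 * above: any EX-wrapped access that faults resumes here and reports
 * failure to the caller.
 */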
	.type	fault, @function
	.ent	fault
fault:	li	v0, -EFAULT				# failure
	jr	ra
	.end	fault