/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * PARISC TLB and cache flushing support
 * Copyright (C) 2000-2001 Hewlett-Packard (John Marvin)
 * Copyright (C) 2001 Matthew Wilcox (willy at parisc-linux.org)
 * Copyright (C) 2002 Richard Hirst (rhirst with parisc-linux.org)
 */

/*
 * NOTE: fdc, fic, and pdc instructions that use base register modification
 * should only use index and base registers that are not shadowed,
 * so that the fast path emulation in the non access miss handler
 * can be used.
 */
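
/*
 * For example (illustrative only), a flush step written as
 *
 *	fdc,m	%r23(%r26)
 *
 * keeps both the index (%r23) and the base (%r26) in non-shadowed
 * registers, so a non-access TLB miss taken inside a flush loop can
 * be handled by the fast path emulation.  A shadowed register such
 * as %r1 should not be used as the base in such loops.
 */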
#ifdef CONFIG_64BIT
	.level	2.0w
#else
	.level	2.0
#endif

#include <asm/psw.h>
#include <asm/assembly.h>
#include <asm/cache.h>
#include <asm/ldcw.h>
#include <asm/alternative.h>
#include <linux/linkage.h>
#include <linux/init.h>
#include <linux/pgtable.h>

	.section .text.hot
	.align	16

ENTRY_CFI(flush_tlb_all_local)
	/*
	 * The pitlbe and pdtlbe instructions should only be used to
	 * flush the entire tlb. Also, there needs to be no intervening
	 * tlb operations, e.g. tlb misses, so the operation needs
	 * to happen in real mode with all interruptions disabled.
	 */

	/* pcxt_ssm_bug - relied upon translation! PA 2.0 Arch. F-4 and F-5 */
	rsm	PSW_SM_I, %r19		/* save I-bit state */
	load32	PA(1f), %r1
	nop
	nop
	nop
	nop
	nop

	rsm	PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl	%r0, %cr17		/* Clear IIASQ tail */
	mtctl	%r0, %cr17		/* Clear IIASQ head */
	mtctl	%r1, %cr18		/* IIAOQ head */
	ldo	4(%r1), %r1
	mtctl	%r1, %cr18		/* IIAOQ tail */
	load32	REAL_MODE_PSW, %r1
	mtctl	%r1, %ipsw
	rfi
	nop

1:	load32	PA(cache_info), %r1

	/* Flush Instruction Tlb */

88:	LDREG	ITLB_SID_BASE(%r1), %r20
	LDREG	ITLB_SID_STRIDE(%r1), %r21
	LDREG	ITLB_SID_COUNT(%r1), %r22
	LDREG	ITLB_OFF_BASE(%r1), %arg0
	LDREG	ITLB_OFF_STRIDE(%r1), %arg1
	LDREG	ITLB_OFF_COUNT(%r1), %arg2
	LDREG	ITLB_LOOP(%r1), %arg3

	addib,COND(=)	-1, %arg3, fitoneloop	/* Preadjust and test */
	movb,<,n	%arg3, %r31, fitdone	/* If loop < 0, skip */
	copy	%arg0, %r28		/* Init base addr */

fitmanyloop:					/* Loop if LOOP >= 2 */
	mtsp	%r20, %sr1
	add	%r21, %r20, %r20	/* increment space */
	copy	%arg2, %r29		/* Init middle loop count */

fitmanymiddle:					/* Loop if LOOP >= 2 */
	addib,COND(>)	-1, %r31, fitmanymiddle	/* Adjusted inner loop decr */
	pitlbe	%r0(%sr1, %r28)
	pitlbe,m	%arg1(%sr1, %r28)	/* Last pitlbe and addr adjust */
	addib,COND(>)	-1, %r29, fitmanymiddle	/* Middle loop decr */
	copy	%arg3, %r31		/* Re-init inner loop count */

	movb,tr	%arg0, %r28, fitmanyloop	/* Re-init base addr */
	addib,COND(<=),n	-1, %r22, fitdone	/* Outer loop count decr */

fitoneloop:					/* Loop if LOOP = 1 */
	mtsp	%r20, %sr1
	copy	%arg0, %r28		/* init base addr */
	copy	%arg2, %r29		/* init middle loop count */

fitonemiddle:					/* Loop if LOOP = 1 */
	addib,COND(>)	-1, %r29, fitonemiddle	/* Middle loop count decr */
	pitlbe,m	%arg1(%sr1, %r28)	/* pitlbe for one loop */

	addib,COND(>)	-1, %r22, fitoneloop	/* Outer loop count decr */
	add	%r21, %r20, %r20	/* increment space */

fitdone:
	ALTERNATIVE(88b, fitdone, ALT_COND_NO_SPLIT_TLB, INSN_NOP)

	/* Flush Data Tlb */

	LDREG	DTLB_SID_BASE(%r1), %r20
	LDREG	DTLB_SID_STRIDE(%r1), %r21
	LDREG	DTLB_SID_COUNT(%r1), %r22
	LDREG	DTLB_OFF_BASE(%r1), %arg0
	LDREG	DTLB_OFF_STRIDE(%r1), %arg1
	LDREG	DTLB_OFF_COUNT(%r1), %arg2
	LDREG	DTLB_LOOP(%r1), %arg3

	addib,COND(=)	-1, %arg3, fdtoneloop	/* Preadjust and test */
	movb,<,n	%arg3, %r31, fdtdone	/* If loop < 0, skip */
	copy	%arg0, %r28		/* Init base addr */

fdtmanyloop:					/* Loop if LOOP >= 2 */
	mtsp	%r20, %sr1
	add	%r21, %r20, %r20	/* increment space */
	copy	%arg2, %r29		/* Init middle loop count */

fdtmanymiddle:					/* Loop if LOOP >= 2 */
	addib,COND(>)	-1, %r31, fdtmanymiddle	/* Adjusted inner loop decr */
	pdtlbe	%r0(%sr1, %r28)
	pdtlbe,m	%arg1(%sr1, %r28)	/* Last pdtlbe and addr adjust */
	addib,COND(>)	-1, %r29, fdtmanymiddle	/* Middle loop decr */
	copy	%arg3, %r31		/* Re-init inner loop count */

	movb,tr	%arg0, %r28, fdtmanyloop	/* Re-init base addr */
	addib,COND(<=),n	-1, %r22, fdtdone	/* Outer loop count decr */

fdtoneloop:					/* Loop if LOOP = 1 */
	mtsp	%r20, %sr1
	copy	%arg0, %r28		/* init base addr */
	copy	%arg2, %r29		/* init middle loop count */

fdtonemiddle:					/* Loop if LOOP = 1 */
	addib,COND(>)	-1, %r29, fdtonemiddle	/* Middle loop count decr */
	pdtlbe,m	%arg1(%sr1, %r28)	/* pdtlbe for one loop */

	addib,COND(>)	-1, %r22, fdtoneloop	/* Outer loop count decr */
	add	%r21, %r20, %r20	/* increment space */

fdtdone:
	/*
	 * Switch back to virtual mode
	 */
	/* pcxt_ssm_bug */
	rsm	PSW_SM_I, %r0
	load32	2f, %r1
	nop
	nop
	nop
	nop
	nop

	rsm	PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl	%r0, %cr17		/* Clear IIASQ tail */
	mtctl	%r0, %cr17		/* Clear IIASQ head */
	mtctl	%r1, %cr18		/* IIAOQ head */
	ldo	4(%r1), %r1
	mtctl	%r1, %cr18		/* IIAOQ tail */
	load32	KERNEL_PSW, %r1
	or	%r1, %r19, %r1		/* I-bit to state on entry */
	mtctl	%r1, %ipsw		/* restore I-bit (entire PSW) */
	rfi
	nop

2:	bv	%r0(%r2)
	nop

	/*
	 * When running in qemu, drop whole flush_tlb_all_local function and
	 * replace by one pdtlbe instruction, for which QEMU will drop all
	 * local TLB entries.
	 */
3:	pdtlbe	%r0(%sr1,%r0)
	bv,n	%r0(%r2)
	ALTERNATIVE_CODE(flush_tlb_all_local, 2, ALT_COND_RUN_ON_QEMU, 3b)
ENDPROC_CFI(flush_tlb_all_local)

	.import cache_info, data

ENTRY_CFI(flush_instruction_cache_local)
88:	load32	cache_info, %r1

	/* Flush Instruction Cache */

	LDREG	ICACHE_BASE(%r1), %arg0
	LDREG	ICACHE_STRIDE(%r1), %arg1
	LDREG	ICACHE_COUNT(%r1), %arg2
	LDREG	ICACHE_LOOP(%r1), %arg3
	rsm	PSW_SM_I, %r22		/* No mmgt ops during loop */
	mtsp	%r0, %sr1
	addib,COND(=)	-1, %arg3, fioneloop	/* Preadjust and test */
	movb,<,n	%arg3, %r31, fisync	/* If loop < 0, do sync */

fimanyloop:					/* Loop if LOOP >= 2 */
	addib,COND(>)	-1, %r31, fimanyloop	/* Adjusted inner loop decr */
	fice	%r0(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)	/* Last fice and addr adjust */
	movb,tr	%arg3, %r31, fimanyloop	/* Re-init inner loop count */
	addib,COND(<=),n	-1, %arg2, fisync	/* Outer loop decr */

fioneloop:					/* Loop if LOOP = 1 */
	/* Some implementations may flush with a single fice instruction */
	cmpib,COND(>>=),n	15, %arg2, fioneloop2

fioneloop1:
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	fice,m	%arg1(%sr1, %arg0)
	addib,COND(>)	-16, %arg2, fioneloop1
	fice,m	%arg1(%sr1, %arg0)

	/* Check if done */
	cmpb,COND(=),n	%arg2, %r0, fisync	/* Predict branch taken */

fioneloop2:
	addib,COND(>)	-1, %arg2, fioneloop2	/* Outer loop count decr */
	fice,m	%arg1(%sr1, %arg0)	/* Fice for one loop */

fisync:
	sync
	mtsm	%r22			/* restore I-bit */
89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_instruction_cache_local)

	.import cache_info, data

ENTRY_CFI(flush_data_cache_local)
88:	load32	cache_info, %r1

	/* Flush Data Cache */

	LDREG	DCACHE_BASE(%r1), %arg0
	LDREG	DCACHE_STRIDE(%r1), %arg1
	LDREG	DCACHE_COUNT(%r1), %arg2
	LDREG	DCACHE_LOOP(%r1), %arg3
	rsm	PSW_SM_I, %r22		/* No mmgt ops during loop */
	mtsp	%r0, %sr1
	addib,COND(=)	-1, %arg3, fdoneloop	/* Preadjust and test */
	movb,<,n	%arg3, %r31, fdsync	/* If loop < 0, do sync */

fdmanyloop:					/* Loop if LOOP >= 2 */
	addib,COND(>)	-1, %r31, fdmanyloop	/* Adjusted inner loop decr */
	fdce	%r0(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)	/* Last fdce and addr adjust */
	movb,tr	%arg3, %r31, fdmanyloop	/* Re-init inner loop count */
	addib,COND(<=),n	-1, %arg2, fdsync	/* Outer loop decr */

fdoneloop:					/* Loop if LOOP = 1 */
	/* Some implementations may flush with a single fdce instruction */
	cmpib,COND(>>=),n	15, %arg2, fdoneloop2

fdoneloop1:
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	fdce,m	%arg1(%sr1, %arg0)
	addib,COND(>)	-16, %arg2, fdoneloop1
	fdce,m	%arg1(%sr1, %arg0)

	/* Check if done */
	cmpb,COND(=),n	%arg2, %r0, fdsync	/* Predict branch taken */

fdoneloop2:
	addib,COND(>)	-1, %arg2, fdoneloop2	/* Outer loop count decr */
	fdce,m	%arg1(%sr1, %arg0)	/* Fdce for one loop */

fdsync:
	sync
	mtsm	%r22			/* restore I-bit */
89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_data_cache_local)

/* Clear page using kernel mapping. */

ENTRY_CFI(clear_page_asm)
#ifdef CONFIG_64BIT

	/* Unroll the loop. */
	ldi	(PAGE_SIZE / 128), %r1

1:
	std	%r0, 0(%r26)
	std	%r0, 8(%r26)
	std	%r0, 16(%r26)
	std	%r0, 24(%r26)
	std	%r0, 32(%r26)
	std	%r0, 40(%r26)
	std	%r0, 48(%r26)
	std	%r0, 56(%r26)
	std	%r0, 64(%r26)
	std	%r0, 72(%r26)
	std	%r0, 80(%r26)
	std	%r0, 88(%r26)
	std	%r0, 96(%r26)
	std	%r0, 104(%r26)
	std	%r0, 112(%r26)
	std	%r0, 120(%r26)

	/* Note reverse branch hint for addib is taken. */
	addib,COND(>),n	-1, %r1, 1b
	ldo	128(%r26), %r26

#else
	/*
	 * Note that until (if) we start saving the full 64-bit register
	 * values on interrupt, we can't use std on a 32 bit kernel.
	 */
	ldi	(PAGE_SIZE / 64), %r1

1:
	stw	%r0, 0(%r26)
	stw	%r0, 4(%r26)
	stw	%r0, 8(%r26)
	stw	%r0, 12(%r26)
	stw	%r0, 16(%r26)
	stw	%r0, 20(%r26)
	stw	%r0, 24(%r26)
	stw	%r0, 28(%r26)
	stw	%r0, 32(%r26)
	stw	%r0, 36(%r26)
	stw	%r0, 40(%r26)
	stw	%r0, 44(%r26)
	stw	%r0, 48(%r26)
	stw	%r0, 52(%r26)
	stw	%r0, 56(%r26)
	stw	%r0, 60(%r26)

	addib,COND(>),n	-1, %r1, 1b
	ldo	64(%r26), %r26
#endif
	bv	%r0(%r2)
	nop
ENDPROC_CFI(clear_page_asm)

/* Copy page using kernel mapping. */

ENTRY_CFI(copy_page_asm)
#ifdef CONFIG_64BIT
	/* PA8x00 CPUs can consume 2 loads or 1 store per cycle.
	 * Unroll the loop by hand and arrange insn appropriately.
	 * Prefetch doesn't improve performance on rp3440.
	 * GCC probably can do this just as well...
	 */
	ldi	(PAGE_SIZE / 128), %r1

1:	ldd	0(%r25), %r19
	ldd	8(%r25), %r20
	ldd	16(%r25), %r21
	ldd	24(%r25), %r22
	std	%r19, 0(%r26)
	std	%r20, 8(%r26)
	ldd	32(%r25), %r19
	ldd	40(%r25), %r20
	std	%r21, 16(%r26)
	std	%r22, 24(%r26)
	ldd	48(%r25), %r21
	ldd	56(%r25), %r22
	std	%r19, 32(%r26)
	std	%r20, 40(%r26)
	ldd	64(%r25), %r19
	ldd	72(%r25), %r20
	std	%r21, 48(%r26)
	std	%r22, 56(%r26)
	ldd	80(%r25), %r21
	ldd	88(%r25), %r22
	std	%r19, 64(%r26)
	std	%r20, 72(%r26)
	ldd	96(%r25), %r19
	ldd	104(%r25), %r20
	std	%r21, 80(%r26)
	std	%r22, 88(%r26)
	ldd	112(%r25), %r21
	ldd	120(%r25), %r22
	ldo	128(%r25), %r25
	std	%r19, 96(%r26)
	std	%r20, 104(%r26)
	std	%r21, 112(%r26)
	std	%r22, 120(%r26)

	/* Note reverse branch hint for addib is taken. */
	addib,COND(>),n	-1, %r1, 1b
	ldo	128(%r26), %r26

#else
	/*
	 * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
	 * bundles (very restricted rules for bundling).
	 * Note that until (if) we start saving
	 * the full 64 bit register values on interrupt, we can't
	 * use ldd/std on a 32 bit kernel.
	 */
	ldw	0(%r25), %r19
	ldi	(PAGE_SIZE / 64), %r1

1:
	ldw	4(%r25), %r20
	ldw	8(%r25), %r21
	ldw	12(%r25), %r22
	stw	%r19, 0(%r26)
	stw	%r20, 4(%r26)
	stw	%r21, 8(%r26)
	stw	%r22, 12(%r26)
	ldw	16(%r25), %r19
	ldw	20(%r25), %r20
	ldw	24(%r25), %r21
	ldw	28(%r25), %r22
	stw	%r19, 16(%r26)
	stw	%r20, 20(%r26)
	stw	%r21, 24(%r26)
	stw	%r22, 28(%r26)
	ldw	32(%r25), %r19
	ldw	36(%r25), %r20
	ldw	40(%r25), %r21
	ldw	44(%r25), %r22
	stw	%r19, 32(%r26)
	stw	%r20, 36(%r26)
	stw	%r21, 40(%r26)
	stw	%r22, 44(%r26)
	ldw	48(%r25), %r19
	ldw	52(%r25), %r20
	ldw	56(%r25), %r21
	ldw	60(%r25), %r22
	stw	%r19, 48(%r26)
	stw	%r20, 52(%r26)
	ldo	64(%r25), %r25
	stw	%r21, 56(%r26)
	stw	%r22, 60(%r26)
	ldo	64(%r26), %r26
	addib,COND(>),n	-1, %r1, 1b
	ldw	0(%r25), %r19
#endif
	bv	%r0(%r2)
	nop
ENDPROC_CFI(copy_page_asm)
/*
 * NOTE: Code in clear_user_page has a hard coded dependency on the
 *	 maximum alias boundary being 4 Mb. We've been assured by the
 *	 parisc chip designers that there will not ever be a parisc
 *	 chip with a larger alias boundary (Never say never :-) ).
 *
 *	 Yah, what about the PA8800 and PA8900 processors?
 *
 *	 Subtle: the dtlb miss handlers support the temp alias region by
 *	 "knowing" that if a dtlb miss happens within the temp alias
 *	 region it must have occurred while in clear_user_page. Since
 *	 this routine makes use of processor local translations, we
 *	 don't want to insert them into the kernel page table. Instead,
 *	 we load up some general registers (they need to be registers
 *	 which aren't shadowed) with the physical page numbers (preshifted
 *	 for tlb insertion) needed to insert the translations. When we
 *	 miss on the translation, the dtlb miss handler inserts the
 *	 translation into the tlb using these values:
 *
 *	 %r26 physical address of "to" translation
 *	 %r23 physical address of "from" translation
 */
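
/*
 * Rough sketch of the aliased address the dep_safe/depi_safe sequences
 * below construct:
 *
 *	tmpalias vaddr = TMPALIAS_MAP_START
 *			 | (user vaddr & ((1 << TMPALIAS_SIZE_BITS) - 1))
 *
 * with the low PAGE_SHIFT offset bits then cleared, so the temporary
 * mapping shares the cache color of the user mapping while staying
 * inside the alias window the dtlb miss handlers recognize.
 */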
/*
 * copy_user_page_asm() performs a page copy using mappings
 * equivalent to the user page mappings. It can be used to
 * implement copy_user_page() but unfortunately both the `from'
 * and `to' pages need to be flushed through mappings equivalent
 * to the user mappings after the copy because the kernel accesses
 * the `from' page through the kmap kernel mapping and the `to'
 * page needs to be flushed since code can be copied. As a
 * result, this implementation is less efficient than the simpler
 * copy using the kernel mapping. It only needs the `from' page
 * to be flushed via the user mapping. The kunmap routines handle
 * the flushes needed for the kernel mapping.
 *
 * I'm still keeping this around because it may be possible to
 * use it if more information is passed into copy_user_page().
 * Have to do some measurements to see if it is worthwhile to
 * lobby for such a change.
 */
ENTRY_CFI(copy_user_page_asm)
	/* Convert virtual `to' and `from' addresses to physical addresses.
	   Move `from' physical address to non shadowed register. */
	ldil	L%(__PAGE_OFFSET), %r1
	sub	%r26, %r1, %r26
	sub	%r25, %r1, %r23

	ldil	L%(TMPALIAS_MAP_START), %r28
	dep_safe	%r24, 31,TMPALIAS_SIZE_BITS, %r28	/* Form aliased virtual address 'to' */
	depi_safe	0, 31,PAGE_SHIFT, %r28	/* Clear any offset bits */
	copy	%r28, %r29
	depi_safe	1, 31-TMPALIAS_SIZE_BITS,1, %r29	/* Form aliased virtual address 'from' */

	/* Purge any old translations */

#ifdef CONFIG_PA20
	pdtlb,l	%r0(%r28)
	pdtlb,l	%r0(%r29)
#else
0:	pdtlb	%r0(%r28)
1:	pdtlb	%r0(%r29)
	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
	ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SMP, INSN_PxTLB)
#endif

#ifdef CONFIG_64BIT
	/* PA8x00 CPUs can consume 2 loads or 1 store per cycle.
	 * Unroll the loop by hand and arrange insn appropriately.
	 * GCC probably can do this just as well.
	 */
	ldd	0(%r29), %r19
	ldi	(PAGE_SIZE / 128), %r1

1:	ldd	8(%r29), %r20
	ldd	16(%r29), %r21
	ldd	24(%r29), %r22
	std	%r19, 0(%r28)
	std	%r20, 8(%r28)
	ldd	32(%r29), %r19
	ldd	40(%r29), %r20
	std	%r21, 16(%r28)
	std	%r22, 24(%r28)
	ldd	48(%r29), %r21
	ldd	56(%r29), %r22
	std	%r19, 32(%r28)
	std	%r20, 40(%r28)
	ldd	64(%r29), %r19
	ldd	72(%r29), %r20
	std	%r21, 48(%r28)
	std	%r22, 56(%r28)
	ldd	80(%r29), %r21
	ldd	88(%r29), %r22
	std	%r19, 64(%r28)
	std	%r20, 72(%r28)
	ldd	96(%r29), %r19
	ldd	104(%r29), %r20
	std	%r21, 80(%r28)
	std	%r22, 88(%r28)
	ldd	112(%r29), %r21
	ldd	120(%r29), %r22
	std	%r19, 96(%r28)
	std	%r20, 104(%r28)
	ldo	128(%r29), %r29
	std	%r21, 112(%r28)
	std	%r22, 120(%r28)
	ldo	128(%r28), %r28

	/* conditional branches nullify on forward taken branch, and on
	 * non-taken backward branch. Note that .+4 is a backwards branch.
	 * The ldd should only get executed if the branch is taken.
	 */
	addib,COND(>),n	-1, %r1, 1b	/* bundle 10 */
	ldd	0(%r29), %r19		/* start next loads */

#else
	ldi	(PAGE_SIZE / 64), %r1

	/*
	 * This loop is optimized for PCXL/PCXL2 ldw/ldw and stw/stw
	 * bundles (very restricted rules for bundling). It probably
	 * does OK on PCXU and better, but we could do better with
	 * ldd/std instructions. Note that until (if) we start saving
	 * the full 64 bit register values on interrupt, we can't
	 * use ldd/std on a 32 bit kernel.
	 */
1:	ldw	0(%r29), %r19
	ldw	4(%r29), %r20
	ldw	8(%r29), %r21
	ldw	12(%r29), %r22
	stw	%r19, 0(%r28)
	stw	%r20, 4(%r28)
	stw	%r21, 8(%r28)
	stw	%r22, 12(%r28)
	ldw	16(%r29), %r19
	ldw	20(%r29), %r20
	ldw	24(%r29), %r21
	ldw	28(%r29), %r22
	stw	%r19, 16(%r28)
	stw	%r20, 20(%r28)
	stw	%r21, 24(%r28)
	stw	%r22, 28(%r28)
	ldw	32(%r29), %r19
	ldw	36(%r29), %r20
	ldw	40(%r29), %r21
	ldw	44(%r29), %r22
	stw	%r19, 32(%r28)
	stw	%r20, 36(%r28)
	stw	%r21, 40(%r28)
	stw	%r22, 44(%r28)
	ldw	48(%r29), %r19
	ldw	52(%r29), %r20
	ldw	56(%r29), %r21
	ldw	60(%r29), %r22
	stw	%r19, 48(%r28)
	stw	%r20, 52(%r28)
	stw	%r21, 56(%r28)
	stw	%r22, 60(%r28)
	ldo	64(%r28), %r28

	addib,COND(>)	-1, %r1, 1b
	ldo	64(%r29), %r29
#endif
	bv	%r0(%r2)
	nop
ENDPROC_CFI(copy_user_page_asm)

ENTRY_CFI(clear_user_page_asm)
	tophys_r1	%r26

	ldil	L%(TMPALIAS_MAP_START), %r28
	dep_safe	%r25, 31,TMPALIAS_SIZE_BITS, %r28	/* Form aliased virtual address 'to' */
	depi_safe	0, 31,PAGE_SHIFT, %r28	/* Clear any offset bits */

	/* Purge any old translation */

#ifdef CONFIG_PA20
	pdtlb,l	%r0(%r28)
#else
0:	pdtlb	%r0(%r28)
	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
#endif

#ifdef CONFIG_64BIT
	ldi	(PAGE_SIZE / 128), %r1

	/* PREFETCH (Write) has not (yet) been proven to help here */
	/* #define PREFETCHW_OP	ldd 256(%0), %r0 */

1:	std	%r0, 0(%r28)
	std	%r0, 8(%r28)
	std	%r0, 16(%r28)
	std	%r0, 24(%r28)
	std	%r0, 32(%r28)
	std	%r0, 40(%r28)
	std	%r0, 48(%r28)
	std	%r0, 56(%r28)
	std	%r0, 64(%r28)
	std	%r0, 72(%r28)
	std	%r0, 80(%r28)
	std	%r0, 88(%r28)
	std	%r0, 96(%r28)
	std	%r0, 104(%r28)
	std	%r0, 112(%r28)
	std	%r0, 120(%r28)
	addib,COND(>)	-1, %r1, 1b
	ldo	128(%r28), %r28

#else	/* ! CONFIG_64BIT */
	ldi	(PAGE_SIZE / 64), %r1

1:	stw	%r0, 0(%r28)
	stw	%r0, 4(%r28)
	stw	%r0, 8(%r28)
	stw	%r0, 12(%r28)
	stw	%r0, 16(%r28)
	stw	%r0, 20(%r28)
	stw	%r0, 24(%r28)
	stw	%r0, 28(%r28)
	stw	%r0, 32(%r28)
	stw	%r0, 36(%r28)
	stw	%r0, 40(%r28)
	stw	%r0, 44(%r28)
	stw	%r0, 48(%r28)
	stw	%r0, 52(%r28)
	stw	%r0, 56(%r28)
	stw	%r0, 60(%r28)
	addib,COND(>)	-1, %r1, 1b
	ldo	64(%r28), %r28
#endif	/* CONFIG_64BIT */

	bv	%r0(%r2)
	nop
ENDPROC_CFI(clear_user_page_asm)

ENTRY_CFI(flush_dcache_page_asm)
	ldil	L%(TMPALIAS_MAP_START), %r28
	dep_safe	%r25, 31,TMPALIAS_SIZE_BITS, %r28	/* Form aliased virtual address 'to' */
	depi_safe	0, 31,PAGE_SHIFT, %r28	/* Clear any offset bits */

	/* Purge any old translation */

#ifdef CONFIG_PA20
	pdtlb,l	%r0(%r28)
#else
0:	pdtlb	%r0(%r28)
	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
#endif

88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r31

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r28, %r25, %r25
	sub	%r25, %r31, %r25

1:	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	fdc,m	%r31(%r28)
	cmpb,COND(>>)	%r25, %r28, 1b	/* predict taken */
	fdc,m	%r31(%r28)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_dcache_page_asm)

ENTRY_CFI(purge_dcache_page_asm)
	ldil	L%(TMPALIAS_MAP_START), %r28
	dep_safe	%r25, 31,TMPALIAS_SIZE_BITS, %r28	/* Form aliased virtual address 'to' */
	depi_safe	0, 31,PAGE_SHIFT, %r28	/* Clear any offset bits */

	/* Purge any old translation */

#ifdef CONFIG_PA20
	pdtlb,l	%r0(%r28)
#else
0:	pdtlb	%r0(%r28)
	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
#endif

88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r31

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r28, %r25, %r25
	sub	%r25, %r31, %r25

1:	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	pdc,m	%r31(%r28)
	cmpb,COND(>>)	%r25, %r28, 1b	/* predict taken */
	pdc,m	%r31(%r28)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(purge_dcache_page_asm)

ENTRY_CFI(flush_icache_page_asm)
	ldil	L%(TMPALIAS_MAP_START), %r28
	dep_safe	%r25, 31,TMPALIAS_SIZE_BITS, %r28	/* Form aliased virtual address 'to' */
	depi_safe	0, 31,PAGE_SHIFT, %r28	/* Clear any offset bits */

	/* Purge any old translation. Note that the FIC instruction
	 * may use either the instruction or data TLB. Given that we
	 * have a flat address space, it's not clear which TLB will be
	 * used. So, we purge both entries. */

#ifdef CONFIG_PA20
	pdtlb,l	%r0(%r28)
1:	pitlb,l	%r0(%sr4,%r28)
	ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SPLIT_TLB, INSN_NOP)
#else
0:	pdtlb	%r0(%r28)
1:	pitlb	%r0(%sr4,%r28)
	ALTERNATIVE(0b, 0b+4, ALT_COND_NO_SMP, INSN_PxTLB)
	ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SMP, INSN_PxTLB)
	ALTERNATIVE(1b, 1b+4, ALT_COND_NO_SPLIT_TLB, INSN_NOP)
#endif

88:	ldil	L%icache_stride, %r1
	ldw	R%icache_stride(%r1), %r31

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r28, %r25, %r25
	sub	%r25, %r31, %r25

	/* fic only has the type 26 form on PA1.1, requiring an
	 * explicit space specification, so use %sr4 */
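	/* For contrast (illustrative only): PA2.0 also accepts a form
	 * without an explicit space register, e.g. "fic,m %r31(%r28)",
	 * but the space-qualified form below assembles for both
	 * PA1.1 and PA2.0. */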
1:	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	fic,m	%r31(%sr4,%r28)
	cmpb,COND(>>)	%r25, %r28, 1b	/* predict taken */
	fic,m	%r31(%sr4,%r28)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_icache_page_asm)

ENTRY_CFI(flush_kernel_dcache_page_asm)
88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23
	depi_safe	0, 31,PAGE_SHIFT, %r26	/* Clear any offset bits */

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r26, %r25, %r25
	sub	%r25, %r23, %r25

1:	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	cmpb,COND(>>)	%r25, %r26, 1b	/* predict taken */
	fdc,m	%r23(%r26)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_kernel_dcache_page_asm)

ENTRY_CFI(purge_kernel_dcache_page_asm)
88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23
	depi_safe	0, 31,PAGE_SHIFT, %r26	/* Clear any offset bits */

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r26, %r25, %r25
	sub	%r25, %r23, %r25

1:	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	cmpb,COND(>>)	%r25, %r26, 1b	/* predict taken */
	pdc,m	%r23(%r26)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(purge_kernel_dcache_page_asm)

ENTRY_CFI(flush_user_dcache_range_asm)
88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

#ifdef CONFIG_64BIT
	depd,z	%r23, 59, 60, %r21
#else
	depw,z	%r23, 27, 28, %r21
#endif
	add	%r26, %r21, %r22
	cmpb,COND(>>),n	%r22, %r25, 2f	/* predict not taken */
1:	add	%r22, %r21, %r22
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	fdc,m	%r23(%sr3, %r26)
	cmpb,COND(<<=)	%r22, %r25, 1b	/* predict taken */
	fdc,m	%r23(%sr3, %r26)

2:	cmpb,COND(>>),n	%r25, %r26, 2b
	fdc,m	%r23(%sr3, %r26)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_user_dcache_range_asm)

ENTRY_CFI(flush_kernel_dcache_range_asm)
88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

#ifdef CONFIG_64BIT
	depd,z	%r23, 59, 60, %r21
#else
	depw,z	%r23, 27, 28, %r21
#endif
	add	%r26, %r21, %r22
	cmpb,COND(>>),n	%r22, %r25, 2f	/* predict not taken */
1:	add	%r22, %r21, %r22
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	fdc,m	%r23(%r26)
	cmpb,COND(<<=)	%r22, %r25, 1b	/* predict taken */
	fdc,m	%r23(%r26)

2:	cmpb,COND(>>),n	%r25, %r26, 2b	/* predict taken */
	fdc,m	%r23(%r26)

	sync
89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_kernel_dcache_range_asm)

ENTRY_CFI(purge_kernel_dcache_range_asm)
88:	ldil	L%dcache_stride, %r1
	ldw	R%dcache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

#ifdef CONFIG_64BIT
	depd,z	%r23, 59, 60, %r21
#else
	depw,z	%r23, 27, 28, %r21
#endif
	add	%r26, %r21, %r22
	cmpb,COND(>>),n	%r22, %r25, 2f	/* predict not taken */
1:	add	%r22, %r21, %r22
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	pdc,m	%r23(%r26)
	cmpb,COND(<<=)	%r22, %r25, 1b	/* predict taken */
	pdc,m	%r23(%r26)

2:	cmpb,COND(>>),n	%r25, %r26, 2b	/* predict taken */
	pdc,m	%r23(%r26)

	sync
89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_DCACHE, INSN_NOP)
	bv	%r0(%r2)
	nop
ENDPROC_CFI(purge_kernel_dcache_range_asm)

ENTRY_CFI(flush_user_icache_range_asm)
88:	ldil	L%icache_stride, %r1
	ldw	R%icache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

#ifdef CONFIG_64BIT
	depd,z	%r23, 59, 60, %r21
#else
	depw,z	%r23, 27, 28, %r21
#endif
	add	%r26, %r21, %r22
	cmpb,COND(>>),n	%r22, %r25, 2f	/* predict not taken */
1:	add	%r22, %r21, %r22
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	fic,m	%r23(%sr3, %r26)
	cmpb,COND(<<=)	%r22, %r25, 1b	/* predict taken */
	fic,m	%r23(%sr3, %r26)

2:	cmpb,COND(>>),n	%r25, %r26, 2b
	fic,m	%r23(%sr3, %r26)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_user_icache_range_asm)

ENTRY_CFI(flush_kernel_icache_page)
88:	ldil	L%icache_stride, %r1
	ldw	R%icache_stride(%r1), %r23

#ifdef CONFIG_64BIT
	depdi,z	1, 63-PAGE_SHIFT,1, %r25
#else
	depwi,z	1, 31-PAGE_SHIFT,1, %r25
#endif
	add	%r26, %r25, %r25
	sub	%r25, %r23, %r25

1:	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	cmpb,COND(>>)	%r25, %r26, 1b	/* predict taken */
	fic,m	%r23(%sr4, %r26)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_kernel_icache_page)

ENTRY_CFI(flush_kernel_icache_range_asm)
88:	ldil	L%icache_stride, %r1
	ldw	R%icache_stride(%r1), %r23
	ldo	-1(%r23), %r21
	ANDCM	%r26, %r21, %r26

#ifdef CONFIG_64BIT
	depd,z	%r23, 59, 60, %r21
#else
	depw,z	%r23, 27, 28, %r21
#endif
	add	%r26, %r21, %r22
	cmpb,COND(>>),n	%r22, %r25, 2f	/* predict not taken */
1:	add	%r22, %r21, %r22
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	fic,m	%r23(%sr4, %r26)
	cmpb,COND(<<=)	%r22, %r25, 1b	/* predict taken */
	fic,m	%r23(%sr4, %r26)

2:	cmpb,COND(>>),n	%r25, %r26, 2b	/* predict taken */
	fic,m	%r23(%sr4, %r26)

89:	ALTERNATIVE(88b, 89b, ALT_COND_NO_ICACHE, INSN_NOP)
	sync
	bv	%r0(%r2)
	nop
ENDPROC_CFI(flush_kernel_icache_range_asm)

	.text

	/* align should cover use of rfi in disable_sr_hashing_asm and
	 * srdis_done.
	 */
	.align	256

ENTRY_CFI(disable_sr_hashing_asm)
	/*
	 * Switch to real mode
	 */
	/* pcxt_ssm_bug */
	rsm	PSW_SM_I, %r0
	load32	PA(1f), %r1
	nop
	nop
	nop
	nop
	nop

	rsm	PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl	%r0, %cr17		/* Clear IIASQ tail */
	mtctl	%r0, %cr17		/* Clear IIASQ head */
	mtctl	%r1, %cr18		/* IIAOQ head */
	ldo	4(%r1), %r1
	mtctl	%r1, %cr18		/* IIAOQ tail */
	load32	REAL_MODE_PSW, %r1
	mtctl	%r1, %ipsw
	rfi
	nop

1:	cmpib,=,n	SRHASH_PCXST, %r26, srdis_pcxs
	cmpib,=,n	SRHASH_PCXL, %r26, srdis_pcxl
	cmpib,=,n	SRHASH_PA20, %r26, srdis_pa20
	b,n	srdis_done

srdis_pcxs:
	/* Disable Space Register Hashing for PCXS,PCXT,PCXT' */
	.word	0x141c1a00		/* mfdiag %dr0, %r28 */
	.word	0x141c1a00		/* must issue twice */
	depwi	0, 18, 1, %r28		/* Clear DHE (dcache hash enable) */
	depwi	0, 20, 1, %r28		/* Clear IHE (icache hash enable) */
	.word	0x141c1600		/* mtdiag %r28, %dr0 */
	.word	0x141c1600		/* must issue twice */
	b,n	srdis_done

srdis_pcxl:
	/* Disable Space Register Hashing for PCXL */
	.word	0x141c0600		/* mfdiag %dr0, %r28 */
	depwi	0, 28, 2, %r28		/* Clear DHASH_EN & IHASH_EN */
	.word	0x141c0240		/* mtdiag %r28, %dr0 */
	b,n	srdis_done

srdis_pa20:
	/* Disable Space Register Hashing for PCXU,PCXU+,PCXW,PCXW+,PCXW2 */
	.word	0x144008bc		/* mfdiag %dr2, %r28 */
	depdi	0, 54, 1, %r28		/* clear DIAG_SPHASH_ENAB (bit 54) */
	.word	0x145c1840		/* mtdiag %r28, %dr2 */

srdis_done:
	/* Switch back to virtual mode */
	rsm	PSW_SM_I, %r0		/* prep to load iia queue */
	load32	2f, %r1
	nop
	nop
	nop
	nop
	nop

	rsm	PSW_SM_Q, %r0		/* prep to load iia queue */
	mtctl	%r0, %cr17		/* Clear IIASQ tail */
	mtctl	%r0, %cr17		/* Clear IIASQ head */
	mtctl	%r1, %cr18		/* IIAOQ head */
	ldo	4(%r1), %r1
	mtctl	%r1, %cr18		/* IIAOQ tail */
	load32	KERNEL_PSW, %r1
	mtctl	%r1, %ipsw
	rfi
	nop

2:	bv	%r0(%r2)
	nop
ENDPROC_CFI(disable_sr_hashing_asm)

	.end