/*
 * arch/xtensa/include/asm/initialize_mmu.h
 *
 * Initializes MMU:
 *
 *	For the new V3 MMU we remap the TLB from virtual == physical
 *	to the standard Linux mapping used in earlier MMUs.
 *
 *	For the MMU we also support a new configuration register that
 *	specifies how the S32C1I instruction operates with the cache
 *	controller.
 *
 * This file is subject to the terms and conditions of the GNU General
 * Public License.  See the file "COPYING" in the main directory of
 * this archive for more details.
 *
 * Copyright (C) 2008 - 2012 Tensilica, Inc.
 *
 *   Marc Gauthier <[email protected]>
 *   Pete Delaney <[email protected]>
 */

#ifndef _XTENSA_INITIALIZE_MMU_H
#define _XTENSA_INITIALIZE_MMU_H

#include <linux/init.h>
#include <linux/pgtable.h>
#include <asm/vectors.h>

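/*
 * TLB attribute encodings used when installing the mappings below:
 * CA_BYPASS is uncached/bypass, CA_WRITEBACK is write-back cached;
 * with the full MMU both also set the hardware write and exec bits.
 */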
#if XCHAL_HAVE_PTP_MMU
#define CA_BYPASS	(_PAGE_CA_BYPASS | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
#define CA_WRITEBACK	(_PAGE_CA_WB | _PAGE_HW_WRITE | _PAGE_HW_EXEC)
#else
#define CA_WRITEBACK	(0x4)
#endif

#ifdef __ASSEMBLY__

#define XTENSA_HWVERSION_RC_2009_0 230000

	.macro	initialize_mmu

#if XCHAL_HAVE_S32C1I && (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
/*
 * We have the Atomic Operation Control (ATOMCTL) register; initialize it.
 * For details see Documentation/xtensa/atomctl.rst
 */
#if XCHAL_DCACHE_IS_COHERENT
	movi	a3, 0x25	/* For SMP/MX -- internal for writeback,
				 * RCW otherwise
				 */
#else
	movi	a3, 0x29	/* non-MX -- most cores use standard memory
				 * controllers, which usually can't use RCW
				 */
#endif
	wsr	a3, atomctl
#endif	/* XCHAL_HAVE_S32C1I &&
	 * (XCHAL_HW_MIN_VERSION >= XTENSA_HWVERSION_RC_2009_0)
	 */

#if defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU && XCHAL_HAVE_SPANNING_WAY
/*
 * Have MMU v3
 */

#if !XCHAL_HAVE_VECBASE
# error "MMU v3 requires reloc vectors"
#endif

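	/* CALL0 leaves its return address in a0, so after the call below
	 * a0 holds the physical address of the "_j 2f" that follows it;
	 * the remapping steps use a0 to locate this code.
	 */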
	movi	a1, 0
	_call0	1f
	_j	2f

	.align	4
1:

#if CONFIG_KERNEL_LOAD_ADDRESS < 0x40000000ul
#define TEMP_MAPPING_VADDR 0x40000000
#else
#define TEMP_MAPPING_VADDR 0x00000000
#endif

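	/* The temporary mapping window is placed at 0x40000000, unless the
	 * kernel is loaded at or above that address, in which case vaddr 0
	 * is used instead; either way the window never overlaps the region
	 * the kernel is loaded at.
	 */
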
	/* Step 1: invalidate the spanning-way mapping at TEMP_MAPPING_VADDR
	 * (0x40000000..0x5FFFFFFF in the default case).
	 */

	movi	a2, TEMP_MAPPING_VADDR | XCHAL_SPANNING_WAY
	idtlb	a2
	iitlb	a2
	isync

	/* Step 2: map a 128 MB page at TEMP_MAPPING_VADDR (way 5) to the
	 * physical region containing this code, and jump to the new mapping.
	 */

	srli	a3, a0, 27
	slli	a3, a3, 27
	addi	a3, a3, CA_BYPASS
	addi	a7, a2, 5 - XCHAL_SPANNING_WAY
	wdtlb	a3, a7
	witlb	a3, a7
	isync

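	/* a0 still holds the physical return address from _call0 above;
	 * keep its low 27 bits (the offset within the 128 MB page just
	 * mapped) and rebase them onto the temporary virtual window.
	 */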
	slli	a4, a0, 5
	srli	a4, a4, 5
	addi	a5, a2, -XCHAL_SPANNING_WAY
	add	a4, a4, a5
	jx	a4

	/* Step 3: unmap everything other than the current area.
	 *	   Walk the other spanning-way entries in 512 MB steps,
	 *	   starting one entry above the current one and wrapping
	 *	   around until we come back to it.
	 */
2:	movi	a4, 0x20000000
	add	a5, a2, a4
3:	idtlb	a5
	iitlb	a5
	add	a5, a5, a4
	bne	a5, a2, 3b

	/* Step 4: Set up the MMU with the requested static mappings. */

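	/* ITLBCFG/DTLBCFG hold the page-size selection for the configurable
	 * static ways; 0x01000000 programs the size field of way 6, the way
	 * used for the KIO mappings installed below.
	 */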
	movi	a6, 0x01000000
	wsr	a6, ITLBCFG
	wsr	a6, DTLBCFG
	isync

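	/* Install the static KSEG mapping: cached at XCHAL_KSEG_CACHED_VADDR
	 * plus an uncached (bypass) alias at XCHAL_KSEG_BYPASS_VADDR, both
	 * backed by XCHAL_KSEG_PADDR.
	 */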
	movi	a5, XCHAL_KSEG_CACHED_VADDR + XCHAL_KSEG_TLB_WAY
	movi	a4, XCHAL_KSEG_PADDR + CA_WRITEBACK
	wdtlb	a4, a5
	witlb	a4, a5

	movi	a5, XCHAL_KSEG_BYPASS_VADDR + XCHAL_KSEG_TLB_WAY
	movi	a4, XCHAL_KSEG_PADDR + CA_BYPASS
	wdtlb	a4, a5
	witlb	a4, a5

#ifdef CONFIG_XTENSA_KSEG_512M
	movi	a5, XCHAL_KSEG_CACHED_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
	movi	a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_WRITEBACK
	wdtlb	a4, a5
	witlb	a4, a5

	movi	a5, XCHAL_KSEG_BYPASS_VADDR + 0x10000000 + XCHAL_KSEG_TLB_WAY
	movi	a4, XCHAL_KSEG_PADDR + 0x10000000 + CA_BYPASS
	wdtlb	a4, a5
	witlb	a4, a5
#endif

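	/* Same pattern for KIO: cached and bypass aliases of the default
	 * I/O physical range.
	 */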
	movi	a5, XCHAL_KIO_CACHED_VADDR + XCHAL_KIO_TLB_WAY
	movi	a4, XCHAL_KIO_DEFAULT_PADDR + CA_WRITEBACK
	wdtlb	a4, a5
	witlb	a4, a5

	movi	a5, XCHAL_KIO_BYPASS_VADDR + XCHAL_KIO_TLB_WAY
	movi	a4, XCHAL_KIO_DEFAULT_PADDR + CA_BYPASS
	wdtlb	a4, a5
	witlb	a4, a5

	isync

	/* Jump to self, using final mappings. */
	movi	a4, 1f
	jx	a4

1:
	/* Step 5: remove temporary mapping. */
	idtlb	a7
	iitlb	a7
	isync

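	/* Clear PTEVADDR so the page-table base used for TLB refill starts
	 * out in a known state.
	 */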
	movi	a0, 0
	wsr	a0, ptevaddr
	rsync

#endif /* defined(CONFIG_MMU) && XCHAL_HAVE_PTP_MMU &&
	  XCHAL_HAVE_SPANNING_WAY */

	.endm

	.macro	initialize_cacheattr

#if !defined(CONFIG_MMU) && (XCHAL_HAVE_TLBS || XCHAL_HAVE_MPU)
#if CONFIG_MEMMAP_CACHEATTR == 0x22222222 && XCHAL_HAVE_PTP_MMU
#error Default MEMMAP_CACHEATTR of 0x22222222 does not work with full MMU.
#endif

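	/* Apply CONFIG_MEMMAP_CACHEATTR, one nibble per 512 MB region of
	 * the address space (lowest region in the least significant
	 * nibble): either by programming MPU entries or by rewriting the
	 * attribute bits of the spanning-way TLB entries.
	 */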
#if XCHAL_HAVE_MPU
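	/* Table translating each CONFIG_MEMMAP_CACHEATTR nibble into MPU
	 * memory-type attribute bits (indexed with addx4 below); rows left
	 * at 0 are nibble values with no direct MPU translation.
	 */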
	__REFCONST
	.align	4
.Lattribute_table:
	.long 0x000000, 0x1fff00, 0x1ddf00, 0x1eef00
	.long 0x006600, 0x000000, 0x000000, 0x000000
	.long 0x000000, 0x000000, 0x000000, 0x000000
	.long 0x000000, 0x000000, 0x000000, 0x000000
	.previous

	movi	a3, .Lattribute_table
	movi	a4, CONFIG_MEMMAP_CACHEATTR
	movi	a5, 1
	movi	a6, XCHAL_MPU_ENTRIES
	movi	a10, 0x20000000
	movi	a11, -1
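	/* Walk the eight 512 MB regions from the top of the address space
	 * down, consuming CONFIG_MEMMAP_CACHEATTR from its most significant
	 * nibble; a new MPU entry is allocated only where the attribute
	 * differs from that of the region above.
	 */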
1:
	sub	a5, a5, a10
	extui	a8, a4, 28, 4
	beq	a8, a11, 2f
	addi	a6, a6, -1
	mov	a11, a8
2:
	addx4	a9, a8, a3
	l32i	a9, a9, 0
	or	a9, a9, a6
	wptlb	a9, a5
	slli	a4, a4, 4
	bgeu	a5, a10, 1b

#else
	movi	a5, XCHAL_SPANNING_WAY
	movi	a6, ~_PAGE_ATTRIB_MASK
	movi	a4, CONFIG_MEMMAP_CACHEATTR
	movi	a8, 0x20000000
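	/* For each 512 MB spanning-way entry, merge the next CACHEATTR
	 * nibble into the PTE attribute field while preserving the address
	 * bits: ((old ^ attr) & ~mask) ^ attr == (old & ~mask) | (attr & mask).
	 */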
1:
	rdtlb1	a3, a5
	xor	a3, a3, a4
	and	a3, a3, a6
	xor	a3, a3, a4
	wdtlb	a3, a5

	ritlb1	a3, a5
	xor	a3, a3, a4
	and	a3, a3, a6
	xor	a3, a3, a4
	witlb	a3, a5

	add	a5, a5, a8
	srli	a4, a4, 4
	bgeu	a5, a8, 1b

	isync
#endif
#endif

	.endm

#endif /* __ASSEMBLY__ */
#endif /* _XTENSA_INITIALIZE_MMU_H */