cache.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_POWERPC_CACHE_H
#define _ASM_POWERPC_CACHE_H

#ifdef __KERNEL__

/* bytes per L1 cache line */
#if defined(CONFIG_PPC_8xx)
#define L1_CACHE_SHIFT		4
#define MAX_COPY_PREFETCH	1
#define IFETCH_ALIGN_SHIFT	2
#elif defined(CONFIG_PPC_E500MC)
#define L1_CACHE_SHIFT		6
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3
#elif defined(CONFIG_PPC32)
#define MAX_COPY_PREFETCH	4
#define IFETCH_ALIGN_SHIFT	3	/* 603 fetches 2 insn at a time */
#if defined(CONFIG_PPC_47x)
#define L1_CACHE_SHIFT		7
#else
#define L1_CACHE_SHIFT		5
#endif
#else /* CONFIG_PPC64 */
#define L1_CACHE_SHIFT		7
#define IFETCH_ALIGN_SHIFT	4	/* POWER8,9 */
#endif

#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)
#define SMP_CACHE_BYTES		L1_CACHE_BYTES
#define IFETCH_ALIGN_BYTES	(1 << IFETCH_ALIGN_SHIFT)
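
/*
 * Worked example (illustrative, not from the original header): on 64-bit
 * parts the #else branch above gives L1_CACHE_SHIFT = 7, so
 * L1_CACHE_BYTES = 1 << 7 = 128 and IFETCH_ALIGN_BYTES = 1 << 4 = 16;
 * generic code then uses SMP_CACHE_BYTES (also 128 here) for cache-line
 * alignment and padding.
 */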

#if !defined(__ASSEMBLY__)
#ifdef CONFIG_PPC64

struct ppc_cache_info {
	u32 size;
	u32 line_size;
	u32 block_size;	/* L1 only */
	u32 log_block_size;
	u32 blocks_per_page;
	u32 sets;
	u32 assoc;
};

struct ppc64_caches {
	struct ppc_cache_info l1d;
	struct ppc_cache_info l1i;
	struct ppc_cache_info l2;
	struct ppc_cache_info l3;
};

extern struct ppc64_caches ppc64_caches;

static inline u32 l1_dcache_shift(void)
{
	return ppc64_caches.l1d.log_block_size;
}

static inline u32 l1_dcache_bytes(void)
{
	return ppc64_caches.l1d.block_size;
}

static inline u32 l1_icache_shift(void)
{
	return ppc64_caches.l1i.log_block_size;
}

static inline u32 l1_icache_bytes(void)
{
	return ppc64_caches.l1i.block_size;
}
#else
static inline u32 l1_dcache_shift(void)
{
	return L1_CACHE_SHIFT;
}

static inline u32 l1_dcache_bytes(void)
{
	return L1_CACHE_BYTES;
}

static inline u32 l1_icache_shift(void)
{
	return L1_CACHE_SHIFT;
}

static inline u32 l1_icache_bytes(void)
{
	return L1_CACHE_BYTES;
}
#endif

#define __read_mostly __section(".data..read_mostly")

#ifdef CONFIG_PPC_BOOK3S_32
extern long _get_L2CR(void);
extern long _get_L3CR(void);
extern void _set_L2CR(unsigned long);
extern void _set_L3CR(unsigned long);
#else
#define _get_L2CR()	0L
#define _get_L3CR()	0L
#define _set_L2CR(val)	do { } while(0)
#define _set_L3CR(val)	do { } while(0)
#endif

static inline void dcbz(void *addr)
{
	/* Data Cache Block Zero: zero the cache line containing addr. */
	__asm__ __volatile__ ("dcbz 0, %0" : : "r"(addr) : "memory");
}

static inline void dcbi(void *addr)
{
	/* Data Cache Block Invalidate: discard the line without writing it back. */
	__asm__ __volatile__ ("dcbi 0, %0" : : "r"(addr) : "memory");
}

static inline void dcbf(void *addr)
{
	/* Data Cache Block Flush: write the line back to memory and invalidate it. */
	__asm__ __volatile__ ("dcbf 0, %0" : : "r"(addr) : "memory");
}

static inline void dcbst(void *addr)
{
	/* Data Cache Block Store: write the line back but keep it valid. */
	__asm__ __volatile__ ("dcbst 0, %0" : : "r"(addr) : "memory");
}

static inline void icbi(void *addr)
{
	/* Instruction Cache Block Invalidate. */
	asm volatile ("icbi 0, %0" : : "r"(addr) : "memory");
}

static inline void iccci(void *addr)
{
	/* Instruction Cache Congruence Class Invalidate (40x/44x/e200 family). */
	asm volatile ("iccci 0, %0" : : "r"(addr) : "memory");
}

#endif /* !__ASSEMBLY__ */
#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_CACHE_H */