cacheflush.S

/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * vDSO provided cache flush routines
 *
 * Copyright (C) 2004 Benjamin Herrenschmidt ([email protected]),
 *                    IBM Corp.
 */
#include <asm/processor.h>
#include <asm/ppc_asm.h>
#include <asm/vdso.h>
#include <asm/vdso_datapage.h>
#include <asm/asm-offsets.h>
#include <asm/cache.h>

	.text

/*
 * Default "generic" version of __kernel_sync_dicache.
 *
 * void __kernel_sync_dicache(unsigned long start, unsigned long end)
 *
 * Flushes the data cache & invalidates the instruction cache for the
 * provided range [start, end)
 */
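/*
 * Userspace calling sketch (illustrative, not part of this file): the
 * function is resolved from the vDSO image at run time rather than linked
 * directly. Assuming the vdso_sym() helper from the kernel's vDSO
 * selftests and the LINUX_2.6.15 symbol version used by the powerpc vDSO:
 *
 *	void (*sync_dicache)(unsigned long, unsigned long) =
 *		vdso_sym("LINUX_2.6.15", "__kernel_sync_dicache");
 *	if (sync_dicache)
 *		sync_dicache((unsigned long)buf, (unsigned long)buf + len);
 */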
V_FUNCTION_BEGIN(__kernel_sync_dicache)
  .cfi_startproc
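/*
 * On CPUs that advertise CPU_FTR_COHERENT_ICACHE the feature fixup keeps
 * the branch below, going straight to the short path at 3:; on other CPUs
 * the branch is patched out at boot and we fall through to the per-line
 * flush loops.
 */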
BEGIN_FTR_SECTION
	b	3f
END_FTR_SECTION_IFSET(CPU_FTR_COHERENT_ICACHE)
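/*
 * On 64-bit the cache geometry is read from the vDSO datapage.
 * get_datapage clobbers LR, so LR is preserved in r12 around it.
 */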
#ifdef CONFIG_PPC64
	mflr	r12
  .cfi_register lr,r12
	get_datapage	r10
	mtlr	r12
  .cfi_restore lr
#endif

#ifdef CONFIG_PPC64
	lwz	r7,CFG_DCACHE_BLOCKSZ(r10)
	addi	r5,r7,-1
#else
	li	r5, L1_CACHE_BYTES - 1
#endif

	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5		/* ensure we get enough */
#ifdef CONFIG_PPC64
	lwz	r9,CFG_DCACHE_LOGBLOCKSZ(r10)
	PPC_SRL.	r8,r8,r9	/* compute line count */
#else
	srwi.	r8, r8, L1_CACHE_SHIFT
	mr	r7, r6
#endif
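/*
 * Clear cr0.SO: vDSO entry points follow the syscall return convention,
 * where a set summary-overflow bit signals an error to the caller.
 */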
	crclr	cr0*4+so
	beqlr				/* nothing to do? */
	mtctr	r8
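/* Write every data cache block in the range back to memory */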
1:	dcbst	0,r6
#ifdef CONFIG_PPC64
	add	r6,r6,r7
#else
	addi	r6, r6, L1_CACHE_BYTES
#endif
	bdnz	1b
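	/* order the dcbst writebacks before the icbi pass below */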
	sync

/* Now invalidate the instruction cache */

#ifdef CONFIG_PPC64
	lwz	r7,CFG_ICACHE_BLOCKSZ(r10)
	addi	r5,r7,-1
	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5
	lwz	r9,CFG_ICACHE_LOGBLOCKSZ(r10)
	PPC_SRL.	r8,r8,r9	/* compute line count */
	crclr	cr0*4+so
	beqlr				/* nothing to do? */
#endif
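/*
 * 64-bit recomputed the line count above from the icache geometry, which
 * may differ from the dcache's; 32-bit reuses r8 (count) and r7 (rounded
 * start) from the dcache pass, since both caches use L1_CACHE_BYTES lines.
 */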
	mtctr	r8
#ifdef CONFIG_PPC64
2:	icbi	0,r6
	add	r6,r6,r7
#else
2:	icbi	0, r7
	addi	r7, r7, L1_CACHE_BYTES
#endif
	bdnz	2b
	isync
	li	r3,0
	blr
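/*
 * Fast path for coherent-icache CPUs: no per-line flushing is needed, a
 * single sync/icbi/isync sequence suffices. The icbi target is the stack
 * pointer, used simply as a convenient valid address.
 */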
3:
	crclr	cr0*4+so
	sync
	icbi	0,r1
	isync
	li	r3,0
	blr
  .cfi_endproc
V_FUNCTION_END(__kernel_sync_dicache)