/* SPDX-License-Identifier: GPL-2.0 */
/*
 *  arch/arm/include/asm/cache.h
 *
 *  Cache-geometry constants for 32-bit ARM. The L1 line size is fixed
 *  at Kconfig time via CONFIG_ARM_L1_CACHE_SHIFT.
 */
#ifndef __ASMARM_CACHE_H
#define __ASMARM_CACHE_H

#define L1_CACHE_SHIFT		CONFIG_ARM_L1_CACHE_SHIFT
#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)

/*
 * Memory returned by kmalloc() may be used for DMA, so we must make
 * sure that all such allocations are cache aligned. Otherwise,
 * unrelated code may cause parts of the buffer to be read into the
 * cache before the transfer is done, causing old data to be seen by
 * the CPU.
 */
#define ARCH_DMA_MINALIGN	L1_CACHE_BYTES

/*
 * With EABI on ARMv5 and above we must have 64-bit aligned slab pointers.
 */
#if defined(CONFIG_AEABI) && (__LINUX_ARM_ARCH__ >= 5)
#define ARCH_SLAB_MINALIGN	8
#endif

/* Place rarely-written data in its own section to avoid false sharing. */
#define __read_mostly	__section(".data..read_mostly")

#endif /* __ASMARM_CACHE_H */