string_64.h

/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_STRING_64_H
#define _ASM_X86_STRING_64_H

#ifdef __KERNEL__
#include <linux/jump_label.h>

/* Written 2002 by Andi Kleen */

/* Even with __builtin_ the compiler may decide to use the out of line
   function. */

#if defined(__SANITIZE_MEMORY__) && defined(__NO_FORTIFY)
#include <linux/kmsan_string.h>
#endif
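
/*
 * Under KMSAN (__SANITIZE_MEMORY__), plain memcpy()/memset()/memmove()
 * are redirected to the __msan_* wrappers so that the sanitizer's
 * shadow metadata is updated along with the data itself.  The
 * double-underscore variants (__memcpy() and friends) always resolve
 * to the plain, uninstrumented implementations.
 */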
#define __HAVE_ARCH_MEMCPY 1
#if defined(__SANITIZE_MEMORY__) && defined(__NO_FORTIFY)
#undef memcpy
#define memcpy __msan_memcpy
#else
extern void *memcpy(void *to, const void *from, size_t len);
#endif
extern void *__memcpy(void *to, const void *from, size_t len);

#define __HAVE_ARCH_MEMSET
#if defined(__SANITIZE_MEMORY__) && defined(__NO_FORTIFY)
extern void *__msan_memset(void *s, int c, size_t n);
#undef memset
#define memset __msan_memset
#else
void *memset(void *s, int c, size_t n);
#endif
void *__memset(void *s, int c, size_t n);

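/*
 * memset16/32/64 fill a buffer with a 16-, 32- or 64-bit pattern using
 * a single "rep stosw"/"rep stosl"/"rep stosq": RCX holds the element
 * count, RDI the destination and AX/EAX/RAX the pattern.  The dummy
 * outputs d0/d1 tell the compiler that RCX and RDI are clobbered by
 * the string instruction; the "memory" clobber covers the stores.
 */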
#define __HAVE_ARCH_MEMSET16
static inline void *memset16(uint16_t *s, uint16_t v, size_t n)
{
        long d0, d1;
        asm volatile("rep\n\t"
                     "stosw"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (v), "1" (s), "0" (n)
                     : "memory");
        return s;
}

#define __HAVE_ARCH_MEMSET32
static inline void *memset32(uint32_t *s, uint32_t v, size_t n)
{
        long d0, d1;
        asm volatile("rep\n\t"
                     "stosl"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (v), "1" (s), "0" (n)
                     : "memory");
        return s;
}

#define __HAVE_ARCH_MEMSET64
static inline void *memset64(uint64_t *s, uint64_t v, size_t n)
{
        long d0, d1;
        asm volatile("rep\n\t"
                     "stosq"
                     : "=&c" (d0), "=&D" (d1)
                     : "a" (v), "1" (s), "0" (n)
                     : "memory");
        return s;
}
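
/*
 * Example (hypothetical caller): fill one line of a 16bpp framebuffer
 * with a solid RGB565 colour in a single string operation:
 *
 *      u16 *line = ...;
 *      memset16(line, 0x7bef, width);
 */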

#define __HAVE_ARCH_MEMMOVE
#if defined(__SANITIZE_MEMORY__) && defined(__NO_FORTIFY)
#undef memmove
void *__msan_memmove(void *dest, const void *src, size_t len);
#define memmove __msan_memmove
#else
void *memmove(void *dest, const void *src, size_t count);
#endif
void *__memmove(void *dest, const void *src, size_t count);

int memcmp(const void *cs, const void *ct, size_t count);
size_t strlen(const char *s);
char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);

#if (defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__))
/*
 * For files that are not instrumented (e.g. mm/slub.c) we
 * must use the non-instrumented variants of the mem* functions,
 * as the instrumented ones would check KASAN shadow memory.
 */
#undef memcpy
#define memcpy(dst, src, len) __memcpy(dst, src, len)
#undef memmove
#define memmove(dst, src, len) __memmove(dst, src, len)
#undef memset
#define memset(s, c, n) __memset(s, c, n)

#ifndef __NO_FORTIFY
#define __NO_FORTIFY /* FORTIFY_SOURCE uses __builtin_memcpy, etc. */
#endif

#endif

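/*
 * memcpy_flushcache() is used (e.g. by pmem drivers) to copy data
 * without leaving it dirty in the CPU cache.  For small, compile-time
 * constant sizes the copy is inlined as "movnti" non-temporal stores,
 * which bypass the cache hierarchy; all other sizes fall back to the
 * out-of-line __memcpy_flushcache().
 */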
#ifdef CONFIG_ARCH_HAS_UACCESS_FLUSHCACHE
#define __HAVE_ARCH_MEMCPY_FLUSHCACHE 1
void __memcpy_flushcache(void *dst, const void *src, size_t cnt);
static __always_inline void memcpy_flushcache(void *dst, const void *src, size_t cnt)
{
        if (__builtin_constant_p(cnt)) {
                switch (cnt) {
                case 4:
                        asm ("movntil %1, %0" : "=m"(*(u32 *)dst) : "r"(*(u32 *)src));
                        return;
                case 8:
                        asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
                        return;
                case 16:
                        asm ("movntiq %1, %0" : "=m"(*(u64 *)dst) : "r"(*(u64 *)src));
                        asm ("movntiq %1, %0" : "=m"(*(u64 *)(dst + 8)) : "r"(*(u64 *)(src + 8)));
                        return;
                }
        }
        __memcpy_flushcache(dst, src, cnt);
}
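
/*
 * Example (hypothetical caller): a constant 8-byte copy compiles to a
 * single movntiq, while a variable-length copy takes the out-of-line
 * path:
 *
 *      memcpy_flushcache(pmem_dst, &val, sizeof(u64));
 *      memcpy_flushcache(pmem_dst, buf, len);
 */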
#endif

#endif /* __KERNEL__ */
#endif /* _ASM_X86_STRING_64_H */