/* entry-fpsimd.S */
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * FP/SIMD state saving and restoring
 *
 * Copyright (C) 2012 ARM Ltd.
 * Author: Catalin Marinas <[email protected]>
 */

#include <linux/linkage.h>

#include <asm/assembler.h>
#include <asm/fpsimdmacros.h>
  11. /*
  12. * Save the FP registers.
  13. *
  14. * x0 - pointer to struct fpsimd_state
  15. */
  16. SYM_FUNC_START(fpsimd_save_state)
  17. fpsimd_save x0, 8
  18. ret
  19. SYM_FUNC_END(fpsimd_save_state)
  20. /*
  21. * Load the FP registers.
  22. *
  23. * x0 - pointer to struct fpsimd_state
  24. */
  25. SYM_FUNC_START(fpsimd_load_state)
  26. fpsimd_restore x0, 8
  27. ret
  28. SYM_FUNC_END(fpsimd_load_state)
  29. #ifdef CONFIG_ARM64_SVE
  30. /*
  31. * Save the SVE state
  32. *
  33. * x0 - pointer to buffer for state
  34. * x1 - pointer to storage for FPSR
  35. * x2 - Save FFR if non-zero
  36. */
  37. SYM_FUNC_START(sve_save_state)
  38. sve_save 0, x1, x2, 3
  39. ret
  40. SYM_FUNC_END(sve_save_state)
  41. /*
  42. * Load the SVE state
  43. *
  44. * x0 - pointer to buffer for state
  45. * x1 - pointer to storage for FPSR
  46. * x2 - Restore FFR if non-zero
  47. */
  48. SYM_FUNC_START(sve_load_state)
  49. sve_load 0, x1, x2, 4
  50. ret
  51. SYM_FUNC_END(sve_load_state)
  52. SYM_FUNC_START(sve_get_vl)
  53. _sve_rdvl 0, 1
  54. ret
  55. SYM_FUNC_END(sve_get_vl)
  56. SYM_FUNC_START(sve_set_vq)
  57. sve_load_vq x0, x1, x2
  58. ret
  59. SYM_FUNC_END(sve_set_vq)
  60. /*
  61. * Zero all SVE registers but the first 128-bits of each vector
  62. *
  63. * VQ must already be configured by caller, any further updates of VQ
  64. * will need to ensure that the register state remains valid.
  65. *
  66. * x0 = include FFR?
  67. * x1 = VQ - 1
  68. */
  69. SYM_FUNC_START(sve_flush_live)
  70. cbz x1, 1f // A VQ-1 of 0 is 128 bits so no extra Z state
  71. sve_flush_z
  72. 1: sve_flush_p
  73. tbz x0, #0, 2f
  74. sve_flush_ffr
  75. 2: ret
  76. SYM_FUNC_END(sve_flush_live)
  77. #endif /* CONFIG_ARM64_SVE */
  78. #ifdef CONFIG_ARM64_SME
  79. SYM_FUNC_START(sme_get_vl)
  80. _sme_rdsvl 0, 1
  81. ret
  82. SYM_FUNC_END(sme_get_vl)
  83. SYM_FUNC_START(sme_set_vq)
  84. sme_load_vq x0, x1, x2
  85. ret
  86. SYM_FUNC_END(sme_set_vq)
  87. /*
  88. * Save the SME state
  89. *
  90. * x0 - pointer to buffer for state
  91. */
  92. SYM_FUNC_START(za_save_state)
  93. _sme_rdsvl 1, 1 // x1 = VL/8
  94. sme_save_za 0, x1, 12
  95. ret
  96. SYM_FUNC_END(za_save_state)
  97. /*
  98. * Load the SME state
  99. *
  100. * x0 - pointer to buffer for state
  101. */
  102. SYM_FUNC_START(za_load_state)
  103. _sme_rdsvl 1, 1 // x1 = VL/8
  104. sme_load_za 0, x1, 12
  105. ret
  106. SYM_FUNC_END(za_load_state)
  107. #endif /* CONFIG_ARM64_SME */