verify_cpu.S

/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * verify_cpu.S - Code for cpu long mode and SSE verification. This
 * code has been borrowed from boot/setup.S and was introduced by
 * Andi Kleen.
 *
 * Copyright (c) 2007 Andi Kleen ([email protected])
 * Copyright (c) 2007 Eric Biederman ([email protected])
 * Copyright (c) 2007 Vivek Goyal ([email protected])
 * Copyright (c) 2010 Kees Cook ([email protected])
 *
 * This is common code for verifying whether the CPU supports long mode
 * and SSE. It is not called directly; instead, this file is included
 * in various places and compiled in that context.
 * This file is expected to run in 32-bit code. Currently:
 *
 * arch/x86/boot/compressed/head_64.S: Boot cpu verification
 * arch/x86/kernel/trampoline_64.S: secondary processor verification
 * arch/x86/kernel/head_32.S: processor startup
 *
 * verify_cpu returns the status of long mode and SSE in register %eax:
 *	0: Success	1: Failure
 *
 * On Intel, the XD_DISABLE flag will be cleared as a side-effect.
 *
 * The caller needs to check the error code and act appropriately:
 * either display a message or halt.
 */
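
/*
 * Illustrative sketch only (not part of the original file): a 32-bit
 * caller is expected to invoke verify_cpu and branch on the %eax
 * return value, roughly as below. The .Lno_longmode label is a
 * hypothetical name for the caller's own error path.
 *
 *	call	verify_cpu
 *	testl	%eax, %eax		# 0: success, nonzero: failure
 *	jnz	.Lno_longmode		# e.g. print a message and halt
 */
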
#include <asm/cpufeatures.h>
#include <asm/msr-index.h>

SYM_FUNC_START_LOCAL(verify_cpu)
	pushf				# Save caller passed flags
	push	$0			# Kill any dangerous flags
	popf

#ifndef __x86_64__
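	# CPUID detection: bit 21 (0x200000) of EFLAGS is the ID flag.
	# If software can toggle it, the CPUID instruction is available;
	# if the flipped value does not stick, this is a pre-CPUID CPU.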
	pushfl				# standard way to check for cpuid
	popl	%eax
	movl	%eax,%ebx
	xorl	$0x200000,%eax
	pushl	%eax
	popfl
	pushfl
	popl	%eax
	cmpl	%eax,%ebx
	jz	.Lverify_cpu_no_longmode	# cpu has no cpuid
#endif

	movl	$0x0,%eax		# See if cpuid 1 is implemented
	cpuid
	cmpl	$0x1,%eax
	jb	.Lverify_cpu_no_longmode	# no cpuid 1

	xor	%di,%di
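	# CPUID leaf 0 returns the vendor string in %ebx, %edx, %ecx:
	# "Auth" "enti" "cAMD" for AMD, "Genu" "ineI" "ntel" for Intel.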
	cmpl	$0x68747541,%ebx	# AuthenticAMD
	jnz	.Lverify_cpu_noamd
	cmpl	$0x69746e65,%edx
	jnz	.Lverify_cpu_noamd
	cmpl	$0x444d4163,%ecx
	jnz	.Lverify_cpu_noamd
	mov	$1,%di			# cpu is from AMD
	jmp	.Lverify_cpu_check

.Lverify_cpu_noamd:
	cmpl	$0x756e6547,%ebx	# GenuineIntel?
	jnz	.Lverify_cpu_check
	cmpl	$0x49656e69,%edx
	jnz	.Lverify_cpu_check
	cmpl	$0x6c65746e,%ecx
	jnz	.Lverify_cpu_check
	# only call IA32_MISC_ENABLE when:
	# family > 6 || (family == 6 && model >= 0xd)
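	# CPUID leaf 1, %eax layout: stepping[3:0], model[7:4],
	# family[11:8], extended model[19:16], extended family[27:20].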
	movl	$0x1, %eax		# check CPU family and model
	cpuid
	movl	%eax, %ecx

	andl	$0x0ff00f00, %eax	# mask family and extended family
	shrl	$8, %eax
	cmpl	$6, %eax
	ja	.Lverify_cpu_clear_xd	# family > 6, ok
	jb	.Lverify_cpu_check	# family < 6, skip

	andl	$0x000f00f0, %ecx	# mask model and extended model
	shrl	$4, %ecx
	cmpl	$0xd, %ecx
	jb	.Lverify_cpu_check	# family == 6, model < 0xd, skip
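
	# MSR_IA32_MISC_ENABLE bit 34 ("XD Bit Disable", read here as bit 2
	# of %edx) hides the NX capability from CPUID on these Intel CPUs,
	# so clear it before the feature checks below.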
.Lverify_cpu_clear_xd:
	movl	$MSR_IA32_MISC_ENABLE, %ecx
	rdmsr
	btrl	$2, %edx		# clear MSR_IA32_MISC_ENABLE_XD_DISABLE
	jnc	.Lverify_cpu_check	# only write MSR if bit was changed
	wrmsr

.Lverify_cpu_check:
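	# The and/xor pairs below leave %edx nonzero iff at least one
	# feature bit required by REQUIRED_MASK0 (or REQUIRED_MASK1 for
	# the extended leaf) is missing from the reported CPUID flags.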
	movl	$0x1,%eax		# Does the cpu have what it takes
	cpuid
	andl	$REQUIRED_MASK0,%edx
	xorl	$REQUIRED_MASK0,%edx
	jnz	.Lverify_cpu_no_longmode

	movl	$0x80000000,%eax	# See if extended cpuid is implemented
	cpuid
	cmpl	$0x80000001,%eax
	jb	.Lverify_cpu_no_longmode	# no extended cpuid

	movl	$0x80000001,%eax	# Does the cpu have what it takes
	cpuid
	andl	$REQUIRED_MASK1,%edx
	xorl	$REQUIRED_MASK1,%edx
	jnz	.Lverify_cpu_no_longmode

.Lverify_cpu_sse_test:
	movl	$1,%eax
	cpuid
	andl	$SSE_MASK,%edx
	cmpl	$SSE_MASK,%edx
	je	.Lverify_cpu_sse_ok
	test	%di,%di
	jz	.Lverify_cpu_no_longmode	# only try to force SSE on AMD
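	# On AMD, bit 15 of MSR_K7_HWCR disables SSE; clear it and rerun
	# the feature test once, using %di as a "tried already" guard.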
	movl	$MSR_K7_HWCR,%ecx
	rdmsr
	btr	$15,%eax		# enable SSE
	wrmsr
	xor	%di,%di			# don't loop
	jmp	.Lverify_cpu_sse_test	# try again

.Lverify_cpu_no_longmode:
	popf				# Restore caller passed flags
	movl	$1,%eax
	RET

.Lverify_cpu_sse_ok:
	popf				# Restore caller passed flags
	xorl	%eax, %eax
	RET
SYM_FUNC_END(verify_cpu)