#
# arch/arm64/Makefile
#
# This file is included by the global makefile so that you can add your own
# architecture-specific flags and dependencies.
#
# This file is subject to the terms and conditions of the GNU General Public
# License. See the file "COPYING" in the main directory of this archive
# for more details.
#
# Copyright (C) 1995-2001 by Russell King

LDFLAGS_vmlinux := --no-undefined -X

ifeq ($(CONFIG_RELOCATABLE), y)
# Pass --no-apply-dynamic-relocs to restore pre-binutils-2.27 behaviour
# for relative relocs, since this leads to better Image compression
# with the relocation offsets always being zero.
LDFLAGS_vmlinux += -shared -Bsymbolic -z notext \
		$(call ld-option, --no-apply-dynamic-relocs)
endif
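
# For reference: $(call ld-option, <flag>) expands to <flag> only if $(LD)
# accepts it, so --no-apply-dynamic-relocs is silently dropped on older
# linkers. With CONFIG_RELOCATABLE, -shared -Bsymbolic links vmlinux so that
# its relative (R_AARCH64_RELATIVE) relocations can be applied at boot, e.g.
# for KASLR.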

ifeq ($(CONFIG_ARM64_ERRATUM_843419),y)
  ifeq ($(CONFIG_ARM64_LD_HAS_FIX_ERRATUM_843419),y)
LDFLAGS_vmlinux += --fix-cortex-a53-843419
  endif
endif

cc_has_k_constraint := $(call try-run,echo \
	'int main(void) { \
		asm volatile("and w0, w0, %w0" :: "K" (4294967295)); \
		return 0; \
	}' | $(CC) -S -x c -o "$$TMP" -,,-DCONFIG_CC_HAS_K_CONSTRAINT=1)
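
# For reference: try-run compiles the snippet above and yields its second
# argument (empty here) on success, or its third on failure. 0xffffffff is not
# a valid AArch64 32-bit logical immediate, so a compiler that properly
# validates the "K" constraint rejects the snippet, and only then is
# -DCONFIG_CC_HAS_K_CONSTRAINT=1 added to the flags.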

ifeq ($(CONFIG_BROKEN_GAS_INST),y)
$(warning Detected assembler with broken .inst; disassembly will be unreliable)
endif

KBUILD_CFLAGS += -mgeneral-regs-only \
		 $(compat_vdso) $(cc_has_k_constraint)
KBUILD_CFLAGS += $(call cc-disable-warning, psabi)
KBUILD_AFLAGS += $(compat_vdso)

KBUILD_CFLAGS += $(call cc-option,-mabi=lp64)
KBUILD_AFLAGS += $(call cc-option,-mabi=lp64)

# Avoid generating .eh_frame* sections.
ifneq ($(CONFIG_UNWIND_TABLES),y)
KBUILD_CFLAGS += -fno-asynchronous-unwind-tables -fno-unwind-tables
KBUILD_AFLAGS += -fno-asynchronous-unwind-tables -fno-unwind-tables
else
KBUILD_CFLAGS += -fasynchronous-unwind-tables
KBUILD_AFLAGS += -fasynchronous-unwind-tables
endif
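
# When CONFIG_UNWIND_TABLES=y the .eh_frame data is retained; it is consumed
# within the kernel itself (for example by the dynamic shadow-call-stack
# patching code) rather than for userspace-style exception handling.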

ifeq ($(CONFIG_STACKPROTECTOR_PER_TASK),y)
prepare: stack_protector_prepare
stack_protector_prepare: prepare0
	$(eval KBUILD_CFLAGS += -mstack-protector-guard=sysreg \
				-mstack-protector-guard-reg=sp_el0 \
				-mstack-protector-guard-offset=$(shell \
			awk '{if ($$2 == "TSK_STACK_CANARY") print $$3;}' \
					include/generated/asm-offsets.h))
endif
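
# The guard offset is scraped from the generated asm-offsets.h, which contains
# a line of the form (the value below is only illustrative):
#   #define TSK_STACK_CANARY 888 /* offsetof(struct task_struct, stack_canary) */
# so the compiler loads the per-task canary relative to SP_EL0, which holds
# the current task_struct pointer in the kernel.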

ifeq ($(CONFIG_AS_HAS_ARMV8_2), y)
# make sure to pass the newest target architecture to -march.
asm-arch := armv8.2-a
endif

# Ensure that if the compiler supports branch protection we default it
# off; this will be overridden if we are using branch protection.
branch-prot-flags-y += $(call cc-option,-mbranch-protection=none)

ifeq ($(CONFIG_ARM64_PTR_AUTH_KERNEL),y)
branch-prot-flags-$(CONFIG_CC_HAS_SIGN_RETURN_ADDRESS) := -msign-return-address=all
# We enable additional protection for leaf functions as there is some
# narrow potential for ROP protection benefits and no substantial
# performance impact has been observed.
PACRET-y := pac-ret+leaf
# Using a shadow call stack in leaf functions is too costly, so avoid PAC there
# as well when we may be patching PAC into SCS.
PACRET-$(CONFIG_UNWIND_PATCH_PAC_INTO_SCS) := pac-ret
ifeq ($(CONFIG_ARM64_BTI_KERNEL),y)
branch-prot-flags-$(CONFIG_CC_HAS_BRANCH_PROT_PAC_RET_BTI) := -mbranch-protection=$(PACRET-y)+bti
else
branch-prot-flags-$(CONFIG_CC_HAS_BRANCH_PROT_PAC_RET) := -mbranch-protection=$(PACRET-y)
endif
# -march=armv8.3-a enables the non-NOP PAC instructions. To keep the compiler
# from generating them (which would break the single-image contract), we pass
# it only to the assembler. This option is used only with non-integrated
# assemblers.
ifeq ($(CONFIG_AS_HAS_PAC), y)
asm-arch := armv8.3-a
endif
endif

KBUILD_CFLAGS += $(branch-prot-flags-y)
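
# The branch-prot-flags-$(CONFIG_...) assignments above override the default
# "none" whenever the corresponding option is y; with PAC and BTI both enabled
# this typically resolves to -mbranch-protection=pac-ret+leaf+bti.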

ifeq ($(CONFIG_AS_HAS_ARMV8_4), y)
# make sure to pass the newest target architecture to -march.
asm-arch := armv8.4-a
endif

ifeq ($(CONFIG_AS_HAS_ARMV8_5), y)
# make sure to pass the newest target architecture to -march.
asm-arch := armv8.5-a
endif

ifdef asm-arch
KBUILD_CFLAGS += -Wa,-march=$(asm-arch) \
		 -DARM64_ASM_ARCH='"$(asm-arch)"'
endif
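
# Example: with CONFIG_AS_HAS_ARMV8_5=y this expands to
#   -Wa,-march=armv8.5-a -DARM64_ASM_ARCH='"armv8.5-a"'
# i.e. only the assembler's target is raised; C code is still compiled for the
# baseline architecture.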

ifeq ($(CONFIG_SHADOW_CALL_STACK), y)
KBUILD_CFLAGS += -ffixed-x18
endif
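
# -ffixed-x18 stops the compiler from allocating x18, which the shadow call
# stack uses as its dedicated stack pointer register.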

ifeq ($(CONFIG_CPU_BIG_ENDIAN), y)
KBUILD_CPPFLAGS += -mbig-endian
CHECKFLAGS += -D__AARCH64EB__
# Prefer the baremetal ELF build target, but not all toolchains include
# it, so fall back to the standard linux version if needed.
KBUILD_LDFLAGS += -EB $(call ld-option, -maarch64elfb, -maarch64linuxb -z norelro)
UTS_MACHINE := aarch64_be
else
KBUILD_CPPFLAGS += -mlittle-endian
CHECKFLAGS += -D__AARCH64EL__
# Same as above, prefer ELF but fall back to linux target if needed.
KBUILD_LDFLAGS += -EL $(call ld-option, -maarch64elf, -maarch64linux -z norelro)
UTS_MACHINE := aarch64
endif
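
# As with ld-option above, the bare-metal aarch64elf(b) emulation is preferred;
# the aarch64linux(b) fallback adds -z norelro, presumably because that
# emulation enables a RELRO segment by default, which the kernel image does
# not use.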

ifeq ($(CONFIG_LD_IS_LLD), y)
KBUILD_LDFLAGS += -z norelro
endif

CHECKFLAGS += -D__aarch64__

ifeq ($(CONFIG_DYNAMIC_FTRACE_WITH_REGS),y)
KBUILD_CPPFLAGS += -DCC_USING_PATCHABLE_FUNCTION_ENTRY
CC_FLAGS_FTRACE := -fpatchable-function-entry=2
endif
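
# -fpatchable-function-entry=2 makes the compiler emit two NOPs at every
# function entry, which ftrace later patches into a call to its trampoline;
# CC_USING_PATCHABLE_FUNCTION_ENTRY tells the generic ftrace headers that this
# mechanism is in use.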

ifeq ($(CONFIG_KASAN_SW_TAGS), y)
KASAN_SHADOW_SCALE_SHIFT := 4
else ifeq ($(CONFIG_KASAN_GENERIC), y)
KASAN_SHADOW_SCALE_SHIFT := 3
endif

KBUILD_CFLAGS += -DKASAN_SHADOW_SCALE_SHIFT=$(KASAN_SHADOW_SCALE_SHIFT)
KBUILD_CPPFLAGS += -DKASAN_SHADOW_SCALE_SHIFT=$(KASAN_SHADOW_SCALE_SHIFT)
KBUILD_AFLAGS += -DKASAN_SHADOW_SCALE_SHIFT=$(KASAN_SHADOW_SCALE_SHIFT)
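
# A shift of 4 means one shadow byte covers 16 bytes of memory (software
# tag-based KASAN); 3 means 8 bytes per shadow byte (generic KASAN). Passing
# the value via CFLAGS, CPPFLAGS and AFLAGS keeps the C, preprocessor and
# assembly views of the shadow region consistent.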

libs-y := arch/arm64/lib/ $(libs-y)
libs-$(CONFIG_EFI_STUB) += $(objtree)/drivers/firmware/efi/libstub/lib.a

# Default target when executing plain make
boot := arch/arm64/boot

ifeq ($(CONFIG_EFI_ZBOOT),)
KBUILD_IMAGE := $(boot)/Image.gz
else
KBUILD_IMAGE := $(boot)/vmlinuz.efi
endif
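
# A plain "make" therefore builds the compressed Image.gz by default, or
# vmlinuz.efi (the kernel wrapped in the EFI zboot decompressor stub) when
# CONFIG_EFI_ZBOOT=y.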

# Don't compile Image in mixed build with "all" target
ifndef KBUILD_MIXED_TREE
all: $(notdir $(KBUILD_IMAGE))
endif

Image vmlinuz.efi: vmlinux
	$(Q)$(MAKE) $(build)=$(boot) $(boot)/$@

Image.%: Image
	$(Q)$(MAKE) $(build)=$(boot) $(boot)/$@
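
# Image.% matches any compressed variant (Image.gz, Image.lz4, ...): the plain
# Image is built first, then the compression is handled by the Makefile in
# $(boot).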

install: KBUILD_IMAGE := $(boot)/Image
install zinstall:
	$(call cmd,install)

PHONY += vdso_install
vdso_install:
	$(Q)$(MAKE) $(build)=arch/arm64/kernel/vdso $@
	$(if $(CONFIG_COMPAT_VDSO), \
		$(Q)$(MAKE) $(build)=arch/arm64/kernel/vdso32 $@)

archprepare:
	$(Q)$(MAKE) $(build)=arch/arm64/tools kapi
ifeq ($(CONFIG_ARM64_ERRATUM_843419),y)
  ifneq ($(CONFIG_ARM64_LD_HAS_FIX_ERRATUM_843419),y)
	@echo "warning: ld does not support --fix-cortex-a53-843419; kernel may be susceptible to erratum" >&2
  endif
endif
ifeq ($(CONFIG_ARM64_USE_LSE_ATOMICS),y)
  ifneq ($(CONFIG_ARM64_LSE_ATOMICS),y)
	@echo "warning: LSE atomics not supported by binutils" >&2
  endif
endif

ifeq ($(CONFIG_KVM),y)
archscripts:
	$(Q)$(MAKE) $(build)=arch/arm64/tools gen-hyprel
endif

ifeq ($(KBUILD_EXTMOD),)
# We need to generate vdso-offsets.h before compiling certain files in kernel/.
# In order to do that, we should use the archprepare target, but we can't since
# asm-offsets.h is included in some files used to generate vdso-offsets.h, and
# asm-offsets.h is built in prepare0, for which archprepare is a dependency.
# Therefore we need to generate the header after prepare0 has been made, hence
# this hack.
prepare: vdso_prepare
vdso_prepare: prepare0
	$(Q)$(MAKE) $(build)=arch/arm64/kernel/vdso \
		include/generated/vdso-offsets.h arch/arm64/kernel/vdso/vdso.so
ifdef CONFIG_COMPAT_VDSO
	$(Q)$(MAKE) $(build)=arch/arm64/kernel/vdso32 \
		include/generated/vdso32-offsets.h arch/arm64/kernel/vdso32/vdso.so
endif
endif

define archhelp
  echo  '* Image.gz      - Compressed kernel image (arch/$(ARCH)/boot/Image.gz)'
  echo  '  Image         - Uncompressed kernel image (arch/$(ARCH)/boot/Image)'
  echo  '  install       - Install uncompressed kernel'
  echo  '  zinstall      - Install compressed kernel'
  echo  '                  Install using (your) ~/bin/installkernel or'
  echo  '                  (distribution) /sbin/installkernel or'
  echo  '                  install to $$(INSTALL_PATH) and run lilo'
endef