cpu/x86/64bit: Turn jumping to long mode into a macro

This makes the code easier to reuse, e.g. when it needs to be used
twice in the same assembly file.
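
The macro takes the page table address as an argument, so call sites
now look roughly like this (illustrative sketch based on the converted
files):

  #include <cpu/x86/64bit/entry64.inc>

  #if ENV_X86_64
  	/* Load the identity mapped page tables and jump to long mode. */
  	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
  #endif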

Change-Id: Ida861338004187e4e714be41e17c8447fa4cf935
Signed-off-by: Arthur Heymans <arthur@aheymans.xyz>
Reviewed-on: https://review.coreboot.org/c/coreboot/+/79261
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
Reviewed-by: Felix Held <felix-coreboot@felixheld.de>
diff --git a/src/cpu/intel/car/core2/cache_as_ram.S b/src/cpu/intel/car/core2/cache_as_ram.S
index e134717..2e4d9c8 100644
--- a/src/cpu/intel/car/core2/cache_as_ram.S
+++ b/src/cpu/intel/car/core2/cache_as_ram.S
@@ -4,6 +4,7 @@
 #include <cpu/x86/mtrr.h>
 #include <cpu/x86/cache.h>
 #include <cpu/x86/post_code.h>
+#include <cpu/x86/64bit/entry64.inc>
 
 .section .init
 .global bootblock_pre_c_entry
@@ -162,8 +163,7 @@
 	subl	$4, %esp
 
 #if ENV_X86_64
-
-	#include <cpu/x86/64bit/entry64.inc>
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
 
 	movd	%mm2, %rdi
 	shlq	$32, %rdi
diff --git a/src/cpu/intel/car/non-evict/cache_as_ram.S b/src/cpu/intel/car/non-evict/cache_as_ram.S
index 76986ff..578bf03 100644
--- a/src/cpu/intel/car/non-evict/cache_as_ram.S
+++ b/src/cpu/intel/car/non-evict/cache_as_ram.S
@@ -4,6 +4,7 @@
 #include <cpu/x86/mtrr.h>
 #include <cpu/x86/cache.h>
 #include <cpu/x86/post_code.h>
+#include <cpu/x86/64bit/entry64.inc>
 
 #define NoEvictMod_MSR 0x2e0
 #define BBL_CR_CTL3_MSR 0x11e
@@ -213,8 +214,7 @@
 	andl	$0xfffffff0, %esp
 
 #if ENV_X86_64
-
-	#include <cpu/x86/64bit/entry64.inc>
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
 
 	movd	%mm2, %rdi
 	shlq	$32, %rdi
diff --git a/src/cpu/intel/car/p4-netburst/cache_as_ram.S b/src/cpu/intel/car/p4-netburst/cache_as_ram.S
index f7c023b..32fddd6 100644
--- a/src/cpu/intel/car/p4-netburst/cache_as_ram.S
+++ b/src/cpu/intel/car/p4-netburst/cache_as_ram.S
@@ -13,6 +13,7 @@
 .global bootblock_pre_c_entry
 
 #include <cpu/intel/car/cache_as_ram_symbols.inc>
+#include <cpu/x86/64bit/entry64.inc>
 
 .code32
 _cache_as_ram_setup:
@@ -362,7 +363,7 @@
 	subl	$4, %esp
 
 #if ENV_X86_64
-	#include <cpu/x86/64bit/entry64.inc>
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
 
 	movd	%mm2, %rdi
 	shlq	$32, %rdi	/* BIST */
diff --git a/src/cpu/qemu-x86/cache_as_ram_bootblock.S b/src/cpu/qemu-x86/cache_as_ram_bootblock.S
index 859b760..e2662c2 100644
--- a/src/cpu/qemu-x86/cache_as_ram_bootblock.S
+++ b/src/cpu/qemu-x86/cache_as_ram_bootblock.S
@@ -1,6 +1,7 @@
 /* SPDX-License-Identifier: GPL-2.0-only */
 
 #include <cpu/x86/post_code.h>
+#include <cpu/x86/64bit/entry64.inc>
 
 #define CBFS_FILE_MAGIC 0
 #define CBFS_FILE_LEN (CBFS_FILE_MAGIC + 8)
@@ -79,7 +80,7 @@
 
 #if ENV_X86_64
 	/* entry64.inc preserves ebx. */
-#include <cpu/x86/64bit/entry64.inc>
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
 
 	/* Restore the BIST result and timestamps. */
 	movd	%mm2, %rdi
diff --git a/src/cpu/x86/64bit/entry64.inc b/src/cpu/x86/64bit/entry64.inc
index 7da68b4..878f310 100644
--- a/src/cpu/x86/64bit/entry64.inc
+++ b/src/cpu/x86/64bit/entry64.inc
@@ -22,10 +22,9 @@
 #include <arch/rom_segs.h>
 #endif
 
-
-setup_longmode:
+.macro setup_longmode page_table
 	/* Get page table address */
-	movl	$(CONFIG_ARCH_X86_64_PGTBL_LOC), %eax
+	movl	\page_table, %eax
 
 	/* load identity mapped page tables */
 	movl	%eax, %cr3
@@ -48,12 +47,13 @@
 
 	/* use long jump to switch to 64-bit code segment */
 #if defined(__RAMSTAGE__)
-	ljmp $RAM_CODE_SEG64, $__longmode_start
+	ljmp $RAM_CODE_SEG64, $jmp_addr\@
 #else
-	ljmp $ROM_CODE_SEG64, $__longmode_start
+	ljmp $ROM_CODE_SEG64, $jmp_addr\@
 
 #endif
 .code64
-__longmode_start:
+jmp_addr\@:
+.endm
 
 #endif
diff --git a/src/cpu/x86/64bit/mode_switch.S b/src/cpu/x86/64bit/mode_switch.S
index c4198f3..01fe003 100644
--- a/src/cpu/x86/64bit/mode_switch.S
+++ b/src/cpu/x86/64bit/mode_switch.S
@@ -1,4 +1,5 @@
 /* SPDX-License-Identifier: GPL-2.0-only */
+#include <cpu/x86/64bit/entry64.inc>
 
 /* Calls a x86_32 function from x86_64 context */
 .text
@@ -42,8 +43,8 @@
 	call	*%ebx
 	movl	%eax, %ebx
 
-	/* Jump to long mode. Preserves ebx */
-	#include <cpu/x86/64bit/entry64.inc>
+	/* Preserves ebx */
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
 
 	/* Place return value in rax */
 	movl	%ebx, %eax
diff --git a/src/cpu/x86/64bit/mode_switch2.S b/src/cpu/x86/64bit/mode_switch2.S
index 65e9d94..1807d2e 100644
--- a/src/cpu/x86/64bit/mode_switch2.S
+++ b/src/cpu/x86/64bit/mode_switch2.S
@@ -4,6 +4,8 @@
  * Must not be directly invoked from C code!
  */
 
+#include <cpu/x86/64bit/entry64.inc>
+
 .text
 .code32
 	.section ".text.long_mode_call_3arg", "ax", @progbits
@@ -19,7 +21,7 @@
 	mov	%esp, %ebp
 
 	/* Enter long mode, preserves ebx */
-	#include <cpu/x86/64bit/entry64.inc>
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
 
 	/* Align stack */
 	movabs	$0xfffffffffffffff0, %rax
diff --git a/src/cpu/x86/sipi_vector.S b/src/cpu/x86/sipi_vector.S
index 39973db..923e398 100644
--- a/src/cpu/x86/sipi_vector.S
+++ b/src/cpu/x86/sipi_vector.S
@@ -6,6 +6,7 @@
 #include <arch/ram_segs.h>
 
 #define __RAMSTAGE__
+#include <cpu/x86/64bit/entry64.inc>
 
 /* The SIPI vector is responsible for initializing the APs in the system. It
  * loads microcode, sets up MSRs, and enables caching before calling into
@@ -223,7 +224,8 @@
 
 #if ENV_X86_64
 	/* entry64.inc preserves ebx, esi, edi, ebp */
-#include <cpu/x86/64bit/entry64.inc>
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
+
 	movabs	c_handler, %eax
 	call	*%rax
 #else
diff --git a/src/cpu/x86/smm/smm_stub.S b/src/cpu/x86/smm/smm_stub.S
index f97ab59..9f1f21d 100644
--- a/src/cpu/x86/smm/smm_stub.S
+++ b/src/cpu/x86/smm/smm_stub.S
@@ -12,6 +12,7 @@
 #include <cpu/x86/cr.h>
 #include <cpu/x86/msr.h>
 #include <cpu/x86/lapic_def.h>
+#include <cpu/x86/64bit/entry64.inc>
 
 .code32
 .section ".module_parameters", "aw", @progbits
@@ -195,7 +196,7 @@
 #if ENV_X86_64
 	mov	%ecx, %edi
 	/* entry64.inc preserves ebx, esi, edi, ebp */
-#include <cpu/x86/64bit/entry64.inc>
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
 	mov	%edi, %ecx
 
 
diff --git a/src/soc/amd/common/block/cpu/noncar/pre_c.S b/src/soc/amd/common/block/cpu/noncar/pre_c.S
index eb556fa..bb2203b 100644
--- a/src/soc/amd/common/block/cpu/noncar/pre_c.S
+++ b/src/soc/amd/common/block/cpu/noncar/pre_c.S
@@ -1,6 +1,7 @@
 /* SPDX-License-Identifier: GPL-2.0-only */
 
 #include <amdblocks/post_codes.h>
+#include <cpu/x86/64bit/entry64.inc>
 #include <cpu/x86/post_code.h>
 
 .section .init, "ax", @progbits
@@ -27,7 +28,7 @@
 	post_code(POSTCODE_BOOTBLOCK_PRE_C_ENTRY)
 
 #if ENV_X86_64
-	#include <cpu/x86/64bit/entry64.inc>
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
 #endif
 
 	/* Clear .bss section */
diff --git a/src/soc/intel/common/block/cpu/car/cache_as_ram.S b/src/soc/intel/common/block/cpu/car/cache_as_ram.S
index 61cbe307..c22e7d9 100644
--- a/src/soc/intel/common/block/cpu/car/cache_as_ram.S
+++ b/src/soc/intel/common/block/cpu/car/cache_as_ram.S
@@ -9,6 +9,7 @@
 #include <cpu/x86/post_code.h>
 #include <intelblocks/msr.h>
 #include <intelblocks/post_codes.h>
+#include <cpu/x86/64bit/entry64.inc>
 
 .section .init, "ax", @progbits
 
@@ -279,7 +280,8 @@
 	andl	$0xfffffff0, %esp
 
 #if ENV_X86_64
-	#include <cpu/x86/64bit/entry64.inc>
+	setup_longmode $(CONFIG_ARCH_X86_64_PGTBL_LOC)
+
 	movd	%mm2, %rdi
 	shlq	$32, %rdi
 	movd	%mm1, %rsi