cpu/x86/64bit/mode_switch: Simplify assembly code

Drop the first argument, which specified the number of arguments pushed
to the stack. Instead, always push all three arguments to the stack and
use the first one as the function pointer to call while in protected
mode.
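
For illustration, a 32-bit entry point (a hypothetical mrc_entry(),
compiled for x86_32 and placed below 4GiB) is now called as:

  /* mrc_entry, arg1 and arg2 are hypothetical names for illustration */
  int ret = protected_mode_call_2arg(mrc_entry, arg1, arg2);
  /* ...which the inline wrapper expands to: */
  ret = protected_mode_call_3arg((uintptr_t)mrc_entry, arg1, arg2, 0);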

While at it, add more comments and simplify the register restore code.

Tested:
- On QEMU: can call an x86_32 function, pass arguments and get the
  return value back.
- Booted a Lenovo X220 in x86_64 mode using the x86_32 MRC.

Change-Id: I30809453a1800ba3c0df60acd7eca778841c520f
Signed-off-by: Patrick Rudolph <patrick.rudolph@9elements.com>
Reviewed-on: https://review.coreboot.org/c/coreboot/+/79752
Reviewed-by: Arthur Heymans <arthur@aheymans.xyz>
Reviewed-by: Jérémy Compostella <jeremy.compostella@intel.com>
Tested-by: build bot (Jenkins) <no-reply@coreboot.org>
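
Note on the new stack layout (a sketch derived from the assembly
below): the arguments are stored at fixed offsets on the 16-byte
aligned stack, so that once `call *%ebx` pushes a 4-byte return
address they land in the standard cdecl argument slots of the x86_32
callee:

  12(%rsp): func_ptr (from %edi; fetched into %ebx in protected mode)
   8(%rsp): opt_arg3 (from %ecx; callee sees it at 12(%esp))
   4(%rsp): opt_arg2 (from %edx; callee sees it at  8(%esp))
   0(%rsp): opt_arg1 (from %esi; callee sees it at  4(%esp))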
diff --git a/src/arch/x86/include/mode_switch.h b/src/arch/x86/include/mode_switch.h
index 24efb1e..4235db9 100644
--- a/src/arch/x86/include/mode_switch.h
+++ b/src/arch/x86/include/mode_switch.h
@@ -3,10 +3,18 @@
 #include <stdint.h>
 
 #if ENV_X86_64
-int protected_mode_call_narg(uint32_t arg_count,
-			     uint32_t func_ptr,
+/*
+ * Assembly code that drops into protected mode and calls the function
+ * specified as the first argument, which must have been compiled for
+ * x86_32. After the function returns, it enters long mode again.
+ * The function pointer destination must be below 4GiB in physical memory.
+ *
+ * The called function takes 0-3 arguments and returns an int.
+ */
+int protected_mode_call_3arg(uint32_t func_ptr,
 			     uint32_t opt_arg1,
-			     uint32_t opt_arg2);
+			     uint32_t opt_arg2,
+			     uint32_t opt_arg3);
 
 /*
  * Drops into protected mode and calls the function, which must have been compiled for x86_32.
@@ -17,7 +25,7 @@
  */
 static inline int protected_mode_call(void *func)
 {
-	return protected_mode_call_narg(0, (uintptr_t)func, 0, 0);
+	return protected_mode_call_3arg((uintptr_t)func, 0, 0, 0);
 }
 
 /*
@@ -30,7 +38,7 @@
  */
 static inline int protected_mode_call_1arg(void *func, uint32_t arg1)
 {
-	return protected_mode_call_narg(1, (uintptr_t)func, arg1, 0);
+	return protected_mode_call_3arg((uintptr_t)func, arg1, 0, 0);
 }
 
 /*
@@ -43,7 +51,7 @@
  */
 static inline int protected_mode_call_2arg(void *func, uint32_t arg1, uint32_t arg2)
 {
-	return protected_mode_call_narg(2, (uintptr_t)func, arg1, arg2);
+	return protected_mode_call_3arg((uintptr_t)func, arg1, arg2, 0);
 }
 #else
 static inline int protected_mode_call(void *func)
diff --git a/src/cpu/x86/64bit/mode_switch.S b/src/cpu/x86/64bit/mode_switch.S
index c27f540..f9f784e 100644
--- a/src/cpu/x86/64bit/mode_switch.S
+++ b/src/cpu/x86/64bit/mode_switch.S
@@ -1,14 +1,13 @@
 /* SPDX-License-Identifier: GPL-2.0-only */
 
+/* Calls an x86_32 function from x86_64 context */
 .text
 .code64
 	.section ".text.protected_mode_call", "ax", @progbits
-	.globl protected_mode_call_narg
-protected_mode_call_narg:
-
-	push	%rbp
-	mov	%rsp, %rbp
+	.globl protected_mode_call_3arg
+protected_mode_call_3arg:
 	/* Preserve registers */
+	push	%rbp
 	push	%rbx
 	push	%r12
 	push	%r13
@@ -19,58 +18,47 @@
 	movl	%gs, %eax
 	push	%rax
 
-	/* Arguments to stack */
-	push	%rdi
-	push	%rsi
-	push	%rdx
-	push	%rcx
+	/* Store stack pointer */
+	mov	%rsp, %rbp
 
+	/* Align stack and make space for arguments */
+	movabs	$0xfffffffffffffff0, %rax
+	andq	%rax, %rsp
+	sub	$16, %rsp
+
+	/* Arguments to stack */
+	movl	%edi, 12(%rsp)
+	movl	%esi, 0(%rsp)
+	movl	%edx, 4(%rsp)
+	movl	%ecx, 8(%rsp)
+
+	/* Drop to protected mode */
 	#include <cpu/x86/64bit/exit32.inc>
 
-	movl	-56(%ebp), %eax	/* Argument count */
-	movl	-72(%ebp), %edx	/* Argument 0 */
-	movl	-80(%ebp), %ecx	/* Argument 1 */
+	/* Fetch function to call */
+	movl	12(%esp), %ebx
 
-	/* Align the stack */
-	andl	$0xFFFFFFF0, %esp
-	test	%eax, %eax
-	je	1f	/* Zero arguments */
-
-	subl	$1, %eax
-	test	%eax, %eax
-	je	2f	/* One argument */
-
-	/* Two arguments */
-	subl	$8, %esp
-	pushl	%ecx	/* Argument 1 */
-	pushl	%edx	/* Argument 0 */
-	jmp	1f
-2:
-	subl	$12, %esp
-	pushl	%edx	/* Argument 0 */
-
-1:
-	movl	-64(%ebp), %ebx	/* Function to call */
+	/* Call function */
 	call	*%ebx
 	movl	%eax, %ebx
 
-	/* Preserves ebx */
+	/* Jump back to long mode. Preserves %ebx */
 	#include <cpu/x86/64bit/entry64.inc>
 
 	/* Place return value in rax */
 	movl	%ebx, %eax
 
-	/* Restore registers */
-	mov	-48(%rbp), %rbx
-	movl	%ebx, %gs
-	mov	-40(%rbp), %r15
-	mov	-32(%rbp), %r14
-	mov	-24(%rbp), %r13
-	mov	-16(%rbp), %r12
-	mov	-8(%rbp), %rbx
-
 	/* Restore stack pointer */
 	mov	%rbp, %rsp
+
+	/* Restore registers */
+	pop	%rbx
+	movl	%ebx, %gs
+	pop	%r15
+	pop	%r14
+	pop	%r13
+	pop	%r12
+	pop	%rbx
 	pop	%rbp
 
 	ret