/*
 * Function calling ABI conversion from Linux to EFI for x86_64
 *
 * Copyright (C) 2007 Intel Corp
 *	Bibo Mao <bibo.mao@intel.com>
 *	Huang Ying <ying.huang@intel.com>
 */
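
/*
 * Each thunk below is called from C with the EFI function pointer as
 * its first argument.  The C-side declarations live in asm/efi.h and
 * look roughly like this (a sketch, not the authoritative prototypes):
 *
 *	extern u64 efi_call2(void *fp, u64 arg1, u64 arg2);
 *
 *	status = efi_call2(efi.systab->runtime->get_time,
 *			   (u64)tm, (u64)tc);
 */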

#include <linux/linkage.h>

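/*
 * EFI services follow the Microsoft x64 calling convention, under which
 * %xmm0-%xmm5 are volatile, so the firmware is free to clobber them.
 * SAVE_XMM preserves them: it stashes the original %rsp, aligns the
 * stack down to 16 bytes (movaps requires aligned operands), saves %cr0
 * and clears CR0.TS with clts so that touching the SSE registers cannot
 * raise a device-not-available fault, then spills %xmm0-%xmm5.
 */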
#define SAVE_XMM			\
	mov %rsp, %rax;			\
	subq $0x70, %rsp;		\
	and $~0xf, %rsp;		\
	mov %rax, (%rsp);		\
	mov %cr0, %rax;			\
	clts;				\
	mov %rax, 0x8(%rsp);		\
	movaps %xmm0, 0x60(%rsp);	\
	movaps %xmm1, 0x50(%rsp);	\
	movaps %xmm2, 0x40(%rsp);	\
	movaps %xmm3, 0x30(%rsp);	\
	movaps %xmm4, 0x20(%rsp);	\
	movaps %xmm5, 0x10(%rsp)

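/*
 * RESTORE_XMM undoes SAVE_XMM: it reloads %xmm0-%xmm5, writes back the
 * saved %cr0 (re-arming CR0.TS if it was set), and switches %rsp back
 * to the original, unaligned stack.
 */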
#define RESTORE_XMM			\
	movaps 0x60(%rsp), %xmm0;	\
	movaps 0x50(%rsp), %xmm1;	\
	movaps 0x40(%rsp), %xmm2;	\
	movaps 0x30(%rsp), %xmm3;	\
	movaps 0x20(%rsp), %xmm4;	\
	movaps 0x10(%rsp), %xmm5;	\
	mov 0x8(%rsp), %rsi;		\
	mov %rsi, %cr0;			\
	mov (%rsp), %rsp

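/*
 * efi_callN(fp, arg1, ..., argN): the Linux (System V AMD64) caller
 * passes the EFI function pointer in %rdi and the arguments in %rsi,
 * %rdx, %rcx, %r8 and %r9.  The Microsoft x64 convention expects the
 * first four arguments in %rcx, %rdx, %r8 and %r9 and the rest on the
 * stack, so each thunk shuffles the registers accordingly.  The 32
 * bytes subtracted from %rsp before every call are the "shadow space"
 * (register home area) the callee is entitled to under that convention.
 */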
ENTRY(efi_call0)
	SAVE_XMM
	subq $32, %rsp
	call *%rdi
	addq $32, %rsp
	RESTORE_XMM
	ret

ENTRY(efi_call1)
	SAVE_XMM
	subq $32, %rsp
	mov  %rsi, %rcx
	call *%rdi
	addq $32, %rsp
	RESTORE_XMM
	ret

ENTRY(efi_call2)
	SAVE_XMM
	subq $32, %rsp
	mov  %rsi, %rcx
	call *%rdi
	addq $32, %rsp
	RESTORE_XMM
	ret

ENTRY(efi_call3)
	SAVE_XMM
	subq $32, %rsp
	mov  %rcx, %r8
	mov  %rsi, %rcx
	call *%rdi
	addq $32, %rsp
	RESTORE_XMM
	ret

ENTRY(efi_call4)
	SAVE_XMM
	subq $32, %rsp
	mov %r8, %r9
	mov %rcx, %r8
	mov %rsi, %rcx
	call *%rdi
	addq $32, %rsp
	RESTORE_XMM
	ret

ENTRY(efi_call5)
	SAVE_XMM
	subq $48, %rsp			/* shadow space + arg5, 16-byte aligned */
	mov %r9, 32(%rsp)		/* arg5 goes on the stack, above the shadow space */
	mov %r8, %r9
	mov %rcx, %r8
	mov %rsi, %rcx
	call *%rdi
	addq $48, %rsp
	RESTORE_XMM
	ret

ENTRY(efi_call6)
	SAVE_XMM
	mov (%rsp), %rax		/* original %rsp, as saved by SAVE_XMM */
	mov 8(%rax), %rax		/* arg6, just above our return address */
	subq $48, %rsp
	mov %r9, 32(%rsp)		/* arg5 and arg6 go on the stack */
	mov %rax, 40(%rsp)
	mov %r8, %r9
	mov %rcx, %r8
	mov %rsi, %rcx
	call *%rdi
	addq $48, %rsp
	RESTORE_XMM
	ret