1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
|
#------------------------------------------------------------------------------
#
# LoongArch synchronization ASM functions.
#
# Copyright (c) 2022, Loongson Technology Corporation Limited. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
#------------------------------------------------------------------------------
ASM_GLOBAL ASM_PFX(AsmInternalSyncCompareExchange16)
ASM_GLOBAL ASM_PFX(AsmInternalSyncCompareExchange32)
ASM_GLOBAL ASM_PFX(AsmInternalSyncCompareExchange64)
ASM_GLOBAL ASM_PFX(AsmInternalSyncIncrement)
ASM_GLOBAL ASM_PFX(AsmInternalSyncDecrement)
/**
  Atomically compare-and-exchange a 16-bit lane held inside a naturally
  aligned 32-bit word, using an LL.W/SC.W retry loop. The caller passes the
  aligned word pointer, a mask selecting the 16-bit lane, and compare /
  exchange values already shifted into lane position.

  UINT32
  EFIAPI
  AsmInternalSyncCompareExchange16 (
    IN volatile UINT32 *Ptr32,              // a0: aligned word containing the lane
    IN UINT64 Mask,                         // a1: bit mask selecting the 16-bit lane
    IN UINT64 LocalCompareValue,            // a2: compare value, pre-shifted into lane
    IN UINT64 LocalExchangeValue            // a3: exchange value, pre-shifted into lane
  )

  Returns (a0): the full 32-bit word as observed by the load-linked read.
**/
ASM_PFX(AsmInternalSyncCompareExchange16):
1:
ll.w $t0, $a0, 0x0                          # t0 = load-linked *Ptr32 (whole word)
and $t1, $t0, $a1                           # t1 = current contents of the masked lane
bne $t1, $a2, 2f                            # lane != LocalCompareValue -> failure path
andn $t1, $t0, $a1                          # clear the lane bits in the word
or $t1, $t1, $a3                            # merge LocalExchangeValue into the lane
sc.w $t1, $a0, 0x0                          # store-conditional; t1 = 1 success / 0 failure
beqz $t1, 1b                                # reservation lost -> retry from LL
b 3f
2:
dbar 0                                      # memory barrier on the compare-failure path
3:
move $a0, $t0                               # return word observed by the LL read
jirl $zero, $ra, 0                          # return to caller
/**
  Atomically compare-and-exchange a 32-bit value using an LL.W/SC.W retry
  loop. The exchange is performed only if the current value equals
  CompareValue; the loop retries while the store-conditional fails.

  UINT32
  EFIAPI
  AsmInternalSyncCompareExchange32 (
    IN volatile UINT32 *Value,              // a0: target word
    IN UINT64 CompareValue,                 // a1: expected current value
    IN UINT64 ExchangeValue                 // a2: new value to store on match
  )

  Returns (a0): the value observed by the load-linked read.
**/
ASM_PFX(AsmInternalSyncCompareExchange32):
1:
ll.w $t0, $a0, 0x0                          # t0 = load-linked *Value
bne $t0, $a1, 2f                            # current != CompareValue -> failure path
move $t0, $a2                               # t0 = ExchangeValue (t0 reused for SC)
sc.w $t0, $a0, 0x0                          # store-conditional; t0 = 1 success / 0 failure
beqz $t0, 1b                                # reservation lost -> retry from LL
b 3f
2:
dbar 0                                      # memory barrier on the compare-failure path
3:
move $a0, $t0                               # return observed value (or SC status on success path)
jirl $zero, $ra, 0                          # return to caller
/**
  Atomically compare-and-exchange a 64-bit value using an LL.D/SC.D retry
  loop. The exchange is performed only if the current value equals
  CompareValue; the loop retries while the store-conditional fails.

  UINT64
  EFIAPI
  AsmInternalSyncCompareExchange64 (
    IN volatile UINT64 *Value,              // a0: target doubleword
    IN UINT64 CompareValue,                 // a1: expected current value
    IN UINT64 ExchangeValue                 // a2: new value to store on match
  )

  Returns (a0): the value observed by the load-linked read.
**/
ASM_PFX(AsmInternalSyncCompareExchange64):
1:
ll.d $t0, $a0, 0x0                          # t0 = load-linked *Value (64-bit)
bne $t0, $a1, 2f                            # current != CompareValue -> failure path
move $t0, $a2                               # t0 = ExchangeValue (t0 reused for SC)
sc.d $t0, $a0, 0x0                          # store-conditional; t0 = 1 success / 0 failure
beqz $t0, 1b                                # reservation lost -> retry from LL
b 3f
2:
dbar 0                                      # memory barrier on the compare-failure path
3:
move $a0, $t0                               # return observed value (or SC status on success path)
jirl $zero, $ra, 0                          # return to caller
/**
  Atomically increment a 32-bit value and return the incremented result.

  UINT32
  EFIAPI
  AsmInternalSyncIncrement (
    IN volatile UINT32 *Value               // a0: target word
  )

  Returns (a0): *Value after this increment.

  Implementation note: AMADD.W atomically adds $t2 to memory and returns the
  *old* memory value in $t1, so the result of THIS increment is old + 1
  computed in registers. The previous implementation re-read memory with
  LD.W after the atomic op, which could observe another CPU's concurrent
  update instead of this increment's result (and its initial LD.W was dead,
  immediately overwritten by AMADD.W).
**/
ASM_PFX(AsmInternalSyncIncrement):
move $t0, $a0                               # t0 = Value pointer (AM* needs rd/rj/rk distinct)
dbar 0                                      # barrier before the atomic operation
li.w $t2, 1                                 # addend
amadd.w $t1, $t2, $t0                       # atomic: t1 = old *Value; *Value += 1
add.w $a0, $t1, $t2                         # return old + 1, this increment's result
jirl $zero, $ra, 0                          # return to caller
/**
  Atomically decrement a 32-bit value and return the decremented result.

  UINT32
  EFIAPI
  AsmInternalSyncDecrement (
    IN volatile UINT32 *Value               // a0: target word
  )

  Returns (a0): *Value after this decrement.

  Implementation note: AMADD.W atomically adds $t2 (-1) to memory and
  returns the *old* memory value in $t1, so the result of THIS decrement is
  old - 1 computed in registers. The previous implementation re-read memory
  with LD.W after the atomic op, which could observe another CPU's
  concurrent update instead of this decrement's result (and its initial
  LD.W was dead, immediately overwritten by AMADD.W).
**/
ASM_PFX(AsmInternalSyncDecrement):
move $t0, $a0                               # t0 = Value pointer (AM* needs rd/rj/rk distinct)
dbar 0                                      # barrier before the atomic operation
li.w $t2, -1                                # addend (-1)
amadd.w $t1, $t2, $t0                       # atomic: t1 = old *Value; *Value -= 1
add.w $a0, $t1, $t2                         # return old - 1, this decrement's result
jirl $zero, $ra, 0                          # return to caller
.end
|