/**********************************************************************
  Copyright(c) 2021 Arm Corporation All rights reserved.

  Redistribution and use in source and binary forms, with or without
  modification, are permitted provided that the following conditions
  are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in
      the documentation and/or other materials provided with the
      distribution.
    * Neither the name of Arm Corporation nor the names of its
      contributors may be used to endorse or promote products derived
      from this software without specific prior written permission.

  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**********************************************************************/

.altmacro
.macro aes_key_expand_next next:req,prev:req,ctx:req
        .if \next == 9
                mov     \ctx, 0x1b
        .endif
        dup     vdest.4s,vKey\prev\().s[3]
        ext     vtmp.16b,vzero.16b,vKey\prev\().16b,#12
        aese    vdest.16b,vzero.16b
        eor     vKey\next\().16b,vKey\prev\().16b,vtmp.16b
        ext     vtmp.16b,vzero.16b,vtmp.16b,#12
        eor     vKey\next\().16b,vKey\next\().16b,vtmp.16b
        ext     vtmp.16b,vzero.16b,vtmp.16b,#12
        mov     tmpw,vdest.s[0]
        eor     tmpw,\ctx,tmpw,ror 8
        dup     vdest.4s,tmpw
        eor     vKey\next\().16b,vKey\next\().16b,vtmp.16b
        mov     \ctx,\ctx,lsl 1
        eor     vKey\next\().16b,vKey\next\().16b,vdest.16b
.endm
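
/* Worked rcon sequence (for reference): the round-constant register is
 * initialised to 0x01 and doubled by the "mov \ctx,\ctx,lsl 1" above on
 * every expansion, so rounds 1..8 see 0x01,0x02,0x04,0x08,0x10,0x20,
 * 0x40,0x80. Plain doubling of 0x80 would overflow a byte, so the .if
 * at the top of the macro resets the register to 0x1b for round 9,
 * giving the standard AES-128 round constants 0x1b and 0x36 for rounds
 * 9 and 10.
 */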

/* when loading key = 0
 * arg1 = input key
 * arg2 = rcon ctx register (optional)
 * when loading key > 0
 * arg1 = rcon ctx register (optional)
 */
.macro aes_key_expand key:req,arg1,arg2
        .if \key == 0
                ld1     {vKey\key\().4s},[\arg1]
                movi    vzero.4s, 0
                .ifb    \arg2
                        mov     rcon,#0x01
                .endif

                .ifnb   \arg2
                        mov     \arg2,#0x01
                .endif
        .endif

        .if \key > 0
                prev=\key-1
                .ifb    \arg1
                        aes_key_expand_next \key,%prev,rcon
                .endif

                .ifnb   \arg1
                        aes_key_expand_next \key,%prev,\arg1
                .endif
        .endif
.endm
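
/* Usage sketch (illustrative, not assembled here): expand the full
 * AES-128 schedule into vKey0..vKey10 from the raw key pointed to by
 * a GP register (keyptr is a hypothetical alias), assuming the
 * including file has defined the vKey*, vzero, vtmp, vdest, rcon and
 * tmpw register aliases via .req:
 *
 *      aes_key_expand 0,keyptr         // load raw key, set rcon = 0x01
 *      aes_key_expand 1                // derive round keys 1..10
 *      aes_key_expand 2
 *      aes_key_expand 3
 *      aes_key_expand 4
 *      aes_key_expand 5
 *      aes_key_expand 6
 *      aes_key_expand 7
 *      aes_key_expand 8
 *      aes_key_expand 9
 *      aes_key_expand 10
 */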

/* single-block AES round; mode 0 = encrypt, otherwise decrypt */
.macro aes_round block:req,key:req,mode:req
        .if \key < 9
                .if \mode == 0
                        aese    \block\().16b,vKey\key\().16b
                        aesmc   \block\().16b,\block\().16b
                .else
                        aesd    \block\().16b,vKey\key\().16b
                        aesimc  \block\().16b,\block\().16b
                .endif
        .endif
        .if \key == 9
                .if \mode == 0
                        aese    \block\().16b,vKey\key\().16b
                .else
                        aesd    \block\().16b,vKey\key\().16b
                .endif
        .endif
        .if \key == 10
                eor     \block\().16b,\block\().16b,vKey\key\().16b
        .endif
.endm

.macro aes_round_interleave b0:req,b1:req,b2:req,b3:req,key:req,mode:req,last_key
        .if \key < 9
                .if \mode == 0
                        aese    \b0\().16b,vKey\key\().16b
                        aesmc   \b0\().16b,\b0\().16b
                        aese    \b1\().16b,vKey\key\().16b
                        aesmc   \b1\().16b,\b1\().16b
                        aese    \b2\().16b,vKey\key\().16b
                        aesmc   \b2\().16b,\b2\().16b
                        aese    \b3\().16b,vKey\key\().16b
                        aesmc   \b3\().16b,\b3\().16b
                .else
                        aesd    \b0\().16b,vKey\key\().16b
                        aesimc  \b0\().16b,\b0\().16b
                        aesd    \b1\().16b,vKey\key\().16b
                        aesimc  \b1\().16b,\b1\().16b
                        aesd    \b2\().16b,vKey\key\().16b
                        aesimc  \b2\().16b,\b2\().16b
                        aesd    \b3\().16b,vKey\key\().16b
                        aesimc  \b3\().16b,\b3\().16b
                .endif
        .endif

        .if \key == 9
                .if \mode == 0
                        aese    \b0\().16b,vKey\key\().16b
                        eor     \b0\().16b,\b0\().16b,vKey\last_key\().16b
                        aese    \b1\().16b,vKey\key\().16b
                        eor     \b1\().16b,\b1\().16b,vKey\last_key\().16b
                        aese    \b2\().16b,vKey\key\().16b
                        eor     \b2\().16b,\b2\().16b,vKey\last_key\().16b
                        aese    \b3\().16b,vKey\key\().16b
                        eor     \b3\().16b,\b3\().16b,vKey\last_key\().16b
                .else
                        aesd    \b0\().16b,vKey\key\().16b
                        eor     \b0\().16b,\b0\().16b,vKey\last_key\().16b
                        aesd    \b1\().16b,vKey\key\().16b
                        eor     \b1\().16b,\b1\().16b,vKey\last_key\().16b
                        aesd    \b2\().16b,vKey\key\().16b
                        eor     \b2\().16b,\b2\().16b,vKey\last_key\().16b
                        aesd    \b3\().16b,vKey\key\().16b
                        eor     \b3\().16b,\b3\().16b,vKey\last_key\().16b
                .endif
        .endif
.endm

.macro aes_rounds_interleave b0:req,b1:req,b2:req,b3:req,mode
        aes_round_interleave \b0,\b1,\b2,\b3,0,\mode
        aes_round_interleave \b0,\b1,\b2,\b3,1,\mode
        aes_round_interleave \b0,\b1,\b2,\b3,2,\mode
        aes_round_interleave \b0,\b1,\b2,\b3,3,\mode
        aes_round_interleave \b0,\b1,\b2,\b3,4,\mode
        aes_round_interleave \b0,\b1,\b2,\b3,5,\mode
        aes_round_interleave \b0,\b1,\b2,\b3,6,\mode
        aes_round_interleave \b0,\b1,\b2,\b3,7,\mode
        aes_round_interleave \b0,\b1,\b2,\b3,8,\mode
        aes_round_interleave \b0,\b1,\b2,\b3,9,\mode,10
.endm
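
/* Usage sketch (illustrative): run all rounds over four blocks in
 * parallel with the schedule in vKey0..vKey10 (vblk0..vblk3 are
 * hypothetical aliases). Mode 0 = encrypt; a non-zero mode selects the
 * aesd/aesimc path, which assumes the schedule already holds the
 * decryption round keys in the form aesimc expects:
 *
 *      aes_rounds_interleave vblk0,vblk1,vblk2,vblk3,0
 */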

.macro aes_rounds blk:req,mode:req
        aes_round \blk,0,\mode
        aes_round \blk,1,\mode
        aes_round \blk,2,\mode
        aes_round \blk,3,\mode
        aes_round \blk,4,\mode
        aes_round \blk,5,\mode
        aes_round \blk,6,\mode
        aes_round \blk,7,\mode
        aes_round \blk,8,\mode
        aes_round \blk,9,\mode
        aes_round \blk,10,\mode
.endm
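
/* Usage sketch (illustrative): full single-block encryption in place,
 * e.g. of the XTS tweak held in a hypothetical vIV register alias,
 * with the schedule already expanded into vKey0..vKey10:
 *
 *      aes_rounds vIV,0
 */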

/* load k1/k2 from memory and encrypt the tweak by k2
 * both key schedules share the same set of registers
 * but never overlap in time (each k2 round key is used only once
 * and then discarded); aes_enc_round is expected to be defined by
 * the including file
 */
.macro keyload_and_encrypt_tweak iv:req,k2:req,k1:req
        ldp     qKey0,qKey1,[\k2],#32
        aes_enc_round   \iv,0
        ldp     qKey2,qKey3,[\k2],#32
        aes_enc_round   \iv,1
        ldp     qKey0,qKey1,[\k1],#32
        aes_enc_round   \iv,2
        ldp     qKey4,qKey5,[\k2],#32
        aes_enc_round   \iv,3
        ldp     qKey2,qKey3,[\k1],#32
        aes_enc_round   \iv,4
        ldp     qKey6,qKey7,[\k2],#32
        aes_enc_round   \iv,5
        ldp     qKey4,qKey5,[\k1],#32
        aes_enc_round   \iv,6
        ldp     qKey8,qKey9,[\k2],#32
        aes_enc_round   \iv,7
        ldp     qKey6,qKey7,[\k1],#32
        aes_enc_round   \iv,8
        ld1     {vKey10.16b},[\k2],#16
        aes_enc_round   \iv,9
        ldp     qKey8,qKey9,[\k1],#32
        aes_enc_round   \iv,10
        ld1     {vKey10.16b},[\k1],#16
.endm
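
/* Usage sketch (illustrative): with x1/x2 (hypothetical) pointing at
 * the expanded k1 and k2 schedules in memory and the initial tweak in
 * a hypothetical vIV alias:
 *
 *      keyload_and_encrypt_tweak vIV,x2,x1
 *
 * On return vIV holds the encrypted tweak and vKey0..vKey10 hold the
 * k1 schedule, ready for the data blocks.
 */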

.macro save_stack
        /* d8/d9 are callee-saved; also carve out 16 bytes of scratch */
        stp     d8,d9,[sp, -32]!
        add     tmpbuf,sp,16
.endm

.macro restore_stack
        ldp     d8,d9,[sp],32
.endm