/**********************************************************************
  Copyright(c) 2021 Arm Corporation All rights reserved.

  Redistribution and use in source and binary forms, with or without
  modification, are permitted provided that the following conditions
  are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in
      the documentation and/or other materials provided with the
      distribution.
    * Neither the name of Arm Corporation nor the names of its
      contributors may be used to endorse or promote products derived
      from this software without specific prior written permission.

  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**********************************************************************/
/*
 * aes_key_expand_next next,prev,ctx
 *
 * Derive AES round key vKey\next from vKey\prev:
 *   - dup of the previous key's last word plus AESE against an all-zero
 *     block performs SubBytes (and, via ShiftRows on the duplicated
 *     lanes, presumably supplies the RotWord variant — confirm against
 *     upstream);
 *   - the ext/eor chain xor-folds the previous key words into the new
 *     key (the classic w[i] = w[i-1] ^ w[i-4] cascade).
 *
 * Clobbers: vtmp, vdest, tmpw.
 *
 * NOTE(review): assembler conditionals around the round-constant
 * handling appear to have been lost from this extract; the unconditional
 * "eor tmpw,\ctx,tmpw,ror 8" below (and a likely dropped instruction
 * after it) must be confirmed against the original source.
 */
.macro aes_key_expand_next next:req,prev:req,ctx:req
	dup	vdest.4s,vKey\prev\().s[3]			// broadcast last word of prev key
	ext	vtmp.16b,vzero.16b,vKey\prev\().16b,#12		// prev key shifted left one word
	aese	vdest.16b,vzero.16b				// SubBytes (AddRoundKey with zero)
	eor	vKey\next\().16b,vKey\prev\().16b,vtmp.16b
	ext	vtmp.16b,vzero.16b,vtmp.16b,#12			// shifted left two words
	eor	vKey\next\().16b,vKey\next\().16b,vtmp.16b
	ext	vtmp.16b,vzero.16b,vtmp.16b,#12			// shifted left three words
	eor	tmpw,\ctx,tmpw,ror 8				// round-constant update — see NOTE above
	eor	vKey\next\().16b,vKey\next\().16b,vtmp.16b
	eor	vKey\next\().16b,vKey\next\().16b,vdest.16b	// fold in SubWord/Rcon term
.endm
/* when loading key = 0
 *   arg1 = key address
 *   arg2 = rcon ctx register (optional)
 * when loading key > 0
 *   arg1 = rcon ctx register (optional)
 */
/*
 * aes_key_expand key,arg1,arg2
 *
 * For key == 0: load the user-supplied cipher key from [\arg1].
 * For key  > 0: expand round key \key from round key \key-1 via
 * aes_key_expand_next, passing either the default rcon register or a
 * caller-supplied rcon ctx register.
 *
 * NOTE(review): the .if/.altmacro scaffolding that selects exactly one
 * of the three statements below — and that defines the %prev expression
 * (presumably ".set prev,\key-1" under .altmacro) — is missing from
 * this extract. As written all three statements would expand
 * unconditionally; restore the conditionals from upstream before use.
 */
.macro aes_key_expand key:req,arg1,arg2
	ld1	{vKey\key\().4s},[\arg1]		// key 0: load initial key material
	aes_key_expand_next \key,%prev,rcon		// expand using default rcon register
	aes_key_expand_next \key,%prev,\arg1		// expand using caller-supplied rcon ctx
.endm
/*
 * aes_round block,key,mode
 *
 * One AES round on \block with round key vKey\key.  The instruction
 * pairs cover: a normal encrypt round (aese+aesmc), a normal decrypt
 * round (aesd+aesimc), the final encrypt/decrypt round (no
 * MixColumns), and the closing AddRoundKey xor with the last key.
 *
 * NOTE(review): the .if directives dispatching on \key (normal vs
 * final vs last-key) and \mode (encrypt vs decrypt) are missing from
 * this extract; as written every instruction below would assemble
 * unconditionally, which cannot be the intended behavior.  Restore
 * the conditionals from upstream.
 */
.macro aes_round block:req,key:req,mode:req
	aese	\block\().16b,vKey\key\().16b		// encrypt: SubBytes+ShiftRows+AddRoundKey
	aesmc	\block\().16b,\block\().16b		// encrypt: MixColumns
	aesd	\block\().16b,vKey\key\().16b		// decrypt round
	aesimc	\block\().16b,\block\().16b		// decrypt: InvMixColumns
	aese	\block\().16b,vKey\key\().16b		// encrypt final round (no MixColumns)
	aesd	\block\().16b,vKey\key\().16b		// decrypt final round
	eor	\block\().16b,\block\().16b,vKey\key\().16b	// last AddRoundKey
.endm
/*
 * aes_round_interleave b0,b1,b2,b3,key,mode[,last_key]
 *
 * One AES round applied to four independent blocks, software-pipelined
 * so the per-block aese/aesd latencies overlap.  Sections below cover:
 * normal encrypt rounds, normal decrypt rounds, and the final
 * encrypt/decrypt round where \last_key supplies the closing
 * AddRoundKey operand.
 *
 * NOTE(review): the .if/.ifb directives that select exactly one of the
 * four sections (on \mode and on whether \last_key is supplied) are
 * missing from this extract; restore them from upstream — as written
 * all four sections would assemble unconditionally.
 */
.macro aes_round_interleave b0:req,b1:req,b2:req,b3:req,key:req,mode:req,last_key
	// --- normal encrypt round, 4-way interleaved ---
	aese	\b0\().16b,vKey\key\().16b
	aesmc	\b0\().16b,\b0\().16b
	aese	\b1\().16b,vKey\key\().16b
	aesmc	\b1\().16b,\b1\().16b
	aese	\b2\().16b,vKey\key\().16b
	aesmc	\b2\().16b,\b2\().16b
	aese	\b3\().16b,vKey\key\().16b
	aesmc	\b3\().16b,\b3\().16b
	// --- normal decrypt round, 4-way interleaved ---
	aesd	\b0\().16b,vKey\key\().16b
	aesimc	\b0\().16b,\b0\().16b
	aesd	\b1\().16b,vKey\key\().16b
	aesimc	\b1\().16b,\b1\().16b
	aesd	\b2\().16b,vKey\key\().16b
	aesimc	\b2\().16b,\b2\().16b
	aesd	\b3\().16b,vKey\key\().16b
	aesimc	\b3\().16b,\b3\().16b
	// --- final encrypt round: no MixColumns, xor with last round key ---
	aese	\b0\().16b,vKey\key\().16b
	eor	\b0\().16b,\b0\().16b,vKey\last_key\().16b
	aese	\b1\().16b,vKey\key\().16b
	eor	\b1\().16b,\b1\().16b,vKey\last_key\().16b
	aese	\b2\().16b,vKey\key\().16b
	eor	\b2\().16b,\b2\().16b,vKey\last_key\().16b
	aese	\b3\().16b,vKey\key\().16b
	eor	\b3\().16b,\b3\().16b,vKey\last_key\().16b
	// --- final decrypt round: no InvMixColumns, xor with last round key ---
	aesd	\b0\().16b,vKey\key\().16b
	eor	\b0\().16b,\b0\().16b,vKey\last_key\().16b
	aesd	\b1\().16b,vKey\key\().16b
	eor	\b1\().16b,\b1\().16b,vKey\last_key\().16b
	aesd	\b2\().16b,vKey\key\().16b
	eor	\b2\().16b,\b2\().16b,vKey\last_key\().16b
	aesd	\b3\().16b,vKey\key\().16b
	eor	\b3\().16b,\b3\().16b,vKey\last_key\().16b
.endm
/*
 * aes_rounds_interleave b0,b1,b2,b3,mode
 *
 * Full AES-128 (10-round) transformation of four blocks in parallel.
 * Rounds 0-8 are normal rounds; round 9 is the final round and passes
 * key index 10 as last_key for the closing AddRoundKey xor.
 * Requires round keys preloaded in vKey0..vKey10.
 */
.macro aes_rounds_interleave b0:req,b1:req,b2:req,b3:req,mode
	aes_round_interleave \b0,\b1,\b2,\b3,0,\mode
	aes_round_interleave \b0,\b1,\b2,\b3,1,\mode
	aes_round_interleave \b0,\b1,\b2,\b3,2,\mode
	aes_round_interleave \b0,\b1,\b2,\b3,3,\mode
	aes_round_interleave \b0,\b1,\b2,\b3,4,\mode
	aes_round_interleave \b0,\b1,\b2,\b3,5,\mode
	aes_round_interleave \b0,\b1,\b2,\b3,6,\mode
	aes_round_interleave \b0,\b1,\b2,\b3,7,\mode
	aes_round_interleave \b0,\b1,\b2,\b3,8,\mode
	aes_round_interleave \b0,\b1,\b2,\b3,9,\mode,10	// final round + last-key xor
.endm
/*
 * aes_rounds blk,mode
 *
 * Full AES-128 (10-round) transformation of a single block \blk.
 * Key indices 0..10 are handed to aes_round, which selects the normal,
 * final, and last-key behavior per index.  Requires round keys
 * preloaded in vKey0..vKey10.
 */
.macro aes_rounds blk:req,mode:req
	aes_round \blk,0,\mode
	aes_round \blk,1,\mode
	aes_round \blk,2,\mode
	aes_round \blk,3,\mode
	aes_round \blk,4,\mode
	aes_round \blk,5,\mode
	aes_round \blk,6,\mode
	aes_round \blk,7,\mode
	aes_round \blk,8,\mode
	aes_round \blk,9,\mode		// final round (no MixColumns)
	aes_round \blk,10,\mode		// last AddRoundKey
.endm
/* load k1/k2 from memory and encrypt the tweak by k2;
 * both keys will share the same set of registers
 * but will never overlap (k2 is used only once and discarded)
 */
/*
 * keyload_and_encrypt_tweak iv,k2,k1
 *
 * Load the expanded AES-128 key schedules of \k2 and \k1 into
 * vKey0..vKey10.  The k2 schedule is consumed first (to encrypt the
 * XTS tweak in \iv) and each pair of its registers is overwritten by
 * the corresponding k1 pair only after it has been used, so the two
 * schedules share registers without their live ranges overlapping.
 *
 * FIX: the qKey8/qKey9 load from k2 used "[k2]" — the macro-argument
 * escape was missing, so it referenced a literal symbol k2 instead of
 * the \k2 parameter; corrected to "[\k2]".
 *
 * NOTE(review): the aes_enc_round invocations that encrypt \iv between
 * the paired loads appear to be missing from this extract, and the
 * macro body (and its .endm) continues past the end of this view —
 * confirm against the full source.
 */
.macro keyload_and_encrypt_tweak iv:req,k2:req,k1:req
	ldp	qKey0,qKey1,[\k2],#32
	ldp	qKey2,qKey3,[\k2],#32
	ldp	qKey0,qKey1,[\k1],#32	// k2 keys 0-1 already consumed; reuse regs for k1
	ldp	qKey4,qKey5,[\k2],#32
	ldp	qKey2,qKey3,[\k1],#32
	ldp	qKey6,qKey7,[\k2],#32
	ldp	qKey4,qKey5,[\k1],#32
	ldp	qKey8,qKey9,[\k2],#32	// FIX: was "[k2]" — missing '\' on macro argument
	ldp	qKey6,qKey7,[\k1],#32
	ld1	{vKey10.16b},[\k2],#16
	ldp	qKey8,qKey9,[\k1],#32
	ld1	{vKey10.16b},[\k1],#16