/**********************************************************************
  Copyright(c) 2020 Arm Corporation All rights reserved.

  Redistribution and use in source and binary forms, with or without
  modification, are permitted provided that the following conditions
  are met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above copyright
      notice, this list of conditions and the following disclaimer in
      the documentation and/or other materials provided with the
      distribution.
    * Neither the name of Arm Corporation nor the names of its
      contributors may be used to endorse or promote products derived
      from this software without specific prior written permission.

  THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
  "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
  LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
  A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
  OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
  SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
  LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
  DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
  THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
**********************************************************************/
        .arch   armv8-a+crypto

        .text
/*
 * Macros
 */
/* NUM_ROUNDS(a) = 7 + a/32: the number of 16-byte round keys for an a-bit
 * key, i.e. 15 for AES-256 (14 rounds plus the initial AddRoundKey). */
#define NUM_ROUNDS(a) (7+(a)/32)

/* Alias the q/v/s views of one vector register under a single name. */
.macro  declare_var_vector_reg name:req,reg:req
        q\name  .req    q\reg
        v\name  .req    v\reg
        s\name  .req    s\reg
.endm
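/*
 * round_256 off,rcon: one step of the AES-256 key schedule (the trailing
 * export_dec_key argument is accepted but unused). The first step (off == 0)
 * also copies the 8-word user key into round keys 0 and 1. Each step derives
 * the next 8 words (only 4 on the final step): word 0 folds in
 * SubWord(RotWord(w7)) ^ rcon, word 4 folds in SubWord(w3), and the remaining
 * words are chained XORs, written straight into exp_key_enc.
 */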
.macro  round_256 off:req,rcon:req,export_dec_key
    .if \off == 0
        /* Load the 256-bit user key and store it as round keys 0 and 1. */
        ldp     w_tmp6,w_tmp7,[key,24]
        ldp     w_tmp0,w_tmp1,[key]
        ldp     w_tmp2,w_tmp3,[key,8]
        ldp     w_tmp4,w_tmp5,[key,16]
        movi    vzero.4s,0
        dup     vsrc.4s,w_tmp7
        stp     w_tmp6,w_tmp7,[exp_key_enc,24]
        stp     w_tmp0,w_tmp1,[exp_key_enc]
        stp     w_tmp4,w_tmp5,[exp_key_enc,16]
        stp     w_tmp2,w_tmp3,[exp_key_enc,8]
    .endif
        mov     w0,\rcon
        mov     vdest.16b,vzero.16b
        aese    vdest.16b,vsrc.16b              /* AESE with a zero round key
                                                   on a broadcast word gives
                                                   SubWord() in lane 0 */
        mov     w_tmp,vdest.s[0]
        eor     w_tmp0,w_tmp0,w0                /* ^ rcon */
        eor     w_tmp0,w_tmp0,w_tmp,ror 8       /* ^ SubWord(RotWord(w7)) */
        eor     w_tmp1,w_tmp0,w_tmp1
        eor     w_tmp2,w_tmp1,w_tmp2
        eor     w_tmp3,w_tmp2,w_tmp3
    .if \off < 6
        dup     vsrc.4s,w_tmp3
        mov     vdest.16b,vzero.16b
        aese    vdest.16b,vsrc.16b              /* lane 0 = SubWord(w_tmp3) */
        mov     w_tmp,vdest.s[0]
        eor     w_tmp4,w_tmp4,w_tmp             /* no RotWord, no rcon here */
        eor     w_tmp5,w_tmp5,w_tmp4
        eor     w_tmp6,w_tmp6,w_tmp5
        eor     w_tmp7,w_tmp7,w_tmp6
        dup     vsrc.4s,w_tmp7                  /* feeds the next step */
        stp     w_tmp0,w_tmp1,[exp_key_enc,KEY_LEN*(\off+1)]
        stp     w_tmp2,w_tmp3,[exp_key_enc,KEY_LEN*(\off+1)+8]
        stp     w_tmp4,w_tmp5,[exp_key_enc,KEY_LEN*(\off+1)+16]
        stp     w_tmp6,w_tmp7,[exp_key_enc,KEY_LEN*(\off+1)+24]
    .else
        /* Final step: 4 more words complete the 60-word schedule. */
        stp     w_tmp0,w_tmp1,[exp_key_enc,KEY_LEN*(\off+1)]
        stp     w_tmp2,w_tmp3,[exp_key_enc,KEY_LEN*(\off+1)+8]
    .endif
.endm

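/*
 * export_dec_key rounds,enc_key,dec_key: build the decryption schedule from
 * the encryption schedule. The first and last round keys swap places, and
 * AESIMC (InvMixColumns) is applied to the thirteen middle round keys, which
 * are stored in reverse order.
 */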
.macro  export_dec_key rounds:req,enc_key:req,dec_key:req
        ldr     q0,[\enc_key]
        ldr     q1,[\enc_key,(\rounds-1)*16]
        str     q0,[\dec_key,(\rounds-1)*16]
        str     q1,[\dec_key]
        ldp     q0,q1,[\enc_key,1*16]
        ldp     q2,q3,[\enc_key,(1+2)*16]
        ldp     q4,q5,[\enc_key,(1+4)*16]
        aesimc  v0.16b,v0.16b
        aesimc  v1.16b,v1.16b
        ldp     q6,q7,[\enc_key,(1+6)*16]
        aesimc  v2.16b,v2.16b
        aesimc  v3.16b,v3.16b
        stp     q1,q0,[\dec_key,(\rounds-1-2)*16]
        ldp     q0,q1,[\enc_key,(1+8)*16]
        aesimc  v4.16b,v4.16b
        aesimc  v5.16b,v5.16b
        stp     q3,q2,[\dec_key,(\rounds-1-4)*16]
        ldp     q2,q3,[\enc_key,(1+10)*16]

        aesimc  v6.16b,v6.16b
        aesimc  v7.16b,v7.16b
        stp     q5,q4,[\dec_key,(\rounds-1-6)*16]
        ldr     q4,[\enc_key,(1+12)*16]
        aesimc  v0.16b,v0.16b
        aesimc  v1.16b,v1.16b
        stp     q7,q6,[\dec_key,(\rounds-1-8)*16]
        aesimc  v2.16b,v2.16b
        aesimc  v3.16b,v3.16b
        stp     q1,q0,[\dec_key,(\rounds-1-10)*16]
        aesimc  v4.16b,v4.16b
        stp     q3,q2,[\dec_key,(\rounds-1-12)*16]
        str     q4,[\dec_key,(\rounds-1-13)*16]
.endm
/**
        void aes_keyexp_256_aes(const uint8_t * key,
                                uint8_t * exp_key_enc, uint8_t * exp_key_dec)
*/
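/*
 * A minimal C-side usage sketch (illustrative only, not part of this file;
 * the buffer names are hypothetical). The sizes follow from the code below,
 * which writes NUM_ROUNDS(256) = 15 round keys of 16 bytes into each
 * expanded-key buffer:
 *
 *      extern void aes_keyexp_256_aes(const uint8_t *key,
 *                                     uint8_t *exp_key_enc,
 *                                     uint8_t *exp_key_dec);
 *
 *      uint8_t key[32];           // 256-bit user key
 *      uint8_t enc_keys[15 * 16]; // encryption round keys
 *      uint8_t dec_keys[15 * 16]; // decryption round keys
 *      aes_keyexp_256_aes(key, enc_keys, dec_keys);
 */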
        key             .req    x0
        exp_key_enc     .req    x1
        exp_key_dec     .req    x2
        .equ    KEY_LEN, (256/8)
        w_tmp0  .req    w3
        w_tmp1  .req    w4
        w_tmp2  .req    w5
        w_tmp3  .req    w6
        w_tmp   .req    w7
        w_tmp4  .req    w9
        w_tmp5  .req    w10
        w_tmp6  .req    w11
        w_tmp7  .req    w12
        declare_var_vector_reg  dest,0
        declare_var_vector_reg  zero,1
        declare_var_vector_reg  src, 2

        .global aes_keyexp_256_aes
        .type   aes_keyexp_256_aes, %function

aes_keyexp_256_aes:
        .set    rcon,1
        .set    off,0
        /* Seven schedule steps produce the full 60-word (15 round key)
           encryption schedule; rcon advances by xtime in GF(2^8). */
        .rept   7
                round_256       off,rcon,1
                .set    off,off+1
                .set    rcon,(rcon << 1) ^ ((rcon >> 7) * 0x11b)
        .endr
        /* Derive the decryption schedule from the encryption schedule. */
        export_dec_key  NUM_ROUNDS(256),exp_key_enc,exp_key_dec
        ret
        .size   aes_keyexp_256_aes, .-aes_keyexp_256_aes