// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * talitos - Freescale Integrated Security Engine (SEC) device driver
 *
 * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
 *
 * Scatterlist Crypto API glue code copied from files with the following:
 * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Crypto algorithm registration code copied from hifn driver:
 * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
 * All rights reserved.
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/crypto.h>
#include <linux/hw_random.h>
#include <linux/of_address.h>
#include <linux/of_irq.h>
#include <linux/of_platform.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/spinlock.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>

#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/md5.h>
#include <crypto/internal/aead.h>
#include <crypto/authenc.h>
#include <crypto/internal/skcipher.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>

#include "talitos.h"

static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
			   unsigned int len, bool is_sec1)
{
	ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
	if (is_sec1) {
		ptr->len1 = cpu_to_be16(len);
	} else {
		ptr->len = cpu_to_be16(len);
		ptr->eptr = upper_32_bits(dma_addr);
	}
}

static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
			     struct talitos_ptr *src_ptr, bool is_sec1)
{
	dst_ptr->ptr = src_ptr->ptr;
	if (is_sec1) {
		dst_ptr->len1 = src_ptr->len1;
	} else {
		dst_ptr->len = src_ptr->len;
		dst_ptr->eptr = src_ptr->eptr;
	}
}

static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
					   bool is_sec1)
{
	if (is_sec1)
		return be16_to_cpu(ptr->len1);
	else
		return be16_to_cpu(ptr->len);
}

static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
				   bool is_sec1)
{
	if (!is_sec1)
		ptr->j_extent = val;
}

static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
{
	if (!is_sec1)
		ptr->j_extent |= val;
}

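/*
 * Overview of the two pointer layouts handled by the helpers above: SEC1
 * parts carry only a 16-bit length (len1) and a 32-bit address, while
 * SEC2/3 parts additionally carry eptr (upper address bits for 36-bit
 * addressing) and j_extent, which holds link-table jump/return flags and
 * extent lengths.
 */
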
/*
 * map virtual single (contiguous) pointer to h/w descriptor pointer
 */
static void __map_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     unsigned int len, void *data,
				     enum dma_data_direction dir,
				     unsigned long attrs)
{
	dma_addr_t dma_addr = dma_map_single_attrs(dev, data, len, dir, attrs);
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	to_talitos_ptr(ptr, dma_addr, len, is_sec1);
}

static void map_single_talitos_ptr(struct device *dev,
				   struct talitos_ptr *ptr,
				   unsigned int len, void *data,
				   enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir, 0);
}

static void map_single_talitos_ptr_nosync(struct device *dev,
					  struct talitos_ptr *ptr,
					  unsigned int len, void *data,
					  enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir,
				 DMA_ATTR_SKIP_CPU_SYNC);
}

/*
 * unmap bus single (contiguous) h/w descriptor pointer
 */
static void unmap_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     enum dma_data_direction dir)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
			 from_talitos_ptr_len(ptr, is_sec1), dir);
}

static int reset_channel(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);

	if (is_sec1) {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS1_CCCR_LO_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
			TALITOS1_CCCR_LO_RESET) && --timeout)
			cpu_relax();
	} else {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR,
			  TALITOS2_CCCR_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			TALITOS2_CCCR_RESET) && --timeout)
			cpu_relax();
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset channel %d\n", ch);
		return -EIO;
	}

	/* set 36-bit addressing, done writeback enable and done IRQ enable */
	setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
		  TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
	/* enable chaining descriptors */
	if (is_sec1)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_NE);

	/* and ICCR writeback, if available */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_IWSE);

	return 0;
}

static int reset_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);
	u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;

	setbits32(priv->reg + TALITOS_MCR, mcr);

	while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
	       && --timeout)
		cpu_relax();

	if (priv->irq[1]) {
		mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
		setbits32(priv->reg + TALITOS_MCR, mcr);
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset device\n");
		return -EIO;
	}

	return 0;
}

/*
 * Reset and initialize the device
 */
static int init_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int ch, err;
	bool is_sec1 = has_ftr_sec1(priv);

	/*
	 * Master reset
	 * errata documentation: warning: certain SEC interrupts
	 * are not fully cleared by writing the MCR:SWR bit,
	 * set bit twice to completely reset
	 */
	err = reset_device(dev);
	if (err)
		return err;

	err = reset_device(dev);
	if (err)
		return err;

	/* reset channels */
	for (ch = 0; ch < priv->num_channels; ch++) {
		err = reset_channel(dev, ch);
		if (err)
			return err;
	}

	/* enable channel done and error interrupts */
	if (is_sec1) {
		clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
		clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
		/* disable parity error check in DEU (erroneous? test vect.) */
		setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
	} else {
		setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
		setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
	}

	/* disable integrity check error interrupts (use writeback instead) */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
			  TALITOS_MDEUICR_LO_ICE);

	return 0;
}

/**
 * talitos_submit - submits a descriptor to the device for processing
 * @dev:	the SEC device to be used
 * @ch:		the SEC device channel to be used
 * @desc:	the descriptor to be processed by the device
 * @callback:	whom to call when processing is complete
 * @context:	a handle for use by caller (optional)
 *
 * desc must contain valid dma-mapped (bus physical) address pointers.
 * callback must check err and feedback in descriptor header
 * for device processing status.
 */
static int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
			  void (*callback)(struct device *dev,
					   struct talitos_desc *desc,
					   void *context, int error),
			  void *context)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request;
	unsigned long flags;
	int head;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].head_lock, flags);

	if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
		/* h/w fifo is full */
		spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
		return -EAGAIN;
	}

	head = priv->chan[ch].head;
	request = &priv->chan[ch].fifo[head];

	/* map descriptor and save caller data */
	if (is_sec1) {
		desc->hdr1 = desc->hdr;
		request->dma_desc = dma_map_single(dev, &desc->hdr1,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	} else {
		request->dma_desc = dma_map_single(dev, desc,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	}
	request->callback = callback;
	request->context = context;

	/* increment fifo head */
	priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);

	smp_wmb();
	request->desc = desc;

	/* GO! */
	wmb();
	out_be32(priv->chan[ch].reg + TALITOS_FF,
		 upper_32_bits(request->dma_desc));
	out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
		 lower_32_bits(request->dma_desc));

	spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);

	return -EINPROGRESS;
}

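/*
 * Typical submission pattern used by the request-builder paths further
 * down (see ipsec_esp() and common_nonsnoop()), shown here as a sketch:
 *
 *	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
 *	if (ret != -EINPROGRESS) {
 *		unmap the descriptor's DMA resources;
 *		kfree(edesc);
 *	}
 *
 * -EINPROGRESS means the descriptor was queued and the callback will run
 * later from the done tasklet; -EAGAIN means the channel fifo was full.
 */
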
static __be32 get_request_hdr(struct talitos_request *request, bool is_sec1)
{
	struct talitos_edesc *edesc;

	if (!is_sec1)
		return request->desc->hdr;

	if (!request->desc->next_desc)
		return request->desc->hdr1;

	edesc = container_of(request->desc, struct talitos_edesc, desc);

	return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;
}

/*
 * process what was done, notify callback of error if not
 */
static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request, saved_req;
	unsigned long flags;
	int tail, status;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);

	tail = priv->chan[ch].tail;
	while (priv->chan[ch].fifo[tail].desc) {
		__be32 hdr;

		request = &priv->chan[ch].fifo[tail];

		/* descriptors with their done bits set don't get the error */
		rmb();
		hdr = get_request_hdr(request, is_sec1);

		if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
			status = 0;
		else
			if (!error)
				break;
			else
				status = error;

		dma_unmap_single(dev, request->dma_desc,
				 TALITOS_DESC_SIZE,
				 DMA_BIDIRECTIONAL);

		/* copy entries so we can call callback outside lock */
		saved_req.desc = request->desc;
		saved_req.callback = request->callback;
		saved_req.context = request->context;

		/* release request entry in fifo */
		smp_wmb();
		request->desc = NULL;

		/* increment fifo tail */
		priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);

		spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);

		atomic_dec(&priv->chan[ch].submit_count);

		saved_req.callback(dev, saved_req.desc, saved_req.context,
				   status);
		/* channel may resume processing in single desc error case */
		if (error && !reset_ch && status == error)
			return;
		spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
		tail = priv->chan[ch].tail;
	}

	spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
}

/*
 * process completed requests for channels that have done status
 */
#define DEF_TALITOS1_DONE(name, ch_done_mask)				\
static void talitos1_done_##name(unsigned long data)			\
{									\
	struct device *dev = (struct device *)data;			\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	unsigned long flags;						\
									\
	if (ch_done_mask & 0x10000000)					\
		flush_channel(dev, 0, 0, 0);				\
	if (ch_done_mask & 0x40000000)					\
		flush_channel(dev, 1, 0, 0);				\
	if (ch_done_mask & 0x00010000)					\
		flush_channel(dev, 2, 0, 0);				\
	if (ch_done_mask & 0x00040000)					\
		flush_channel(dev, 3, 0, 0);				\
									\
	/* At this point, all completed channels have been processed */ \
	/* Unmask done interrupts for channels completed later on. */	\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);		\
	clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);	\
	spin_unlock_irqrestore(&priv->reg_lock, flags);			\
}

DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)

#define DEF_TALITOS2_DONE(name, ch_done_mask)				\
static void talitos2_done_##name(unsigned long data)			\
{									\
	struct device *dev = (struct device *)data;			\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	unsigned long flags;						\
									\
	if (ch_done_mask & 1)						\
		flush_channel(dev, 0, 0, 0);				\
	if (ch_done_mask & (1 << 2))					\
		flush_channel(dev, 1, 0, 0);				\
	if (ch_done_mask & (1 << 4))					\
		flush_channel(dev, 2, 0, 0);				\
	if (ch_done_mask & (1 << 6))					\
		flush_channel(dev, 3, 0, 0);				\
									\
	/* At this point, all completed channels have been processed */ \
	/* Unmask done interrupts for channels completed later on. */	\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	setbits32(priv->reg + TALITOS_IMR, ch_done_mask);		\
	setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);	\
	spin_unlock_irqrestore(&priv->reg_lock, flags);			\
}

DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)

/*
 * locate current (offending) descriptor
 */
static __be32 current_desc_hdr(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int tail, iter;
	dma_addr_t cur_desc;

	cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
	cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);

	if (!cur_desc) {
		dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
		return 0;
	}

	tail = priv->chan[ch].tail;

	iter = tail;
	while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
	       priv->chan[ch].fifo[iter].desc->next_desc != cpu_to_be32(cur_desc)) {
		iter = (iter + 1) & (priv->fifo_len - 1);
		if (iter == tail) {
			dev_err(dev, "couldn't locate current descriptor\n");
			return 0;
		}
	}

	if (priv->chan[ch].fifo[iter].desc->next_desc == cpu_to_be32(cur_desc)) {
		struct talitos_edesc *edesc;

		edesc = container_of(priv->chan[ch].fifo[iter].desc,
				     struct talitos_edesc, desc);
		return ((struct talitos_desc *)
			(edesc->buf + edesc->dma_len))->hdr;
	}

	return priv->chan[ch].fifo[iter].desc->hdr;
}

/*
 * user diagnostics; report root cause of error based on execution unit status
 */
static void report_eu_error(struct device *dev, int ch, __be32 desc_hdr)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int i;

	if (!desc_hdr)
		desc_hdr = cpu_to_be32(in_be32(priv->chan[ch].reg + TALITOS_DESCBUF));

	switch (desc_hdr & DESC_HDR_SEL0_MASK) {
	case DESC_HDR_SEL0_AFEU:
		dev_err(dev, "AFEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_afeu + TALITOS_EUISR),
			in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_DEU:
		dev_err(dev, "DEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_deu + TALITOS_EUISR),
			in_be32(priv->reg_deu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_MDEUA:
	case DESC_HDR_SEL0_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_RNG:
		dev_err(dev, "RNGUISR 0x%08x_%08x\n",
			in_be32(priv->reg_rngu + TALITOS_ISR),
			in_be32(priv->reg_rngu + TALITOS_ISR_LO));
		break;
	case DESC_HDR_SEL0_PKEU:
		dev_err(dev, "PKEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_AESU:
		dev_err(dev, "AESUISR 0x%08x_%08x\n",
			in_be32(priv->reg_aesu + TALITOS_EUISR),
			in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_KEU:
		dev_err(dev, "KEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	}

	switch (desc_hdr & DESC_HDR_SEL1_MASK) {
	case DESC_HDR_SEL1_MDEUA:
	case DESC_HDR_SEL1_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL1_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	}

	for (i = 0; i < 8; i++)
		dev_err(dev, "DESCBUF 0x%08x_%08x\n",
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
}

/*
 * recover from error interrupts
 */
static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	int ch, error, reset_dev = 0;
	u32 v_lo;
	bool is_sec1 = has_ftr_sec1(priv);
	int reset_ch = is_sec1 ? 1 : 0; /* only SEC2 supports continuation */

	for (ch = 0; ch < priv->num_channels; ch++) {
		/* skip channels without errors */
		if (is_sec1) {
			/* bits 29, 31, 17, 19 */
			if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
				continue;
		} else {
			if (!(isr & (1 << (ch * 2 + 1))))
				continue;
		}

		error = -EINVAL;

		v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);

		if (v_lo & TALITOS_CCPSR_LO_DOF) {
			dev_err(dev, "double fetch fifo overflow error\n");
			error = -EAGAIN;
			reset_ch = 1;
		}
		if (v_lo & TALITOS_CCPSR_LO_SOF) {
			/* h/w dropped descriptor */
			dev_err(dev, "single fetch fifo overflow error\n");
			error = -EAGAIN;
		}
		if (v_lo & TALITOS_CCPSR_LO_MDTE)
			dev_err(dev, "master data transfer error\n");
		if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
			dev_err(dev, is_sec1 ? "pointer not complete error\n"
					     : "s/g data length zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_FPZ)
			dev_err(dev, is_sec1 ? "parity error\n"
					     : "fetch pointer zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_IDH)
			dev_err(dev, "illegal descriptor header error\n");
		if (v_lo & TALITOS_CCPSR_LO_IEU)
			dev_err(dev, is_sec1 ? "static assignment error\n"
					     : "invalid exec unit error\n");
		if (v_lo & TALITOS_CCPSR_LO_EU)
			report_eu_error(dev, ch, current_desc_hdr(dev, ch));
		if (!is_sec1) {
			if (v_lo & TALITOS_CCPSR_LO_GB)
				dev_err(dev, "gather boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_GRL)
				dev_err(dev, "gather return/length error\n");
			if (v_lo & TALITOS_CCPSR_LO_SB)
				dev_err(dev, "scatter boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_SRL)
				dev_err(dev, "scatter return/length error\n");
		}

		flush_channel(dev, ch, error, reset_ch);

		if (reset_ch) {
			reset_channel(dev, ch);
		} else {
			setbits32(priv->chan[ch].reg + TALITOS_CCCR,
				  TALITOS2_CCCR_CONT);
			setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
			while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			       TALITOS2_CCCR_CONT) && --timeout)
				cpu_relax();
			if (timeout == 0) {
				dev_err(dev, "failed to restart channel %d\n",
					ch);
				reset_dev = 1;
			}
		}
	}
	if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
	    (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
		if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
			dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
				isr, isr_lo);
		else
			dev_err(dev, "done overflow, internal time out, or "
				"rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);

		/* purge request queues */
		for (ch = 0; ch < priv->num_channels; ch++)
			flush_channel(dev, ch, -EIO, 1);

		/* reset and reinitialize the device */
		init_device(dev);
	}
}

#define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	\
static irqreturn_t talitos1_interrupt_##name(int irq, void *data)	\
{									\
	struct device *dev = data;					\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	u32 isr, isr_lo;						\
	unsigned long flags;						\
									\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	isr = in_be32(priv->reg + TALITOS_ISR);				\
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			\
	/* Acknowledge interrupt */					\
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			\
									\
	if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) { \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		\
		talitos_error(dev, isr & ch_err_mask, isr_lo);		\
	}								\
	else {								\
		if (likely(isr & ch_done_mask)) {			\
			/* mask further done interrupts. */		\
			setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
			/* done_task will unmask done interrupts at exit */ \
			tasklet_schedule(&priv->done_task[tlet]);	\
		}							\
		spin_unlock_irqrestore(&priv->reg_lock, flags);		\
	}								\
									\
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
								IRQ_NONE;	\
}

DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)

#define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	\
static irqreturn_t talitos2_interrupt_##name(int irq, void *data)	\
{									\
	struct device *dev = data;					\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	u32 isr, isr_lo;						\
	unsigned long flags;						\
									\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	isr = in_be32(priv->reg + TALITOS_ISR);				\
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			\
	/* Acknowledge interrupt */					\
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			\
									\
	if (unlikely(isr & ch_err_mask || isr_lo)) {			\
		spin_unlock_irqrestore(&priv->reg_lock, flags);		\
		talitos_error(dev, isr & ch_err_mask, isr_lo);		\
	}								\
	else {								\
		if (likely(isr & ch_done_mask)) {			\
			/* mask further done interrupts. */		\
			clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
			/* done_task will unmask done interrupts at exit */ \
			tasklet_schedule(&priv->done_task[tlet]);	\
		}							\
		spin_unlock_irqrestore(&priv->reg_lock, flags);		\
	}								\
									\
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
								IRQ_NONE;	\
}

DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
		       0)
DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
		       1)

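/*
 * Interrupt flow in short: the hard irq handlers above acknowledge the
 * interrupt, mask further "done" interrupts and schedule the matching
 * talitos1/2 done tasklet; that tasklet calls flush_channel() for each
 * completed channel and unmasks the done interrupts again, while error
 * bits are handled synchronously through talitos_error().
 */
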
/*
 * hwrng
 */
static int talitos_rng_data_present(struct hwrng *rng, int wait)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	u32 ofl;
	int i;

	for (i = 0; i < 20; i++) {
		ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
		      TALITOS_RNGUSR_LO_OFL;
		if (ofl || !wait)
			break;
		udelay(10);
	}

	return !!ofl;
}

static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);

	/* rng fifo requires 64-bit accesses */
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);

	return sizeof(u32);
}

static int talitos_rng_init(struct hwrng *rng)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;

	setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
	while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
		 & TALITOS_RNGUSR_LO_RD)
	       && --timeout)
		cpu_relax();
	if (timeout == 0) {
		dev_err(dev, "failed to reset rng hw\n");
		return -ENODEV;
	}

	/* start generating */
	setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);

	return 0;
}

static int talitos_register_rng(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int err;

	priv->rng.name = dev_driver_string(dev);
	priv->rng.init = talitos_rng_init;
	priv->rng.data_present = talitos_rng_data_present;
	priv->rng.data_read = talitos_rng_data_read;
	priv->rng.priv = (unsigned long)dev;

	err = hwrng_register(&priv->rng);
	if (!err)
		priv->rng_registered = true;

	return err;
}

static void talitos_unregister_rng(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);

	if (!priv->rng_registered)
		return;

	hwrng_unregister(&priv->rng);
	priv->rng_registered = false;
}

/*
 * crypto alg
 */
#define TALITOS_CRA_PRIORITY 3000
/*
 * Defines a priority for doing AEAD with descriptors type
 * HMAC_SNOOP_NO_AFEA (HSNA) instead of type IPSEC_ESP
 */
#define TALITOS_CRA_PRIORITY_AEAD_HSNA (TALITOS_CRA_PRIORITY - 1)
#ifdef CONFIG_CRYPTO_DEV_TALITOS2
#define TALITOS_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
#else
#define TALITOS_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + SHA256_BLOCK_SIZE)
#endif
#define TALITOS_MAX_IV_LENGTH 16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */

struct talitos_ctx {
	struct device *dev;
	int ch;
	__be32 desc_hdr_template;
	u8 key[TALITOS_MAX_KEY_SIZE];
	u8 iv[TALITOS_MAX_IV_LENGTH];
	dma_addr_t dma_key;
	unsigned int keylen;
	unsigned int enckeylen;
	unsigned int authkeylen;
};

#define HASH_MAX_BLOCK_SIZE SHA512_BLOCK_SIZE
#define TALITOS_MDEU_MAX_CONTEXT_SIZE TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512

struct talitos_ahash_req_ctx {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	unsigned int hw_context_size;
	u8 buf[2][HASH_MAX_BLOCK_SIZE];
	int buf_idx;
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
	struct scatterlist bufsl[2];
	struct scatterlist *psrc;
};

struct talitos_export_state {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	u8 buf[HASH_MAX_BLOCK_SIZE];
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
};

static int aead_setkey(struct crypto_aead *authenc,
		       const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto badkey;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}

static int aead_des3_setkey(struct crypto_aead *authenc,
			    const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto out;

	err = -EINVAL;
	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto out;

	err = verify_aead_des3_key(authenc, keys.enckey, keys.enckeylen);
	if (err)
		goto out;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

out:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

static void talitos_sg_unmap(struct device *dev,
			     struct talitos_edesc *edesc,
			     struct scatterlist *src,
			     struct scatterlist *dst,
			     unsigned int len, unsigned int offset)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	unsigned int src_nents = edesc->src_nents ? : 1;
	unsigned int dst_nents = edesc->dst_nents ? : 1;

	if (is_sec1 && dst && dst_nents > 1) {
		dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
					   len, DMA_FROM_DEVICE);
		sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
				     offset);
	}
	if (src != dst) {
		if (src_nents == 1 || !is_sec1)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);

		if (dst && (dst_nents == 1 || !is_sec1))
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else if (src_nents == 1 || !is_sec1) {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}

static void ipsec_esp_unmap(struct device *dev,
			    struct talitos_edesc *edesc,
			    struct aead_request *areq, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
	bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];

	if (is_ipsec_esp)
		unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
					 DMA_FROM_DEVICE);
	unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst,
			 cryptlen + authsize, areq->assoclen);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	if (!is_ipsec_esp) {
		unsigned int dst_nents = edesc->dst_nents ? : 1;

		sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
				   areq->assoclen + cryptlen - ivsize);
	}
}

/*
 * ipsec_esp descriptor callbacks
 */
static void ipsec_esp_encrypt_done(struct device *dev,
				   struct talitos_desc *desc, void *context,
				   int err)
{
	struct aead_request *areq = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
	unsigned int ivsize = crypto_aead_ivsize(authenc);
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, areq, true);

	dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);

	kfree(edesc);

	aead_request_complete(areq, err);
}

static void ipsec_esp_decrypt_swauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_edesc *edesc;
	char *oicv, *icv;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req, false);

	if (!err) {
		/* auth check */
		oicv = edesc->buf + edesc->dma_len;
		icv = oicv - authsize;

		err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
	}

	kfree(edesc);

	aead_request_complete(req, err);
}

static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req, false);

	/* check ICV auth status */
	if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
		     DESC_HDR_LO_ICCR1_PASS))
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}

/*
 * convert scatterlist to SEC h/w link table format
 * stop at cryptlen bytes
 */
static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
				 unsigned int offset, int datalen, int elen,
				 struct talitos_ptr *link_tbl_ptr)
{
	int n_sg = elen ? sg_count + 1 : sg_count;
	int count = 0;
	int cryptlen = datalen + elen;

	while (cryptlen && sg && n_sg--) {
		unsigned int len = sg_dma_len(sg);

		if (offset >= len) {
			offset -= len;
			goto next;
		}

		len -= offset;

		if (len > cryptlen)
			len = cryptlen;

		if (datalen > 0 && len > datalen) {
			to_talitos_ptr(link_tbl_ptr + count,
				       sg_dma_address(sg) + offset, datalen, 0);
			to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
			count++;
			len -= datalen;
			offset += datalen;
		}
		to_talitos_ptr(link_tbl_ptr + count,
			       sg_dma_address(sg) + offset, len, 0);
		to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
		count++;
		cryptlen -= len;
		datalen -= len;
		offset = 0;

next:
		sg = sg_next(sg);
	}

	/* tag end of link table */
	if (count > 0)
		to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
				       DESC_PTR_LNKTBL_RET, 0);

	return count;
}

static int talitos_sg_map_ext(struct device *dev, struct scatterlist *src,
			      unsigned int len, struct talitos_edesc *edesc,
			      struct talitos_ptr *ptr, int sg_count,
			      unsigned int offset, int tbl_off, int elen,
			      bool force)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	if (!src) {
		to_talitos_ptr(ptr, 0, 0, is_sec1);
		return 1;
	}
	to_talitos_ptr_ext_set(ptr, elen, is_sec1);
	if (sg_count == 1 && !force) {
		to_talitos_ptr(ptr, sg_dma_address(src) + offset, len, is_sec1);
		return sg_count;
	}
	if (is_sec1) {
		to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, len, is_sec1);
		return sg_count;
	}
	sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len, elen,
					 &edesc->link_tbl[tbl_off]);
	if (sg_count == 1 && !force) {
		/* Only one segment now, so no link tbl needed*/
		copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
		return sg_count;
	}
	to_talitos_ptr(ptr, edesc->dma_link_tbl +
			    tbl_off * sizeof(struct talitos_ptr), len, is_sec1);
	to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);

	return sg_count;
}

static int talitos_sg_map(struct device *dev, struct scatterlist *src,
			  unsigned int len, struct talitos_edesc *edesc,
			  struct talitos_ptr *ptr, int sg_count,
			  unsigned int offset, int tbl_off)
{
	return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
				  tbl_off, 0, false);
}

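/*
 * Summary of the mapping helpers above: a descriptor pointer either
 * references the data directly (single segment) or becomes a
 * DESC_PTR_LNKTBL_JUMP entry pointing at a chain of link-table entries,
 * one per scatterlist segment, with the last entry tagged
 * DESC_PTR_LNKTBL_RET. SEC1 has no link tables on this path, so
 * multi-segment data is bounced through edesc->buf instead.
 */
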
/*
 * fill in and submit ipsec_esp descriptor
 */
static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
		     bool encrypt,
		     void (*callback)(struct device *dev,
				      struct talitos_desc *desc,
				      void *context, int error))
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int tbl_off = 0;
	int sg_count, ret;
	int elen = 0;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
	struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];
	dma_addr_t dma_icv = edesc->dma_link_tbl + edesc->dma_len - authsize;

	/* hmac key */
	to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  areq->assoclen + cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				      DMA_BIDIRECTIONAL : DMA_TO_DEVICE);

	/* hmac data */
	ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
			     &desc->ptr[1], sg_count, 0, tbl_off);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher iv */
	to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);

	/* cipher key */
	to_talitos_ptr(ckey_ptr, ctx->dma_key + ctx->authkeylen,
		       ctx->enckeylen, is_sec1);

	/*
	 * cipher in
	 * map and adjust cipher len to aead request cryptlen.
	 * extent is bytes of HMAC postpended to ciphertext,
	 * typically 12 for ipsec
	 */
	if (is_ipsec_esp && (desc->hdr & DESC_HDR_MODE1_MDEU_CICV))
		elen = authsize;

	ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
				 sg_count, areq->assoclen, tbl_off, elen,
				 false);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	if (is_ipsec_esp && encrypt)
		elen = authsize;
	else
		elen = 0;
	ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
				 sg_count, areq->assoclen, tbl_off, elen,
				 is_ipsec_esp && !encrypt);
	tbl_off += ret;

	if (!encrypt && is_ipsec_esp) {
		struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];

		/* Add an entry to the link table for ICV data */
		to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
		to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RET, is_sec1);

		/* icv data follows link tables */
		to_talitos_ptr(tbl_ptr, dma_icv, authsize, is_sec1);
		to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);
		sync_needed = true;
	} else if (!encrypt) {
		to_talitos_ptr(&desc->ptr[6], dma_icv, authsize, is_sec1);
		sync_needed = true;
	} else if (!is_ipsec_esp) {
		talitos_sg_map(dev, areq->dst, authsize, edesc, &desc->ptr[6],
			       sg_count, areq->assoclen + cryptlen, tbl_off);
	}

	/* iv out */
	if (is_ipsec_esp)
		map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
				       DMA_FROM_DEVICE);

	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len,
					   DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		ipsec_esp_unmap(dev, edesc, areq, encrypt);
		kfree(edesc);
	}
	return ret;
}

/*
 * allocate and map the extended descriptor
 */
static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
						 struct scatterlist *src,
						 struct scatterlist *dst,
						 u8 *iv,
						 unsigned int assoclen,
						 unsigned int cryptlen,
						 unsigned int authsize,
						 unsigned int ivsize,
						 int icv_stashing,
						 u32 cryptoflags,
						 bool encrypt)
{
	struct talitos_edesc *edesc;
	int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
	dma_addr_t iv_dma = 0;
	gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		      GFP_ATOMIC;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;

	if (cryptlen + authsize > max_len) {
		dev_err(dev, "length exceeds h/w max limit\n");
		return ERR_PTR(-EINVAL);
	}

	if (!dst || dst == src) {
		src_len = assoclen + cryptlen + authsize;
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_nents = dst ? src_nents : 0;
		dst_len = 0;
	} else { /* dst && dst != src*/
		src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
		dst_nents = sg_nents_for_len(dst, dst_len);
		if (dst_nents < 0) {
			dev_err(dev, "Invalid number of dst SG.\n");
			return ERR_PTR(-EINVAL);
		}
		dst_nents = (dst_nents == 1) ? 0 : dst_nents;
	}

	/*
	 * allocate space for base edesc plus the link tables,
	 * allowing for two separate entries for AD and generated ICV (+ 2),
	 * and space for two sets of ICVs (stashed and generated)
	 */
	alloc_len = sizeof(struct talitos_edesc);
	if (src_nents || dst_nents || !encrypt) {
		if (is_sec1)
			dma_len = (src_nents ? src_len : 0) +
				  (dst_nents ? dst_len : 0) + authsize;
		else
			dma_len = (src_nents + dst_nents + 2) *
				  sizeof(struct talitos_ptr) + authsize;
		alloc_len += dma_len;
	} else {
		dma_len = 0;
	}
	alloc_len += icv_stashing ? authsize : 0;

	/* if its a ahash, add space for a second desc next to the first one */
	if (is_sec1 && !dst)
		alloc_len += sizeof(struct talitos_desc);
	alloc_len += ivsize;

	edesc = kmalloc(alloc_len, GFP_DMA | flags);
	if (!edesc)
		return ERR_PTR(-ENOMEM);
	if (ivsize) {
		iv = memcpy(((u8 *)edesc) + alloc_len - ivsize, iv, ivsize);
		iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
	}
	memset(&edesc->desc, 0, sizeof(edesc->desc));

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->dma_len = dma_len;
	if (dma_len)
		edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0],
						     edesc->dma_len,
						     DMA_BIDIRECTIONAL);

	return edesc;
}

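/*
 * Layout of the allocation made by talitos_edesc_alloc(): the talitos_edesc
 * header is followed by dma_len bytes of link tables (SEC2+) or bounce
 * buffer (SEC1) including room for the generated ICV, then an optional
 * stashed ICV, an optional second descriptor used for SEC1 ahash requests,
 * and finally a copy of the IV from which iv_dma is mapped.
 */
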
79fd31d3 1415static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
62293a37 1416 int icv_stashing, bool encrypt)
4de9d0b5
LN
1417{
1418 struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
aeb4c132 1419 unsigned int authsize = crypto_aead_authsize(authenc);
4de9d0b5 1420 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
79fd31d3 1421 unsigned int ivsize = crypto_aead_ivsize(authenc);
7ede4c36 1422 unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
4de9d0b5 1423
aeb4c132 1424 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
7ede4c36 1425 iv, areq->assoclen, cryptlen,
aeb4c132 1426 authsize, ivsize, icv_stashing,
62293a37 1427 areq->base.flags, encrypt);
4de9d0b5
LN
1428}
1429
56af8cd4 1430static int aead_encrypt(struct aead_request *req)
9c4a7965
KP
1431{
1432 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1433 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
56af8cd4 1434 struct talitos_edesc *edesc;
9c4a7965
KP
1435
1436 /* allocate extended descriptor */
62293a37 1437 edesc = aead_edesc_alloc(req, req->iv, 0, true);
9c4a7965
KP
1438 if (IS_ERR(edesc))
1439 return PTR_ERR(edesc);
1440
1441 /* set encrypt */
70bcaca7 1442 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
9c4a7965 1443
7ede4c36 1444 return ipsec_esp(edesc, req, true, ipsec_esp_encrypt_done);
9c4a7965
KP
1445}
1446
56af8cd4 1447static int aead_decrypt(struct aead_request *req)
9c4a7965
KP
1448{
1449 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
aeb4c132 1450 unsigned int authsize = crypto_aead_authsize(authenc);
9c4a7965 1451 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
fe5720e2 1452 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
56af8cd4 1453 struct talitos_edesc *edesc;
9c4a7965
KP
1454 void *icvdata;
1455
9c4a7965 1456 /* allocate extended descriptor */
62293a37 1457 edesc = aead_edesc_alloc(req, req->iv, 1, false);
9c4a7965
KP
1458 if (IS_ERR(edesc))
1459 return PTR_ERR(edesc);
1460
4bbfb839
CL
1461 if ((edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP) &&
1462 (priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
e938e465
KP
1463 ((!edesc->src_nents && !edesc->dst_nents) ||
1464 priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {
9c4a7965 1465
fe5720e2 1466 /* decrypt and check the ICV */
e938e465
KP
1467 edesc->desc.hdr = ctx->desc_hdr_template |
1468 DESC_HDR_DIR_INBOUND |
fe5720e2 1469 DESC_HDR_MODE1_MDEU_CICV;
9c4a7965 1470
fe5720e2 1471 /* reset integrity check result bits */
9c4a7965 1472
7ede4c36
CL
1473 return ipsec_esp(edesc, req, false,
1474 ipsec_esp_decrypt_hwauth_done);
e938e465 1475 }
fe5720e2 1476
e938e465
KP
1477 /* Have to check the ICV with software */
1478 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
fe5720e2 1479
e938e465 1480 /* stash incoming ICV for later cmp with ICV generated by the h/w */
e345177d 1481 icvdata = edesc->buf + edesc->dma_len;
fe5720e2 1482
eae55a58
CL
1483 sg_pcopy_to_buffer(req->src, edesc->src_nents ? : 1, icvdata, authsize,
1484 req->assoclen + req->cryptlen - authsize);
fe5720e2 1485
7ede4c36 1486 return ipsec_esp(edesc, req, false, ipsec_esp_decrypt_swauth_done);
9c4a7965
KP
1487}
1488
373960d7 1489static int skcipher_setkey(struct crypto_skcipher *cipher,
4de9d0b5
LN
1490 const u8 *key, unsigned int keylen)
1491{
373960d7 1492 struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
2e13ce08 1493 struct device *dev = ctx->dev;
4de9d0b5 1494
ef7c5c85
HX
1495 if (ctx->keylen)
1496 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
1497
1498 memcpy(&ctx->key, key, keylen);
1499 ctx->keylen = keylen;
1500
1501 ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);
1502
1503 return 0;
1504}
1505
373960d7 1506static int skcipher_des_setkey(struct crypto_skcipher *cipher,
ef7c5c85
HX
1507 const u8 *key, unsigned int keylen)
1508{
373960d7
AB
1509 return verify_skcipher_des_key(cipher, key) ?:
1510 skcipher_setkey(cipher, key, keylen);
ef7c5c85 1511}
2e13ce08 1512
373960d7 1513static int skcipher_des3_setkey(struct crypto_skcipher *cipher,
ef7c5c85
HX
1514 const u8 *key, unsigned int keylen)
1515{
373960d7
AB
1516 return verify_skcipher_des3_key(cipher, key) ?:
1517 skcipher_setkey(cipher, key, keylen);
4de9d0b5
LN
1518}
1519
373960d7 1520static int skcipher_aes_setkey(struct crypto_skcipher *cipher,
1ba34e71
CL
1521 const u8 *key, unsigned int keylen)
1522{
1523 if (keylen == AES_KEYSIZE_128 || keylen == AES_KEYSIZE_192 ||
1524 keylen == AES_KEYSIZE_256)
373960d7 1525 return skcipher_setkey(cipher, key, keylen);
1ba34e71 1526
1ba34e71
CL
1527 return -EINVAL;
1528}
1529
4de9d0b5
LN
1530static void common_nonsnoop_unmap(struct device *dev,
1531 struct talitos_edesc *edesc,
373960d7 1532 struct skcipher_request *areq)
4de9d0b5
LN
1533{
1534 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
032d197e 1535
373960d7 1536 talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->cryptlen, 0);
4de9d0b5
LN
1537 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);
1538
4de9d0b5
LN
1539 if (edesc->dma_len)
1540 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1541 DMA_BIDIRECTIONAL);
1542}
1543
373960d7 1544static void skcipher_done(struct device *dev,
4de9d0b5
LN
1545 struct talitos_desc *desc, void *context,
1546 int err)
1547{
373960d7
AB
1548 struct skcipher_request *areq = context;
1549 struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1550 struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1551 unsigned int ivsize = crypto_skcipher_ivsize(cipher);
19bbbc63
KP
1552 struct talitos_edesc *edesc;
1553
1554 edesc = container_of(desc, struct talitos_edesc, desc);
4de9d0b5
LN
1555
1556 common_nonsnoop_unmap(dev, edesc, areq);
373960d7 1557 memcpy(areq->iv, ctx->iv, ivsize);
4de9d0b5
LN
1558
1559 kfree(edesc);
1560
1561 areq->base.complete(&areq->base, err);
1562}
1563
1564static int common_nonsnoop(struct talitos_edesc *edesc,
373960d7 1565 struct skcipher_request *areq,
4de9d0b5
LN
1566 void (*callback) (struct device *dev,
1567 struct talitos_desc *desc,
1568 void *context, int error))
1569{
373960d7
AB
1570 struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1571 struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
4de9d0b5
LN
1572 struct device *dev = ctx->dev;
1573 struct talitos_desc *desc = &edesc->desc;
373960d7
AB
1574 unsigned int cryptlen = areq->cryptlen;
1575 unsigned int ivsize = crypto_skcipher_ivsize(cipher);
4de9d0b5 1576 int sg_count, ret;
6a1e8d14 1577 bool sync_needed = false;
922f9dc8
LC
1578 struct talitos_private *priv = dev_get_drvdata(dev);
1579 bool is_sec1 = has_ftr_sec1(priv);
4de9d0b5
LN
1580
1581 /* first DWORD empty */
4de9d0b5
LN
1582
1583 /* cipher iv */
da9de146 1584 to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);
4de9d0b5
LN
1585
1586 /* cipher key */
2e13ce08 1587 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);
4de9d0b5 1588
6a1e8d14
LC
1589 sg_count = edesc->src_nents ?: 1;
1590 if (is_sec1 && sg_count > 1)
1591 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1592 cryptlen);
1593 else
1594 sg_count = dma_map_sg(dev, areq->src, sg_count,
1595 (areq->src == areq->dst) ?
1596 DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
4de9d0b5
LN
1597 /*
1598 * cipher in
1599 */
6a1e8d14
LC
1600 sg_count = talitos_sg_map(dev, areq->src, cryptlen, edesc,
1601 &desc->ptr[3], sg_count, 0, 0);
1602 if (sg_count > 1)
1603 sync_needed = true;
4de9d0b5
LN
1604
1605 /* cipher out */
6a1e8d14
LC
1606 if (areq->src != areq->dst) {
1607 sg_count = edesc->dst_nents ? : 1;
1608 if (!is_sec1 || sg_count == 1)
1609 dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1610 }
1611
1612 ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
1613 sg_count, 0, (edesc->src_nents + 1));
1614 if (ret > 1)
1615 sync_needed = true;
4de9d0b5
LN
1616
1617 /* iv out */
a2b35aa8 1618 map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
4de9d0b5
LN
1619 DMA_FROM_DEVICE);
1620
1621 /* last DWORD empty */
4de9d0b5 1622
6a1e8d14
LC
1623 if (sync_needed)
1624 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1625 edesc->dma_len, DMA_BIDIRECTIONAL);
1626
5228f0f7 1627 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
4de9d0b5
LN
1628 if (ret != -EINPROGRESS) {
1629 common_nonsnoop_unmap(dev, edesc, areq);
1630 kfree(edesc);
1631 }
1632 return ret;
1633}
1634
373960d7 1635static struct talitos_edesc *skcipher_edesc_alloc(struct skcipher_request *
62293a37 1636 areq, bool encrypt)
4de9d0b5 1637{
373960d7
AB
1638 struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1639 struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
1640 unsigned int ivsize = crypto_skcipher_ivsize(cipher);
4de9d0b5 1641
aeb4c132 1642 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
373960d7 1643 areq->iv, 0, areq->cryptlen, 0, ivsize, 0,
62293a37 1644 areq->base.flags, encrypt);
4de9d0b5
LN
1645}
1646
373960d7 1647static int skcipher_encrypt(struct skcipher_request *areq)
4de9d0b5 1648{
373960d7
AB
1649 struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1650 struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
4de9d0b5 1651 struct talitos_edesc *edesc;
ee483d32 1652 unsigned int blocksize =
373960d7 1653 crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher));
ee483d32 1654
373960d7 1655 if (!areq->cryptlen)
ee483d32
CL
1656 return 0;
1657
373960d7 1658 if (areq->cryptlen % blocksize)
ee483d32 1659 return -EINVAL;
4de9d0b5
LN
1660
1661 /* allocate extended descriptor */
373960d7 1662 edesc = skcipher_edesc_alloc(areq, true);
4de9d0b5
LN
1663 if (IS_ERR(edesc))
1664 return PTR_ERR(edesc);
1665
1666 /* set encrypt */
1667 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1668
373960d7 1669 return common_nonsnoop(edesc, areq, skcipher_done);
4de9d0b5
LN
1670}
1671
373960d7 1672static int skcipher_decrypt(struct skcipher_request *areq)
4de9d0b5 1673{
373960d7
AB
1674 struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(areq);
1675 struct talitos_ctx *ctx = crypto_skcipher_ctx(cipher);
4de9d0b5 1676 struct talitos_edesc *edesc;
ee483d32 1677 unsigned int blocksize =
373960d7 1678 crypto_tfm_alg_blocksize(crypto_skcipher_tfm(cipher));
ee483d32 1679
373960d7 1680 if (!areq->cryptlen)
ee483d32
CL
1681 return 0;
1682
373960d7 1683 if (areq->cryptlen % blocksize)
ee483d32 1684 return -EINVAL;
4de9d0b5
LN
1685
1686 /* allocate extended descriptor */
373960d7 1687 edesc = skcipher_edesc_alloc(areq, false);
4de9d0b5
LN
1688 if (IS_ERR(edesc))
1689 return PTR_ERR(edesc);
1690
1691 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1692
373960d7 1693 return common_nonsnoop(edesc, areq, skcipher_done);
4de9d0b5
LN
1694}
1695
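/*
 * A minimal usage sketch, not part of this driver: how a kernel consumer
 * might drive the "cbc(aes)" skcipher registered by this file through the
 * generic crypto API, mirroring the wait pattern keyhash() uses further
 * below.  The function name and buffer handling are illustrative only; it
 * assumes <crypto/skcipher.h> and <linux/scatterlist.h> are available and
 * that buf points to DMA-able (non-stack) memory.
 */
static int example_cbc_aes_encrypt(const u8 *key, unsigned int keylen,
				   u8 *iv, u8 *buf, unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	struct crypto_wait wait;
	int ret;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_skcipher_setkey(tfm, key, keylen);
	if (ret)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	/* Encrypt len bytes in place; len must be a multiple of the block size. */
	crypto_init_wait(&wait);
	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);
	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return ret;
}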
497f2e6b
LN
1696static void common_nonsnoop_hash_unmap(struct device *dev,
1697 struct talitos_edesc *edesc,
1698 struct ahash_request *areq)
1699{
1700 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
7a6eda5b 1701 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
ad4cd51f
LC
1702 struct talitos_private *priv = dev_get_drvdata(dev);
1703 bool is_sec1 = has_ftr_sec1(priv);
1704 struct talitos_desc *desc = &edesc->desc;
58cdbc6d
CL
1705 struct talitos_desc *desc2 = (struct talitos_desc *)
1706 (edesc->buf + edesc->dma_len);
ad4cd51f
LC
1707
1708 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
1709 if (desc->next_desc &&
1710 desc->ptr[5].ptr != desc2->ptr[5].ptr)
1711 unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);
7a6eda5b
CL
1712 if (req_ctx->last)
1713 memcpy(areq->result, req_ctx->hw_context,
1714 crypto_ahash_digestsize(tfm));
497f2e6b 1715
58cdbc6d
CL
1716 if (req_ctx->psrc)
1717 talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);
032d197e 1718
ad4cd51f
LC
1719 /* When using hashctx-in, must unmap it. */
1720 if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
1721 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
1722 DMA_TO_DEVICE);
1723 else if (desc->next_desc)
1724 unmap_single_talitos_ptr(dev, &desc2->ptr[1],
1725 DMA_TO_DEVICE);
1726
1727 if (is_sec1 && req_ctx->nbuf)
1728 unmap_single_talitos_ptr(dev, &desc->ptr[3],
1729 DMA_TO_DEVICE);
1730
497f2e6b
LN
1731 if (edesc->dma_len)
1732 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1733 DMA_BIDIRECTIONAL);
1734
37b5e889
LC
1735 if (edesc->desc.next_desc)
1736 dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
1737 TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
497f2e6b
LN
1738}
1739
1740static void ahash_done(struct device *dev,
1741 struct talitos_desc *desc, void *context,
1742 int err)
1743{
1744 struct ahash_request *areq = context;
1745 struct talitos_edesc *edesc =
1746 container_of(desc, struct talitos_edesc, desc);
1747 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1748
1749 if (!req_ctx->last && req_ctx->to_hash_later) {
1750 /* Position any partial block for next update/final/finup */
3c0dd190 1751 req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
5e833bc4 1752 req_ctx->nbuf = req_ctx->to_hash_later;
497f2e6b
LN
1753 }
1754 common_nonsnoop_hash_unmap(dev, edesc, areq);
1755
1756 kfree(edesc);
1757
1758 areq->base.complete(&areq->base, err);
1759}
1760
2d02905e
LC
1761/*
1762 * SEC1 doesn't like hashing a 0-sized message, so we do the padding
1763 * ourselves and submit a padded block
1764 */
5b2cf268 1765static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
2d02905e
LC
1766 struct talitos_edesc *edesc,
1767 struct talitos_ptr *ptr)
1768{
1769 static u8 padded_hash[64] = {
1770 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1771 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1772 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1773 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1774 };
1775
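	/*
	 * padded_hash is the MD-style padding of an empty message for the
	 * 64-byte-block digests handled here (MD5, SHA-1, SHA-224, SHA-256):
	 * a single 0x80 byte, zeros, and a 64-bit bit-length of zero.  With
	 * MDEU padding disabled, hashing this one block yields the digest of
	 * the empty message.
	 */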
1776 pr_err_once("Bug in SEC1, padding ourself\n");
1777 edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1778 map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
1779 (char *)padded_hash, DMA_TO_DEVICE);
1780}
1781
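/*
 * Descriptor layout used by the hash path below: ptr[1] optionally reloads a
 * previously saved hash context, ptr[2] holds the HMAC key, ptr[3] the data
 * to hash, and ptr[5] receives either the final digest or the intermediate
 * context.  On SEC1, a second chained descriptor is built when buffered
 * bytes and new scatterlist data must be hashed in a single request.
 */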
497f2e6b
LN
1782static int common_nonsnoop_hash(struct talitos_edesc *edesc,
1783 struct ahash_request *areq, unsigned int length,
1784 void (*callback) (struct device *dev,
1785 struct talitos_desc *desc,
1786 void *context, int error))
1787{
1788 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1789 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1790 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1791 struct device *dev = ctx->dev;
1792 struct talitos_desc *desc = &edesc->desc;
032d197e 1793 int ret;
6a1e8d14 1794 bool sync_needed = false;
922f9dc8
LC
1795 struct talitos_private *priv = dev_get_drvdata(dev);
1796 bool is_sec1 = has_ftr_sec1(priv);
6a1e8d14 1797 int sg_count;
497f2e6b
LN
1798
1799 /* first DWORD empty */
497f2e6b 1800
60f208d7
KP
1801 /* hash context in */
1802 if (!req_ctx->first || req_ctx->swinit) {
6a4967c3
LC
1803 map_single_talitos_ptr_nosync(dev, &desc->ptr[1],
1804 req_ctx->hw_context_size,
1805 req_ctx->hw_context,
1806 DMA_TO_DEVICE);
60f208d7 1807 req_ctx->swinit = 0;
497f2e6b 1808 }
afd62fa2
LC
1809 /* Indicate next op is not the first. */
1810 req_ctx->first = 0;
497f2e6b
LN
1811
1812 /* HMAC key */
1813 if (ctx->keylen)
2e13ce08
LC
1814 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
1815 is_sec1);
497f2e6b 1816
37b5e889
LC
1817 if (is_sec1 && req_ctx->nbuf)
1818 length -= req_ctx->nbuf;
1819
6a1e8d14
LC
1820 sg_count = edesc->src_nents ?: 1;
1821 if (is_sec1 && sg_count > 1)
58cdbc6d 1822 sg_copy_to_buffer(req_ctx->psrc, sg_count, edesc->buf, length);
37b5e889 1823 else if (length)
6a1e8d14
LC
1824 sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
1825 DMA_TO_DEVICE);
497f2e6b
LN
1826 /*
1827 * data in
1828 */
37b5e889 1829 if (is_sec1 && req_ctx->nbuf) {
ad4cd51f
LC
1830 map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
1831 req_ctx->buf[req_ctx->buf_idx],
1832 DMA_TO_DEVICE);
37b5e889
LC
1833 } else {
1834 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
58cdbc6d 1835 &desc->ptr[3], sg_count, 0, 0);
37b5e889
LC
1836 if (sg_count > 1)
1837 sync_needed = true;
1838 }
497f2e6b
LN
1839
1840 /* fifth DWORD empty */
497f2e6b
LN
1841
1842 /* hash/HMAC out -or- hash context out */
1843 if (req_ctx->last)
1844 map_single_talitos_ptr(dev, &desc->ptr[5],
1845 crypto_ahash_digestsize(tfm),
7a6eda5b 1846 req_ctx->hw_context, DMA_FROM_DEVICE);
497f2e6b 1847 else
6a4967c3
LC
1848 map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1849 req_ctx->hw_context_size,
1850 req_ctx->hw_context,
1851 DMA_FROM_DEVICE);
497f2e6b
LN
1852
1853 /* last DWORD empty */
497f2e6b 1854
2d02905e
LC
1855 if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
1856 talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);
1857
37b5e889 1858 if (is_sec1 && req_ctx->nbuf && length) {
58cdbc6d
CL
1859 struct talitos_desc *desc2 = (struct talitos_desc *)
1860 (edesc->buf + edesc->dma_len);
37b5e889
LC
1861 dma_addr_t next_desc;
1862
1863 memset(desc2, 0, sizeof(*desc2));
1864 desc2->hdr = desc->hdr;
1865 desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
1866 desc2->hdr1 = desc2->hdr;
1867 desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1868 desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
1869 desc->hdr &= ~DESC_HDR_DONE_NOTIFY;
1870
ad4cd51f
LC
1871 if (desc->ptr[1].ptr)
1872 copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
1873 is_sec1);
1874 else
6a4967c3
LC
1875 map_single_talitos_ptr_nosync(dev, &desc2->ptr[1],
1876 req_ctx->hw_context_size,
1877 req_ctx->hw_context,
1878 DMA_TO_DEVICE);
37b5e889
LC
1879 copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
1880 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
58cdbc6d 1881 &desc2->ptr[3], sg_count, 0, 0);
37b5e889
LC
1882 if (sg_count > 1)
1883 sync_needed = true;
1884 copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
1885 if (req_ctx->last)
6a4967c3
LC
1886 map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1887 req_ctx->hw_context_size,
1888 req_ctx->hw_context,
1889 DMA_FROM_DEVICE);
37b5e889
LC
1890
1891 next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
1892 DMA_BIDIRECTIONAL);
1893 desc->next_desc = cpu_to_be32(next_desc);
1894 }
1895
6a1e8d14
LC
1896 if (sync_needed)
1897 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1898 edesc->dma_len, DMA_BIDIRECTIONAL);
1899
5228f0f7 1900 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
497f2e6b
LN
1901 if (ret != -EINPROGRESS) {
1902 common_nonsnoop_hash_unmap(dev, edesc, areq);
1903 kfree(edesc);
1904 }
1905 return ret;
1906}
1907
1908static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
1909 unsigned int nbytes)
1910{
1911 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1912 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1913 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
37b5e889
LC
1914 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1915 bool is_sec1 = has_ftr_sec1(priv);
1916
1917 if (is_sec1)
1918 nbytes -= req_ctx->nbuf;
497f2e6b 1919
aeb4c132 1920 return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
62293a37 1921 nbytes, 0, 0, 0, areq->base.flags, false);
497f2e6b
LN
1922}
1923
1924static int ahash_init(struct ahash_request *areq)
1925{
1926 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
6a4967c3
LC
1927 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1928 struct device *dev = ctx->dev;
497f2e6b 1929 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
49f9783b 1930 unsigned int size;
6a4967c3 1931 dma_addr_t dma;
497f2e6b
LN
1932
1933 /* Initialize the context */
3c0dd190 1934 req_ctx->buf_idx = 0;
5e833bc4 1935 req_ctx->nbuf = 0;
60f208d7
KP
1936 req_ctx->first = 1; /* first indicates h/w must init its context */
1937 req_ctx->swinit = 0; /* assume h/w init of context */
49f9783b 1938 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
497f2e6b
LN
1939 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
1940 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
49f9783b 1941 req_ctx->hw_context_size = size;
497f2e6b 1942
6a4967c3
LC
1943 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
1944 DMA_TO_DEVICE);
1945 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
1946
497f2e6b
LN
1947 return 0;
1948}
1949
60f208d7
KP
1950/*
1951 * On h/w without explicit sha224 support, we initialize the h/w context
1952 * manually with the sha224 constants and tell it to run sha256.
1953 */
1954static int ahash_init_sha224_swinit(struct ahash_request *areq)
1955{
1956 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1957
a752447a
KP
1958 req_ctx->hw_context[0] = SHA224_H0;
1959 req_ctx->hw_context[1] = SHA224_H1;
1960 req_ctx->hw_context[2] = SHA224_H2;
1961 req_ctx->hw_context[3] = SHA224_H3;
1962 req_ctx->hw_context[4] = SHA224_H4;
1963 req_ctx->hw_context[5] = SHA224_H5;
1964 req_ctx->hw_context[6] = SHA224_H6;
1965 req_ctx->hw_context[7] = SHA224_H7;
60f208d7
KP
1966
1967 /* init 64-bit count */
1968 req_ctx->hw_context[8] = 0;
1969 req_ctx->hw_context[9] = 0;
1970
6a4967c3
LC
1971 ahash_init(areq);
1972 req_ctx->swinit = 1; /* prevent h/w from initializing context with sha256 values */
1973
60f208d7
KP
1974 return 0;
1975}
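/*
 * The SHA224_H0..SHA224_H7 values loaded above are the standard SHA-224
 * initial hash state (defined in <crypto/sha2.h>); with swinit set the
 * engine skips its own init, runs plain SHA-256 rounds over this state, and
 * the completion path copies back only crypto_ahash_digestsize() bytes,
 * i.e. the 28-byte SHA-224 digest.
 */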
1976
497f2e6b
LN
1977static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
1978{
1979 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1980 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1981 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1982 struct talitos_edesc *edesc;
1983 unsigned int blocksize =
1984 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
1985 unsigned int nbytes_to_hash;
1986 unsigned int to_hash_later;
5e833bc4 1987 unsigned int nsg;
8e409fe1 1988 int nents;
37b5e889
LC
1989 struct device *dev = ctx->dev;
1990 struct talitos_private *priv = dev_get_drvdata(dev);
1991 bool is_sec1 = has_ftr_sec1(priv);
3c0dd190 1992 u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];
497f2e6b 1993
5e833bc4
LN
1994 if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
1995 /* Buffer up to one whole block */
8e409fe1
LC
1996 nents = sg_nents_for_len(areq->src, nbytes);
1997 if (nents < 0) {
1998 dev_err(ctx->dev, "Invalid number of src SG.\n");
1999 return nents;
2000 }
2001 sg_copy_to_buffer(areq->src, nents,
3c0dd190 2002 ctx_buf + req_ctx->nbuf, nbytes);
5e833bc4 2003 req_ctx->nbuf += nbytes;
497f2e6b
LN
2004 return 0;
2005 }
2006
5e833bc4
LN
2007 /* At least (blocksize + 1) bytes are available to hash */
2008 nbytes_to_hash = nbytes + req_ctx->nbuf;
2009 to_hash_later = nbytes_to_hash & (blocksize - 1);
2010
2011 if (req_ctx->last)
2012 to_hash_later = 0;
2013 else if (to_hash_later)
2014 /* There is a partial block. Hash the full block(s) now */
2015 nbytes_to_hash -= to_hash_later;
2016 else {
2017 /* Keep one block buffered */
2018 nbytes_to_hash -= blocksize;
2019 to_hash_later = blocksize;
2020 }
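	/*
	 * Example: with a 64-byte block size, 16 bytes already buffered and a
	 * 200-byte non-final update, nbytes_to_hash starts at 216 and
	 * to_hash_later becomes 216 & 63 = 24, so 192 bytes are hashed now
	 * and 24 bytes are carried over for the next request.
	 */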
2021
2022 /* Chain in any previously buffered data */
37b5e889 2023 if (!is_sec1 && req_ctx->nbuf) {
5e833bc4
LN
2024 nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
2025 sg_init_table(req_ctx->bufsl, nsg);
3c0dd190 2026 sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
5e833bc4 2027 if (nsg > 1)
c56f6d12 2028 sg_chain(req_ctx->bufsl, 2, areq->src);
497f2e6b 2029 req_ctx->psrc = req_ctx->bufsl;
37b5e889 2030 } else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
58cdbc6d
CL
2031 int offset;
2032
37b5e889
LC
2033 if (nbytes_to_hash > blocksize)
2034 offset = blocksize - req_ctx->nbuf;
2035 else
2036 offset = nbytes_to_hash - req_ctx->nbuf;
2037 nents = sg_nents_for_len(areq->src, offset);
2038 if (nents < 0) {
2039 dev_err(ctx->dev, "Invalid number of src SG.\n");
2040 return nents;
2041 }
2042 sg_copy_to_buffer(areq->src, nents,
3c0dd190 2043 ctx_buf + req_ctx->nbuf, offset);
37b5e889 2044 req_ctx->nbuf += offset;
58cdbc6d
CL
2045 req_ctx->psrc = scatterwalk_ffwd(req_ctx->bufsl, areq->src,
2046 offset);
5e833bc4 2047 } else
497f2e6b 2048 req_ctx->psrc = areq->src;
5e833bc4
LN
2049
2050 if (to_hash_later) {
8e409fe1
LC
2051 nents = sg_nents_for_len(areq->src, nbytes);
2052 if (nents < 0) {
2053 dev_err(ctx->dev, "Invalid number of src SG.\n");
2054 return nents;
2055 }
d0525723 2056 sg_pcopy_to_buffer(areq->src, nents,
3c0dd190 2057 req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
5e833bc4
LN
2058 to_hash_later,
2059 nbytes - to_hash_later);
497f2e6b 2060 }
5e833bc4 2061 req_ctx->to_hash_later = to_hash_later;
497f2e6b 2062
5e833bc4 2063 /* Allocate extended descriptor */
497f2e6b
LN
2064 edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
2065 if (IS_ERR(edesc))
2066 return PTR_ERR(edesc);
2067
2068 edesc->desc.hdr = ctx->desc_hdr_template;
2069
2070 /* On last one, request SEC to pad; otherwise continue */
2071 if (req_ctx->last)
2072 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
2073 else
2074 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;
2075
60f208d7
KP
2076 /* request SEC to INIT hash. */
2077 if (req_ctx->first && !req_ctx->swinit)
497f2e6b
LN
2078 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;
2079
2080 /* When the tfm context has a keylen, it's an HMAC.
2081 * A first or last (i.e. not middle) descriptor must request HMAC.
2082 */
2083 if (ctx->keylen && (req_ctx->first || req_ctx->last))
2084 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;
2085
58cdbc6d 2086 return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, ahash_done);
497f2e6b
LN
2087}
2088
2089static int ahash_update(struct ahash_request *areq)
2090{
2091 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2092
2093 req_ctx->last = 0;
2094
2095 return ahash_process_req(areq, areq->nbytes);
2096}
2097
2098static int ahash_final(struct ahash_request *areq)
2099{
2100 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2101
2102 req_ctx->last = 1;
2103
2104 return ahash_process_req(areq, 0);
2105}
2106
2107static int ahash_finup(struct ahash_request *areq)
2108{
2109 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2110
2111 req_ctx->last = 1;
2112
2113 return ahash_process_req(areq, areq->nbytes);
2114}
2115
2116static int ahash_digest(struct ahash_request *areq)
2117{
2118 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
60f208d7 2119 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
497f2e6b 2120
60f208d7 2121 ahash->init(areq);
497f2e6b
LN
2122 req_ctx->last = 1;
2123
2124 return ahash_process_req(areq, areq->nbytes);
2125}
2126
3639ca84
HG
2127static int ahash_export(struct ahash_request *areq, void *out)
2128{
2129 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2130 struct talitos_export_state *export = out;
6a4967c3
LC
2131 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2132 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2133 struct device *dev = ctx->dev;
2134 dma_addr_t dma;
2135
2136 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2137 DMA_FROM_DEVICE);
2138 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_FROM_DEVICE);
3639ca84
HG
2139
2140 memcpy(export->hw_context, req_ctx->hw_context,
2141 req_ctx->hw_context_size);
3c0dd190 2142 memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
3639ca84
HG
2143 export->swinit = req_ctx->swinit;
2144 export->first = req_ctx->first;
2145 export->last = req_ctx->last;
2146 export->to_hash_later = req_ctx->to_hash_later;
2147 export->nbuf = req_ctx->nbuf;
2148
2149 return 0;
2150}
2151
2152static int ahash_import(struct ahash_request *areq, const void *in)
2153{
2154 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2155 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
6a4967c3
LC
2156 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2157 struct device *dev = ctx->dev;
3639ca84 2158 const struct talitos_export_state *export = in;
49f9783b 2159 unsigned int size;
6a4967c3 2160 dma_addr_t dma;
3639ca84
HG
2161
2162 memset(req_ctx, 0, sizeof(*req_ctx));
49f9783b 2163 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
3639ca84
HG
2164 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2165 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
49f9783b 2166 req_ctx->hw_context_size = size;
49f9783b 2167 memcpy(req_ctx->hw_context, export->hw_context, size);
3c0dd190 2168 memcpy(req_ctx->buf[0], export->buf, export->nbuf);
3639ca84
HG
2169 req_ctx->swinit = export->swinit;
2170 req_ctx->first = export->first;
2171 req_ctx->last = export->last;
2172 req_ctx->to_hash_later = export->to_hash_later;
2173 req_ctx->nbuf = export->nbuf;
2174
6a4967c3
LC
2175 dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2176 DMA_TO_DEVICE);
2177 dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
2178
3639ca84
HG
2179 return 0;
2180}
2181
79b3a418
LN
2182static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
2183 u8 *hash)
2184{
2185 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2186
2187 struct scatterlist sg[1];
2188 struct ahash_request *req;
f1c90ac3 2189 struct crypto_wait wait;
79b3a418
LN
2190 int ret;
2191
f1c90ac3 2192 crypto_init_wait(&wait);
79b3a418
LN
2193
2194 req = ahash_request_alloc(tfm, GFP_KERNEL);
2195 if (!req)
2196 return -ENOMEM;
2197
2198 /* Keep tfm keylen == 0 during hash of the long key */
2199 ctx->keylen = 0;
2200 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
f1c90ac3 2201 crypto_req_done, &wait);
79b3a418
LN
2202
2203 sg_init_one(&sg[0], key, keylen);
2204
2205 ahash_request_set_crypt(req, sg, hash, keylen);
f1c90ac3
GBY
2206 ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
2207
79b3a418
LN
2208 ahash_request_free(req);
2209
2210 return ret;
2211}
2212
2213static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2214 unsigned int keylen)
2215{
2216 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2e13ce08 2217 struct device *dev = ctx->dev;
79b3a418
LN
2218 unsigned int blocksize =
2219 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2220 unsigned int digestsize = crypto_ahash_digestsize(tfm);
2221 unsigned int keysize = keylen;
2222 u8 hash[SHA512_DIGEST_SIZE];
2223 int ret;
2224
2225 if (keylen <= blocksize)
2226 memcpy(ctx->key, key, keysize);
2227 else {
2228 /* Must get the hash of the long key */
2229 ret = keyhash(tfm, key, keylen, hash);
2230
674f368a 2231 if (ret)
79b3a418 2232 return -EINVAL;
79b3a418
LN
2233
2234 keysize = digestsize;
2235 memcpy(ctx->key, hash, digestsize);
2236 }
2237
2e13ce08
LC
2238 if (ctx->keylen)
2239 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
2240
79b3a418 2241 ctx->keylen = keysize;
2e13ce08 2242 ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
79b3a418
LN
2243
2244 return 0;
2245}
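/*
 * This is the usual HMAC key preprocessing: keys no longer than the block
 * size are used as-is, while longer keys are first reduced to their digest.
 * For hmac(sha256), with its 64-byte block, a 100-byte key is therefore
 * replaced by its 32-byte SHA-256 digest before being loaded as the MDEU key.
 */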
2246
2247
9c4a7965 2248struct talitos_alg_template {
d5e4aaef 2249 u32 type;
b0057763 2250 u32 priority;
d5e4aaef 2251 union {
373960d7 2252 struct skcipher_alg skcipher;
acbf7c62 2253 struct ahash_alg hash;
aeb4c132 2254 struct aead_alg aead;
d5e4aaef 2255 } alg;
9c4a7965
KP
2256 __be32 desc_hdr_template;
2257};
2258
2259static struct talitos_alg_template driver_algs[] = {
991155ba 2260 /* AEAD algorithms. These use a single-pass ipsec_esp descriptor */
d5e4aaef 2261 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2262 .alg.aead = {
2263 .base = {
2264 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2265 .cra_driver_name = "authenc-hmac-sha1-"
2266 "cbc-aes-talitos",
2267 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2268 .cra_flags = CRYPTO_ALG_ASYNC |
2269 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2270 },
2271 .ivsize = AES_BLOCK_SIZE,
2272 .maxauthsize = SHA1_DIGEST_SIZE,
56af8cd4 2273 },
9c4a7965
KP
2274 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2275 DESC_HDR_SEL0_AESU |
2276 DESC_HDR_MODE0_AESU_CBC |
2277 DESC_HDR_SEL1_MDEUA |
2278 DESC_HDR_MODE1_MDEU_INIT |
2279 DESC_HDR_MODE1_MDEU_PAD |
2280 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
70bcaca7 2281 },
7405c8d7
LC
2282 { .type = CRYPTO_ALG_TYPE_AEAD,
2283 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2284 .alg.aead = {
2285 .base = {
2286 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2287 .cra_driver_name = "authenc-hmac-sha1-"
a1a42f84 2288 "cbc-aes-talitos-hsna",
7405c8d7 2289 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2290 .cra_flags = CRYPTO_ALG_ASYNC |
2291 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2292 },
2293 .ivsize = AES_BLOCK_SIZE,
2294 .maxauthsize = SHA1_DIGEST_SIZE,
2295 },
2296 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2297 DESC_HDR_SEL0_AESU |
2298 DESC_HDR_MODE0_AESU_CBC |
2299 DESC_HDR_SEL1_MDEUA |
2300 DESC_HDR_MODE1_MDEU_INIT |
2301 DESC_HDR_MODE1_MDEU_PAD |
2302 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2303 },
d5e4aaef 2304 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2305 .alg.aead = {
2306 .base = {
2307 .cra_name = "authenc(hmac(sha1),"
2308 "cbc(des3_ede))",
2309 .cra_driver_name = "authenc-hmac-sha1-"
2310 "cbc-3des-talitos",
2311 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2312 .cra_flags = CRYPTO_ALG_ASYNC |
2313 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2314 },
2315 .ivsize = DES3_EDE_BLOCK_SIZE,
2316 .maxauthsize = SHA1_DIGEST_SIZE,
ef7c5c85 2317 .setkey = aead_des3_setkey,
56af8cd4 2318 },
70bcaca7
LN
2319 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2320 DESC_HDR_SEL0_DEU |
2321 DESC_HDR_MODE0_DEU_CBC |
2322 DESC_HDR_MODE0_DEU_3DES |
2323 DESC_HDR_SEL1_MDEUA |
2324 DESC_HDR_MODE1_MDEU_INIT |
2325 DESC_HDR_MODE1_MDEU_PAD |
2326 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
3952f17e 2327 },
7405c8d7
LC
2328 { .type = CRYPTO_ALG_TYPE_AEAD,
2329 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2330 .alg.aead = {
2331 .base = {
2332 .cra_name = "authenc(hmac(sha1),"
2333 "cbc(des3_ede))",
2334 .cra_driver_name = "authenc-hmac-sha1-"
a1a42f84 2335 "cbc-3des-talitos-hsna",
7405c8d7 2336 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2337 .cra_flags = CRYPTO_ALG_ASYNC |
2338 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2339 },
2340 .ivsize = DES3_EDE_BLOCK_SIZE,
2341 .maxauthsize = SHA1_DIGEST_SIZE,
ef7c5c85 2342 .setkey = aead_des3_setkey,
7405c8d7
LC
2343 },
2344 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2345 DESC_HDR_SEL0_DEU |
2346 DESC_HDR_MODE0_DEU_CBC |
2347 DESC_HDR_MODE0_DEU_3DES |
2348 DESC_HDR_SEL1_MDEUA |
2349 DESC_HDR_MODE1_MDEU_INIT |
2350 DESC_HDR_MODE1_MDEU_PAD |
2351 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2352 },
357fb605 2353 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2354 .alg.aead = {
2355 .base = {
2356 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2357 .cra_driver_name = "authenc-hmac-sha224-"
2358 "cbc-aes-talitos",
2359 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2360 .cra_flags = CRYPTO_ALG_ASYNC |
2361 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2362 },
2363 .ivsize = AES_BLOCK_SIZE,
2364 .maxauthsize = SHA224_DIGEST_SIZE,
357fb605
HG
2365 },
2366 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2367 DESC_HDR_SEL0_AESU |
2368 DESC_HDR_MODE0_AESU_CBC |
2369 DESC_HDR_SEL1_MDEUA |
2370 DESC_HDR_MODE1_MDEU_INIT |
2371 DESC_HDR_MODE1_MDEU_PAD |
2372 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2373 },
7405c8d7
LC
2374 { .type = CRYPTO_ALG_TYPE_AEAD,
2375 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2376 .alg.aead = {
2377 .base = {
2378 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2379 .cra_driver_name = "authenc-hmac-sha224-"
a1a42f84 2380 "cbc-aes-talitos-hsna",
7405c8d7 2381 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2382 .cra_flags = CRYPTO_ALG_ASYNC |
2383 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2384 },
2385 .ivsize = AES_BLOCK_SIZE,
2386 .maxauthsize = SHA224_DIGEST_SIZE,
2387 },
2388 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2389 DESC_HDR_SEL0_AESU |
2390 DESC_HDR_MODE0_AESU_CBC |
2391 DESC_HDR_SEL1_MDEUA |
2392 DESC_HDR_MODE1_MDEU_INIT |
2393 DESC_HDR_MODE1_MDEU_PAD |
2394 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2395 },
357fb605 2396 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2397 .alg.aead = {
2398 .base = {
2399 .cra_name = "authenc(hmac(sha224),"
2400 "cbc(des3_ede))",
2401 .cra_driver_name = "authenc-hmac-sha224-"
2402 "cbc-3des-talitos",
2403 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2404 .cra_flags = CRYPTO_ALG_ASYNC |
2405 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2406 },
2407 .ivsize = DES3_EDE_BLOCK_SIZE,
2408 .maxauthsize = SHA224_DIGEST_SIZE,
ef7c5c85 2409 .setkey = aead_des3_setkey,
357fb605
HG
2410 },
2411 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2412 DESC_HDR_SEL0_DEU |
2413 DESC_HDR_MODE0_DEU_CBC |
2414 DESC_HDR_MODE0_DEU_3DES |
2415 DESC_HDR_SEL1_MDEUA |
2416 DESC_HDR_MODE1_MDEU_INIT |
2417 DESC_HDR_MODE1_MDEU_PAD |
2418 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2419 },
7405c8d7
LC
2420 { .type = CRYPTO_ALG_TYPE_AEAD,
2421 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2422 .alg.aead = {
2423 .base = {
2424 .cra_name = "authenc(hmac(sha224),"
2425 "cbc(des3_ede))",
2426 .cra_driver_name = "authenc-hmac-sha224-"
a1a42f84 2427 "cbc-3des-talitos-hsna",
7405c8d7 2428 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2429 .cra_flags = CRYPTO_ALG_ASYNC |
2430 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2431 },
2432 .ivsize = DES3_EDE_BLOCK_SIZE,
2433 .maxauthsize = SHA224_DIGEST_SIZE,
ef7c5c85 2434 .setkey = aead_des3_setkey,
7405c8d7
LC
2435 },
2436 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2437 DESC_HDR_SEL0_DEU |
2438 DESC_HDR_MODE0_DEU_CBC |
2439 DESC_HDR_MODE0_DEU_3DES |
2440 DESC_HDR_SEL1_MDEUA |
2441 DESC_HDR_MODE1_MDEU_INIT |
2442 DESC_HDR_MODE1_MDEU_PAD |
2443 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2444 },
d5e4aaef 2445 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2446 .alg.aead = {
2447 .base = {
2448 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2449 .cra_driver_name = "authenc-hmac-sha256-"
2450 "cbc-aes-talitos",
2451 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2452 .cra_flags = CRYPTO_ALG_ASYNC |
2453 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2454 },
2455 .ivsize = AES_BLOCK_SIZE,
2456 .maxauthsize = SHA256_DIGEST_SIZE,
56af8cd4 2457 },
3952f17e
LN
2458 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2459 DESC_HDR_SEL0_AESU |
2460 DESC_HDR_MODE0_AESU_CBC |
2461 DESC_HDR_SEL1_MDEUA |
2462 DESC_HDR_MODE1_MDEU_INIT |
2463 DESC_HDR_MODE1_MDEU_PAD |
2464 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2465 },
7405c8d7
LC
2466 { .type = CRYPTO_ALG_TYPE_AEAD,
2467 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2468 .alg.aead = {
2469 .base = {
2470 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2471 .cra_driver_name = "authenc-hmac-sha256-"
a1a42f84 2472 "cbc-aes-talitos-hsna",
7405c8d7 2473 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2474 .cra_flags = CRYPTO_ALG_ASYNC |
2475 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2476 },
2477 .ivsize = AES_BLOCK_SIZE,
2478 .maxauthsize = SHA256_DIGEST_SIZE,
2479 },
2480 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2481 DESC_HDR_SEL0_AESU |
2482 DESC_HDR_MODE0_AESU_CBC |
2483 DESC_HDR_SEL1_MDEUA |
2484 DESC_HDR_MODE1_MDEU_INIT |
2485 DESC_HDR_MODE1_MDEU_PAD |
2486 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2487 },
d5e4aaef 2488 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2489 .alg.aead = {
2490 .base = {
2491 .cra_name = "authenc(hmac(sha256),"
2492 "cbc(des3_ede))",
2493 .cra_driver_name = "authenc-hmac-sha256-"
2494 "cbc-3des-talitos",
2495 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2496 .cra_flags = CRYPTO_ALG_ASYNC |
2497 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2498 },
2499 .ivsize = DES3_EDE_BLOCK_SIZE,
2500 .maxauthsize = SHA256_DIGEST_SIZE,
ef7c5c85 2501 .setkey = aead_des3_setkey,
56af8cd4 2502 },
3952f17e
LN
2503 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2504 DESC_HDR_SEL0_DEU |
2505 DESC_HDR_MODE0_DEU_CBC |
2506 DESC_HDR_MODE0_DEU_3DES |
2507 DESC_HDR_SEL1_MDEUA |
2508 DESC_HDR_MODE1_MDEU_INIT |
2509 DESC_HDR_MODE1_MDEU_PAD |
2510 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2511 },
7405c8d7
LC
2512 { .type = CRYPTO_ALG_TYPE_AEAD,
2513 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2514 .alg.aead = {
2515 .base = {
2516 .cra_name = "authenc(hmac(sha256),"
2517 "cbc(des3_ede))",
2518 .cra_driver_name = "authenc-hmac-sha256-"
a1a42f84 2519 "cbc-3des-talitos-hsna",
7405c8d7 2520 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2521 .cra_flags = CRYPTO_ALG_ASYNC |
2522 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2523 },
2524 .ivsize = DES3_EDE_BLOCK_SIZE,
2525 .maxauthsize = SHA256_DIGEST_SIZE,
ef7c5c85 2526 .setkey = aead_des3_setkey,
7405c8d7
LC
2527 },
2528 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2529 DESC_HDR_SEL0_DEU |
2530 DESC_HDR_MODE0_DEU_CBC |
2531 DESC_HDR_MODE0_DEU_3DES |
2532 DESC_HDR_SEL1_MDEUA |
2533 DESC_HDR_MODE1_MDEU_INIT |
2534 DESC_HDR_MODE1_MDEU_PAD |
2535 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2536 },
d5e4aaef 2537 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2538 .alg.aead = {
2539 .base = {
2540 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2541 .cra_driver_name = "authenc-hmac-sha384-"
2542 "cbc-aes-talitos",
2543 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2544 .cra_flags = CRYPTO_ALG_ASYNC |
2545 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2546 },
2547 .ivsize = AES_BLOCK_SIZE,
2548 .maxauthsize = SHA384_DIGEST_SIZE,
357fb605
HG
2549 },
2550 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2551 DESC_HDR_SEL0_AESU |
2552 DESC_HDR_MODE0_AESU_CBC |
2553 DESC_HDR_SEL1_MDEUB |
2554 DESC_HDR_MODE1_MDEU_INIT |
2555 DESC_HDR_MODE1_MDEU_PAD |
2556 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2557 },
2558 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2559 .alg.aead = {
2560 .base = {
2561 .cra_name = "authenc(hmac(sha384),"
2562 "cbc(des3_ede))",
2563 .cra_driver_name = "authenc-hmac-sha384-"
2564 "cbc-3des-talitos",
2565 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2566 .cra_flags = CRYPTO_ALG_ASYNC |
2567 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2568 },
2569 .ivsize = DES3_EDE_BLOCK_SIZE,
2570 .maxauthsize = SHA384_DIGEST_SIZE,
ef7c5c85 2571 .setkey = aead_des3_setkey,
357fb605
HG
2572 },
2573 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2574 DESC_HDR_SEL0_DEU |
2575 DESC_HDR_MODE0_DEU_CBC |
2576 DESC_HDR_MODE0_DEU_3DES |
2577 DESC_HDR_SEL1_MDEUB |
2578 DESC_HDR_MODE1_MDEU_INIT |
2579 DESC_HDR_MODE1_MDEU_PAD |
2580 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2581 },
2582 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2583 .alg.aead = {
2584 .base = {
2585 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2586 .cra_driver_name = "authenc-hmac-sha512-"
2587 "cbc-aes-talitos",
2588 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2589 .cra_flags = CRYPTO_ALG_ASYNC |
2590 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2591 },
2592 .ivsize = AES_BLOCK_SIZE,
2593 .maxauthsize = SHA512_DIGEST_SIZE,
357fb605
HG
2594 },
2595 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2596 DESC_HDR_SEL0_AESU |
2597 DESC_HDR_MODE0_AESU_CBC |
2598 DESC_HDR_SEL1_MDEUB |
2599 DESC_HDR_MODE1_MDEU_INIT |
2600 DESC_HDR_MODE1_MDEU_PAD |
2601 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2602 },
2603 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2604 .alg.aead = {
2605 .base = {
2606 .cra_name = "authenc(hmac(sha512),"
2607 "cbc(des3_ede))",
2608 .cra_driver_name = "authenc-hmac-sha512-"
2609 "cbc-3des-talitos",
2610 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2611 .cra_flags = CRYPTO_ALG_ASYNC |
2612 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2613 },
2614 .ivsize = DES3_EDE_BLOCK_SIZE,
2615 .maxauthsize = SHA512_DIGEST_SIZE,
ef7c5c85 2616 .setkey = aead_des3_setkey,
357fb605
HG
2617 },
2618 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2619 DESC_HDR_SEL0_DEU |
2620 DESC_HDR_MODE0_DEU_CBC |
2621 DESC_HDR_MODE0_DEU_3DES |
2622 DESC_HDR_SEL1_MDEUB |
2623 DESC_HDR_MODE1_MDEU_INIT |
2624 DESC_HDR_MODE1_MDEU_PAD |
2625 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2626 },
2627 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2628 .alg.aead = {
2629 .base = {
2630 .cra_name = "authenc(hmac(md5),cbc(aes))",
2631 .cra_driver_name = "authenc-hmac-md5-"
2632 "cbc-aes-talitos",
2633 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2634 .cra_flags = CRYPTO_ALG_ASYNC |
2635 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2636 },
2637 .ivsize = AES_BLOCK_SIZE,
2638 .maxauthsize = MD5_DIGEST_SIZE,
56af8cd4 2639 },
3952f17e
LN
2640 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2641 DESC_HDR_SEL0_AESU |
2642 DESC_HDR_MODE0_AESU_CBC |
2643 DESC_HDR_SEL1_MDEUA |
2644 DESC_HDR_MODE1_MDEU_INIT |
2645 DESC_HDR_MODE1_MDEU_PAD |
2646 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2647 },
7405c8d7
LC
2648 { .type = CRYPTO_ALG_TYPE_AEAD,
2649 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2650 .alg.aead = {
2651 .base = {
2652 .cra_name = "authenc(hmac(md5),cbc(aes))",
2653 .cra_driver_name = "authenc-hmac-md5-"
a1a42f84 2654 "cbc-aes-talitos-hsna",
7405c8d7 2655 .cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2656 .cra_flags = CRYPTO_ALG_ASYNC |
2657 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2658 },
2659 .ivsize = AES_BLOCK_SIZE,
2660 .maxauthsize = MD5_DIGEST_SIZE,
2661 },
2662 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2663 DESC_HDR_SEL0_AESU |
2664 DESC_HDR_MODE0_AESU_CBC |
2665 DESC_HDR_SEL1_MDEUA |
2666 DESC_HDR_MODE1_MDEU_INIT |
2667 DESC_HDR_MODE1_MDEU_PAD |
2668 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2669 },
d5e4aaef 2670 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2671 .alg.aead = {
2672 .base = {
2673 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2674 .cra_driver_name = "authenc-hmac-md5-"
2675 "cbc-3des-talitos",
2676 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2677 .cra_flags = CRYPTO_ALG_ASYNC |
2678 CRYPTO_ALG_ALLOCATES_MEMORY,
aeb4c132
HX
2679 },
2680 .ivsize = DES3_EDE_BLOCK_SIZE,
2681 .maxauthsize = MD5_DIGEST_SIZE,
ef7c5c85 2682 .setkey = aead_des3_setkey,
56af8cd4 2683 },
3952f17e
LN
2684 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2685 DESC_HDR_SEL0_DEU |
2686 DESC_HDR_MODE0_DEU_CBC |
2687 DESC_HDR_MODE0_DEU_3DES |
2688 DESC_HDR_SEL1_MDEUA |
2689 DESC_HDR_MODE1_MDEU_INIT |
2690 DESC_HDR_MODE1_MDEU_PAD |
2691 DESC_HDR_MODE1_MDEU_MD5_HMAC,
4de9d0b5 2692 },
7405c8d7
LC
2693 { .type = CRYPTO_ALG_TYPE_AEAD,
2694 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2695 .alg.aead = {
2696 .base = {
2697 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2698 .cra_driver_name = "authenc-hmac-md5-"
a1a42f84 2699 "cbc-3des-talitos-hsna",
7405c8d7 2700 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2701 .cra_flags = CRYPTO_ALG_ASYNC |
2702 CRYPTO_ALG_ALLOCATES_MEMORY,
7405c8d7
LC
2703 },
2704 .ivsize = DES3_EDE_BLOCK_SIZE,
2705 .maxauthsize = MD5_DIGEST_SIZE,
ef7c5c85 2706 .setkey = aead_des3_setkey,
7405c8d7
LC
2707 },
2708 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2709 DESC_HDR_SEL0_DEU |
2710 DESC_HDR_MODE0_DEU_CBC |
2711 DESC_HDR_MODE0_DEU_3DES |
2712 DESC_HDR_SEL1_MDEUA |
2713 DESC_HDR_MODE1_MDEU_INIT |
2714 DESC_HDR_MODE1_MDEU_PAD |
2715 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2716 },
373960d7
AB
2717 /* SKCIPHER algorithms. */
2718 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2719 .alg.skcipher = {
2720 .base.cra_name = "ecb(aes)",
2721 .base.cra_driver_name = "ecb-aes-talitos",
2722 .base.cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2723 .base.cra_flags = CRYPTO_ALG_ASYNC |
2724 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2725 .min_keysize = AES_MIN_KEY_SIZE,
2726 .max_keysize = AES_MAX_KEY_SIZE,
2727 .setkey = skcipher_aes_setkey,
5e75ae1b
LC
2728 },
2729 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2730 DESC_HDR_SEL0_AESU,
2731 },
373960d7
AB
2732 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2733 .alg.skcipher = {
2734 .base.cra_name = "cbc(aes)",
2735 .base.cra_driver_name = "cbc-aes-talitos",
2736 .base.cra_blocksize = AES_BLOCK_SIZE,
b8aa7dc5
MP
2737 .base.cra_flags = CRYPTO_ALG_ASYNC |
2738 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2739 .min_keysize = AES_MIN_KEY_SIZE,
2740 .max_keysize = AES_MAX_KEY_SIZE,
2741 .ivsize = AES_BLOCK_SIZE,
2742 .setkey = skcipher_aes_setkey,
4de9d0b5
LN
2743 },
2744 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2745 DESC_HDR_SEL0_AESU |
2746 DESC_HDR_MODE0_AESU_CBC,
2747 },
373960d7
AB
2748 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2749 .alg.skcipher = {
2750 .base.cra_name = "ctr(aes)",
2751 .base.cra_driver_name = "ctr-aes-talitos",
2752 .base.cra_blocksize = 1,
b8aa7dc5
MP
2753 .base.cra_flags = CRYPTO_ALG_ASYNC |
2754 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2755 .min_keysize = AES_MIN_KEY_SIZE,
2756 .max_keysize = AES_MAX_KEY_SIZE,
2757 .ivsize = AES_BLOCK_SIZE,
2758 .setkey = skcipher_aes_setkey,
5e75ae1b 2759 },
70d355cc 2760 .desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
5e75ae1b
LC
2761 DESC_HDR_SEL0_AESU |
2762 DESC_HDR_MODE0_AESU_CTR,
2763 },
373960d7
AB
2764 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2765 .alg.skcipher = {
2766 .base.cra_name = "ecb(des)",
2767 .base.cra_driver_name = "ecb-des-talitos",
2768 .base.cra_blocksize = DES_BLOCK_SIZE,
b8aa7dc5
MP
2769 .base.cra_flags = CRYPTO_ALG_ASYNC |
2770 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2771 .min_keysize = DES_KEY_SIZE,
2772 .max_keysize = DES_KEY_SIZE,
2773 .setkey = skcipher_des_setkey,
5e75ae1b
LC
2774 },
2775 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2776 DESC_HDR_SEL0_DEU,
2777 },
373960d7
AB
2778 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2779 .alg.skcipher = {
2780 .base.cra_name = "cbc(des)",
2781 .base.cra_driver_name = "cbc-des-talitos",
2782 .base.cra_blocksize = DES_BLOCK_SIZE,
b8aa7dc5
MP
2783 .base.cra_flags = CRYPTO_ALG_ASYNC |
2784 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2785 .min_keysize = DES_KEY_SIZE,
2786 .max_keysize = DES_KEY_SIZE,
2787 .ivsize = DES_BLOCK_SIZE,
2788 .setkey = skcipher_des_setkey,
5e75ae1b
LC
2789 },
2790 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2791 DESC_HDR_SEL0_DEU |
2792 DESC_HDR_MODE0_DEU_CBC,
2793 },
373960d7
AB
2794 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2795 .alg.skcipher = {
2796 .base.cra_name = "ecb(des3_ede)",
2797 .base.cra_driver_name = "ecb-3des-talitos",
2798 .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2799 .base.cra_flags = CRYPTO_ALG_ASYNC |
2800 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2801 .min_keysize = DES3_EDE_KEY_SIZE,
2802 .max_keysize = DES3_EDE_KEY_SIZE,
2803 .setkey = skcipher_des3_setkey,
5e75ae1b
LC
2804 },
2805 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2806 DESC_HDR_SEL0_DEU |
2807 DESC_HDR_MODE0_DEU_3DES,
2808 },
373960d7
AB
2809 { .type = CRYPTO_ALG_TYPE_SKCIPHER,
2810 .alg.skcipher = {
2811 .base.cra_name = "cbc(des3_ede)",
2812 .base.cra_driver_name = "cbc-3des-talitos",
2813 .base.cra_blocksize = DES3_EDE_BLOCK_SIZE,
b8aa7dc5
MP
2814 .base.cra_flags = CRYPTO_ALG_ASYNC |
2815 CRYPTO_ALG_ALLOCATES_MEMORY,
373960d7
AB
2816 .min_keysize = DES3_EDE_KEY_SIZE,
2817 .max_keysize = DES3_EDE_KEY_SIZE,
2818 .ivsize = DES3_EDE_BLOCK_SIZE,
2819 .setkey = skcipher_des3_setkey,
4de9d0b5
LN
2820 },
2821 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2822 DESC_HDR_SEL0_DEU |
2823 DESC_HDR_MODE0_DEU_CBC |
2824 DESC_HDR_MODE0_DEU_3DES,
497f2e6b
LN
2825 },
2826 /* AHASH algorithms. */
2827 { .type = CRYPTO_ALG_TYPE_AHASH,
2828 .alg.hash = {
497f2e6b 2829 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2830 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2831 .halg.base = {
2832 .cra_name = "md5",
2833 .cra_driver_name = "md5-talitos",
b3988618 2834 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
b8aa7dc5
MP
2835 .cra_flags = CRYPTO_ALG_ASYNC |
2836 CRYPTO_ALG_ALLOCATES_MEMORY,
497f2e6b
LN
2837 }
2838 },
2839 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2840 DESC_HDR_SEL0_MDEUA |
2841 DESC_HDR_MODE0_MDEU_MD5,
2842 },
2843 { .type = CRYPTO_ALG_TYPE_AHASH,
2844 .alg.hash = {
497f2e6b 2845 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2846 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2847 .halg.base = {
2848 .cra_name = "sha1",
2849 .cra_driver_name = "sha1-talitos",
2850 .cra_blocksize = SHA1_BLOCK_SIZE,
b8aa7dc5
MP
2851 .cra_flags = CRYPTO_ALG_ASYNC |
2852 CRYPTO_ALG_ALLOCATES_MEMORY,
497f2e6b
LN
2853 }
2854 },
2855 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2856 DESC_HDR_SEL0_MDEUA |
2857 DESC_HDR_MODE0_MDEU_SHA1,
2858 },
60f208d7
KP
2859 { .type = CRYPTO_ALG_TYPE_AHASH,
2860 .alg.hash = {
60f208d7 2861 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2862 .halg.statesize = sizeof(struct talitos_export_state),
60f208d7
KP
2863 .halg.base = {
2864 .cra_name = "sha224",
2865 .cra_driver_name = "sha224-talitos",
2866 .cra_blocksize = SHA224_BLOCK_SIZE,
b8aa7dc5
MP
2867 .cra_flags = CRYPTO_ALG_ASYNC |
2868 CRYPTO_ALG_ALLOCATES_MEMORY,
60f208d7
KP
2869 }
2870 },
2871 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2872 DESC_HDR_SEL0_MDEUA |
2873 DESC_HDR_MODE0_MDEU_SHA224,
2874 },
497f2e6b
LN
2875 { .type = CRYPTO_ALG_TYPE_AHASH,
2876 .alg.hash = {
497f2e6b 2877 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 2878 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2879 .halg.base = {
2880 .cra_name = "sha256",
2881 .cra_driver_name = "sha256-talitos",
2882 .cra_blocksize = SHA256_BLOCK_SIZE,
b8aa7dc5
MP
2883 .cra_flags = CRYPTO_ALG_ASYNC |
2884 CRYPTO_ALG_ALLOCATES_MEMORY,
497f2e6b
LN
2885 }
2886 },
2887 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2888 DESC_HDR_SEL0_MDEUA |
2889 DESC_HDR_MODE0_MDEU_SHA256,
2890 },
2891 { .type = CRYPTO_ALG_TYPE_AHASH,
2892 .alg.hash = {
497f2e6b 2893 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 2894 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2895 .halg.base = {
2896 .cra_name = "sha384",
2897 .cra_driver_name = "sha384-talitos",
2898 .cra_blocksize = SHA384_BLOCK_SIZE,
b8aa7dc5
MP
2899 .cra_flags = CRYPTO_ALG_ASYNC |
2900 CRYPTO_ALG_ALLOCATES_MEMORY,
497f2e6b
LN
2901 }
2902 },
2903 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2904 DESC_HDR_SEL0_MDEUB |
2905 DESC_HDR_MODE0_MDEUB_SHA384,
2906 },
2907 { .type = CRYPTO_ALG_TYPE_AHASH,
2908 .alg.hash = {
497f2e6b 2909 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 2910 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2911 .halg.base = {
2912 .cra_name = "sha512",
2913 .cra_driver_name = "sha512-talitos",
2914 .cra_blocksize = SHA512_BLOCK_SIZE,
b8aa7dc5
MP
2915 .cra_flags = CRYPTO_ALG_ASYNC |
2916 CRYPTO_ALG_ALLOCATES_MEMORY,
497f2e6b
LN
2917 }
2918 },
2919 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2920 DESC_HDR_SEL0_MDEUB |
2921 DESC_HDR_MODE0_MDEUB_SHA512,
2922 },
79b3a418
LN
2923 { .type = CRYPTO_ALG_TYPE_AHASH,
2924 .alg.hash = {
79b3a418 2925 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2926 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2927 .halg.base = {
2928 .cra_name = "hmac(md5)",
2929 .cra_driver_name = "hmac-md5-talitos",
b3988618 2930 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
b8aa7dc5
MP
2931 .cra_flags = CRYPTO_ALG_ASYNC |
2932 CRYPTO_ALG_ALLOCATES_MEMORY,
79b3a418
LN
2933 }
2934 },
2935 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2936 DESC_HDR_SEL0_MDEUA |
2937 DESC_HDR_MODE0_MDEU_MD5,
2938 },
2939 { .type = CRYPTO_ALG_TYPE_AHASH,
2940 .alg.hash = {
79b3a418 2941 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2942 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2943 .halg.base = {
2944 .cra_name = "hmac(sha1)",
2945 .cra_driver_name = "hmac-sha1-talitos",
2946 .cra_blocksize = SHA1_BLOCK_SIZE,
b8aa7dc5
MP
2947 .cra_flags = CRYPTO_ALG_ASYNC |
2948 CRYPTO_ALG_ALLOCATES_MEMORY,
79b3a418
LN
2949 }
2950 },
2951 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2952 DESC_HDR_SEL0_MDEUA |
2953 DESC_HDR_MODE0_MDEU_SHA1,
2954 },
2955 { .type = CRYPTO_ALG_TYPE_AHASH,
2956 .alg.hash = {
79b3a418 2957 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2958 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2959 .halg.base = {
2960 .cra_name = "hmac(sha224)",
2961 .cra_driver_name = "hmac-sha224-talitos",
2962 .cra_blocksize = SHA224_BLOCK_SIZE,
b8aa7dc5
MP
2963 .cra_flags = CRYPTO_ALG_ASYNC |
2964 CRYPTO_ALG_ALLOCATES_MEMORY,
79b3a418
LN
2965 }
2966 },
2967 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2968 DESC_HDR_SEL0_MDEUA |
2969 DESC_HDR_MODE0_MDEU_SHA224,
2970 },
2971 { .type = CRYPTO_ALG_TYPE_AHASH,
2972 .alg.hash = {
79b3a418 2973 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 2974 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2975 .halg.base = {
2976 .cra_name = "hmac(sha256)",
2977 .cra_driver_name = "hmac-sha256-talitos",
2978 .cra_blocksize = SHA256_BLOCK_SIZE,
b8aa7dc5
MP
2979 .cra_flags = CRYPTO_ALG_ASYNC |
2980 CRYPTO_ALG_ALLOCATES_MEMORY,
79b3a418
LN
2981 }
2982 },
2983 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2984 DESC_HDR_SEL0_MDEUA |
2985 DESC_HDR_MODE0_MDEU_SHA256,
2986 },
2987 { .type = CRYPTO_ALG_TYPE_AHASH,
2988 .alg.hash = {
79b3a418 2989 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 2990 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2991 .halg.base = {
2992 .cra_name = "hmac(sha384)",
2993 .cra_driver_name = "hmac-sha384-talitos",
2994 .cra_blocksize = SHA384_BLOCK_SIZE,
b8aa7dc5
MP
2995 .cra_flags = CRYPTO_ALG_ASYNC |
2996 CRYPTO_ALG_ALLOCATES_MEMORY,
79b3a418
LN
2997 }
2998 },
2999 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3000 DESC_HDR_SEL0_MDEUB |
3001 DESC_HDR_MODE0_MDEUB_SHA384,
3002 },
3003 { .type = CRYPTO_ALG_TYPE_AHASH,
3004 .alg.hash = {
79b3a418 3005 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 3006 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
3007 .halg.base = {
3008 .cra_name = "hmac(sha512)",
3009 .cra_driver_name = "hmac-sha512-talitos",
3010 .cra_blocksize = SHA512_BLOCK_SIZE,
b8aa7dc5
MP
3011 .cra_flags = CRYPTO_ALG_ASYNC |
3012 CRYPTO_ALG_ALLOCATES_MEMORY,
79b3a418
LN
3013 }
3014 },
3015 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3016 DESC_HDR_SEL0_MDEUB |
3017 DESC_HDR_MODE0_MDEUB_SHA512,
3018 }
9c4a7965
KP
3019};
3020
3021struct talitos_crypto_alg {
3022 struct list_head entry;
3023 struct device *dev;
acbf7c62 3024 struct talitos_alg_template algt;
9c4a7965
KP
3025};
3026
89d124cb
JE
3027static int talitos_init_common(struct talitos_ctx *ctx,
3028 struct talitos_crypto_alg *talitos_alg)
9c4a7965 3029{
5228f0f7 3030 struct talitos_private *priv;
9c4a7965
KP
3031
3032 /* update context with ptr to dev */
3033 ctx->dev = talitos_alg->dev;
19bbbc63 3034
5228f0f7
KP
3035 /* assign SEC channel to tfm in round-robin fashion */
3036 priv = dev_get_drvdata(ctx->dev);
3037 ctx->ch = atomic_inc_return(&priv->last_chan) &
3038 (priv->num_channels - 1);
3039
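	/*
	 * The AND above acts as a cheap modulo on the assumption that
	 * num_channels is a power of two (typically 4 on SEC parts), so
	 * successive tfms are spread across the channels round-robin.
	 */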
9c4a7965 3040 /* copy descriptor header template value */
acbf7c62 3041 ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
9c4a7965 3042
602dba5a
KP
3043 /* select done notification */
3044 ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
3045
497f2e6b
LN
3046 return 0;
3047}
3048
373960d7 3049static int talitos_cra_init_aead(struct crypto_aead *tfm)
89d124cb 3050{
373960d7 3051 struct aead_alg *alg = crypto_aead_alg(tfm);
89d124cb 3052 struct talitos_crypto_alg *talitos_alg;
373960d7 3053 struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
89d124cb 3054
373960d7
AB
3055 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3056 algt.alg.aead);
89d124cb
JE
3057
3058 return talitos_init_common(ctx, talitos_alg);
3059}
3060
373960d7 3061static int talitos_cra_init_skcipher(struct crypto_skcipher *tfm)
497f2e6b 3062{
373960d7 3063 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
89d124cb 3064 struct talitos_crypto_alg *talitos_alg;
373960d7 3065 struct talitos_ctx *ctx = crypto_skcipher_ctx(tfm);
89d124cb
JE
3066
3067 talitos_alg = container_of(alg, struct talitos_crypto_alg,
373960d7 3068 algt.alg.skcipher);
89d124cb
JE
3069
3070 return talitos_init_common(ctx, talitos_alg);
9c4a7965
KP
3071}
3072
497f2e6b
LN
3073static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3074{
373960d7
AB
3075 struct crypto_alg *alg = tfm->__crt_alg;
3076 struct talitos_crypto_alg *talitos_alg;
497f2e6b
LN
3077 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3078
373960d7
AB
3079 talitos_alg = container_of(__crypto_ahash_alg(alg),
3080 struct talitos_crypto_alg,
3081 algt.alg.hash);
497f2e6b
LN
3082
3083 ctx->keylen = 0;
3084 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3085 sizeof(struct talitos_ahash_req_ctx));
3086
373960d7 3087 return talitos_init_common(ctx, talitos_alg);
497f2e6b
LN
3088}
3089
2e13ce08
LC
3090static void talitos_cra_exit(struct crypto_tfm *tfm)
3091{
3092 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3093 struct device *dev = ctx->dev;
3094
3095 if (ctx->keylen)
3096 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3097}
3098
9c4a7965
KP
3099/*
3100 * Given the alg's descriptor header template, determine whether the
3101 * descriptor type and primary/secondary execution units it requires
3102 * match the hw capabilities described in the device tree node.
3103 */
3104static int hw_supports(struct device *dev, __be32 desc_hdr_template)
3105{
3106 struct talitos_private *priv = dev_get_drvdata(dev);
3107 int ret;
3108
3109 ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
3110 (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
3111
3112 if (SECONDARY_EU(desc_hdr_template))
3113 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
3114 & priv->exec_units);
3115
3116 return ret;
3117}
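/*
 * For example, the single-pass ipsec_esp AEAD templates above require the
 * IPSEC_ESP descriptor type, the AESU or DEU as primary execution unit and
 * an MDEU as secondary; hw_supports() lets them register only when the
 * matching bits are set in priv->desc_types and priv->exec_units, which the
 * probe code derives from the capability properties in the device tree node.
 */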
3118
2dc11581 3119static int talitos_remove(struct platform_device *ofdev)
9c4a7965
KP
3120{
3121 struct device *dev = &ofdev->dev;
3122 struct talitos_private *priv = dev_get_drvdata(dev);
3123 struct talitos_crypto_alg *t_alg, *n;
3124 int i;
3125
3126 list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
acbf7c62 3127 switch (t_alg->algt.type) {
373960d7
AB
3128 case CRYPTO_ALG_TYPE_SKCIPHER:
3129 crypto_unregister_skcipher(&t_alg->algt.alg.skcipher);
acbf7c62 3130 break;
aeb4c132
HX
3131 case CRYPTO_ALG_TYPE_AEAD:
3132 crypto_unregister_aead(&t_alg->algt.alg.aead);
5fc194ea 3133 break;
acbf7c62
LN
3134 case CRYPTO_ALG_TYPE_AHASH:
3135 crypto_unregister_ahash(&t_alg->algt.alg.hash);
3136 break;
3137 }
9c4a7965 3138 list_del(&t_alg->entry);
9c4a7965
KP
3139 }
3140
3141 if (hw_supports(dev, DESC_HDR_SEL0_RNG))
3142 talitos_unregister_rng(dev);
3143
c3e337f8 3144 for (i = 0; i < 2; i++)
2cdba3cf 3145 if (priv->irq[i]) {
c3e337f8
KP
3146 free_irq(priv->irq[i], dev);
3147 irq_dispose_mapping(priv->irq[i]);
3148 }
9c4a7965 3149
c3e337f8 3150 tasklet_kill(&priv->done_task[0]);
2cdba3cf 3151 if (priv->irq[1])
c3e337f8 3152 tasklet_kill(&priv->done_task[1]);
9c4a7965 3153
9c4a7965
KP
3154 return 0;
3155}
3156
3157static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
3158 struct talitos_alg_template
3159 *template)
3160{
60f208d7 3161 struct talitos_private *priv = dev_get_drvdata(dev);
9c4a7965
KP
3162 struct talitos_crypto_alg *t_alg;
3163 struct crypto_alg *alg;
3164
24b92ff2
LC
3165 t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
3166 GFP_KERNEL);
9c4a7965
KP
3167 if (!t_alg)
3168 return ERR_PTR(-ENOMEM);
3169
acbf7c62
LN
3170 t_alg->algt = *template;
3171
3172 switch (t_alg->algt.type) {
373960d7
AB
3173 case CRYPTO_ALG_TYPE_SKCIPHER:
3174 alg = &t_alg->algt.alg.skcipher.base;
2e13ce08 3175 alg->cra_exit = talitos_cra_exit;
373960d7
AB
3176 t_alg->algt.alg.skcipher.init = talitos_cra_init_skcipher;
3177 t_alg->algt.alg.skcipher.setkey =
3178 t_alg->algt.alg.skcipher.setkey ?: skcipher_setkey;
3179 t_alg->algt.alg.skcipher.encrypt = skcipher_encrypt;
3180 t_alg->algt.alg.skcipher.decrypt = skcipher_decrypt;
497f2e6b 3181 break;
acbf7c62 3182 case CRYPTO_ALG_TYPE_AEAD:
aeb4c132 3183 alg = &t_alg->algt.alg.aead.base;
2e13ce08 3184 alg->cra_exit = talitos_cra_exit;
aeb4c132 3185 t_alg->algt.alg.aead.init = talitos_cra_init_aead;
ef7c5c85
HX
3186 t_alg->algt.alg.aead.setkey = t_alg->algt.alg.aead.setkey ?:
3187 aead_setkey;
aeb4c132
HX
3188 t_alg->algt.alg.aead.encrypt = aead_encrypt;
3189 t_alg->algt.alg.aead.decrypt = aead_decrypt;
6cda075a
LC
3190 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
3191 !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
24b92ff2 3192 devm_kfree(dev, t_alg);
6cda075a
LC
3193 return ERR_PTR(-ENOTSUPP);
3194 }
acbf7c62
LN
3195 break;
3196 case CRYPTO_ALG_TYPE_AHASH:
3197 alg = &t_alg->algt.alg.hash.halg.base;
497f2e6b 3198 alg->cra_init = talitos_cra_init_ahash;
ad4cd51f 3199 alg->cra_exit = talitos_cra_exit;
b286e003
KP
3200 t_alg->algt.alg.hash.init = ahash_init;
3201 t_alg->algt.alg.hash.update = ahash_update;
3202 t_alg->algt.alg.hash.final = ahash_final;
3203 t_alg->algt.alg.hash.finup = ahash_finup;
3204 t_alg->algt.alg.hash.digest = ahash_digest;
56136631
LC
3205 if (!strncmp(alg->cra_name, "hmac", 4))
3206 t_alg->algt.alg.hash.setkey = ahash_setkey;
3639ca84
HG
3207 t_alg->algt.alg.hash.import = ahash_import;
3208 t_alg->algt.alg.hash.export = ahash_export;
b286e003 3209
79b3a418 3210 if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
0b2730d8 3211 !strncmp(alg->cra_name, "hmac", 4)) {
24b92ff2 3212 devm_kfree(dev, t_alg);
79b3a418 3213 return ERR_PTR(-ENOTSUPP);
0b2730d8 3214 }
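		/*
		 * Parts without SHA-224 hardware init get a software
		 * initial state and run the request through the MDEU
		 * SHA-256 mode instead.
		 */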
60f208d7 3215 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
79b3a418
LN
3216 (!strcmp(alg->cra_name, "sha224") ||
3217 !strcmp(alg->cra_name, "hmac(sha224)"))) {
60f208d7
KP
3218 t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
3219 t_alg->algt.desc_hdr_template =
3220 DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3221 DESC_HDR_SEL0_MDEUA |
3222 DESC_HDR_MODE0_MDEU_SHA256;
3223 }
497f2e6b 3224 break;
1d11911a
KP
3225 default:
3226 dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
24b92ff2 3227 devm_kfree(dev, t_alg);
1d11911a 3228 return ERR_PTR(-EINVAL);
acbf7c62 3229 }
9c4a7965 3230
9c4a7965 3231 alg->cra_module = THIS_MODULE;
b0057763
LC
3232 if (t_alg->algt.priority)
3233 alg->cra_priority = t_alg->algt.priority;
3234 else
3235 alg->cra_priority = TALITOS_CRA_PRIORITY;
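	/* SEC1 needs the crypto layer to provide 32-bit aligned buffers */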
c9cca703
CL
3236 if (has_ftr_sec1(priv))
3237 alg->cra_alignmask = 3;
3238 else
3239 alg->cra_alignmask = 0;
9c4a7965 3240 alg->cra_ctxsize = sizeof(struct talitos_ctx);
d912bb76 3241 alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;
9c4a7965 3242
9c4a7965
KP
3243 t_alg->dev = dev;
3244
3245 return t_alg;
3246}
3247
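/*
 * Map and request the interrupt lines from the device tree.  SEC1 uses a
 * single IRQ for all channels; SEC2+ uses one IRQ for everything when only
 * one line is wired up, or splits channels 0/2 and 1/3 across two IRQs.
 */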
c3e337f8
KP
3248static int talitos_probe_irq(struct platform_device *ofdev)
3249{
3250 struct device *dev = &ofdev->dev;
3251 struct device_node *np = ofdev->dev.of_node;
3252 struct talitos_private *priv = dev_get_drvdata(dev);
3253 int err;
dd3c0987 3254 bool is_sec1 = has_ftr_sec1(priv);
c3e337f8
KP
3255
3256 priv->irq[0] = irq_of_parse_and_map(np, 0);
2cdba3cf 3257 if (!priv->irq[0]) {
c3e337f8
KP
3258 dev_err(dev, "failed to map irq\n");
3259 return -EINVAL;
3260 }
dd3c0987
LC
3261 if (is_sec1) {
3262 err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
3263 dev_driver_string(dev), dev);
3264 goto primary_out;
3265 }
c3e337f8
KP
3266
3267 priv->irq[1] = irq_of_parse_and_map(np, 1);
3268
3269 /* get the primary irq line */
2cdba3cf 3270 if (!priv->irq[1]) {
dd3c0987 3271 err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
c3e337f8
KP
3272 dev_driver_string(dev), dev);
3273 goto primary_out;
3274 }
3275
dd3c0987 3276 err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
c3e337f8
KP
3277 dev_driver_string(dev), dev);
3278 if (err)
3279 goto primary_out;
3280
3281 /* get the secondary irq line */
dd3c0987 3282 err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
c3e337f8
KP
3283 dev_driver_string(dev), dev);
3284 if (err) {
3285 dev_err(dev, "failed to request secondary irq\n");
3286 irq_dispose_mapping(priv->irq[1]);
2cdba3cf 3287 priv->irq[1] = 0;
c3e337f8
KP
3288 }
3289
3290 return err;
3291
3292primary_out:
3293 if (err) {
3294 dev_err(dev, "failed to request primary irq\n");
3295 irq_dispose_mapping(priv->irq[0]);
2cdba3cf 3296 priv->irq[0] = 0;
c3e337f8
KP
3297 }
3298
3299 return err;
3300}
3301
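/*
 * Probe: map the SEC register block, read the engine's capabilities from
 * the device tree, set up per-channel state and completion tasklets, then
 * register the RNG and every algorithm the hardware advertises support for.
 */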
1c48a5c9 3302static int talitos_probe(struct platform_device *ofdev)
9c4a7965
KP
3303{
3304 struct device *dev = &ofdev->dev;
61c7a080 3305 struct device_node *np = ofdev->dev.of_node;
9c4a7965 3306 struct talitos_private *priv;
9c4a7965 3307 int i, err;
5fa7fa14 3308 int stride;
fd5ea7f0 3309 struct resource *res;
9c4a7965 3310
24b92ff2 3311 priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
9c4a7965
KP
3312 if (!priv)
3313 return -ENOMEM;
3314
f3de9cb1
KH
3315 INIT_LIST_HEAD(&priv->alg_list);
3316
9c4a7965
KP
3317 dev_set_drvdata(dev, priv);
3318
3319 priv->ofdev = ofdev;
3320
511d63cb
HG
3321 spin_lock_init(&priv->reg_lock);
3322
fd5ea7f0
LC
3323 res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
3324 if (!res)
3325 return -ENXIO;
3326 priv->reg = devm_ioremap(dev, res->start, resource_size(res));
9c4a7965
KP
3327 if (!priv->reg) {
3328 dev_err(dev, "failed to ioremap registers\n");
3329 err = -ENOMEM;
3330 goto err_out;
3331 }
3332
3333 /* get SEC version capabilities from device tree */
fa14c6cf
LC
3334 of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
3335 of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
3336 of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
3337 of_property_read_u32(np, "fsl,descriptor-types-mask",
3338 &priv->desc_types);
9c4a7965
KP
3339
3340 if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
3341 !priv->exec_units || !priv->desc_types) {
3342 dev_err(dev, "invalid property data in device tree node\n");
3343 err = -EINVAL;
3344 goto err_out;
3345 }
3346
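	/*
	 * Illustrative only -- a hypothetical SEC 2.x node carrying the
	 * properties parsed above (real values are SoC-specific and come
	 * from the platform device tree):
	 *
	 *	crypto@30000 {
	 *		compatible = "fsl,sec2.0";
	 *		reg = <0x30000 0x10000>;
	 *		interrupts = <11 2>;
	 *		fsl,num-channels = <4>;
	 *		fsl,channel-fifo-len = <24>;
	 *		fsl,exec-units-mask = <0xfe>;
	 *		fsl,descriptor-types-mask = <0x12b0ebf>;
	 *	};
	 */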
f3c85bc1
LN
3347 if (of_device_is_compatible(np, "fsl,sec3.0"))
3348 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
3349
fe5720e2 3350 if (of_device_is_compatible(np, "fsl,sec2.1"))
60f208d7 3351 priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
79b3a418
LN
3352 TALITOS_FTR_SHA224_HWINIT |
3353 TALITOS_FTR_HMAC_OK;
fe5720e2 3354
21590888
LC
3355 if (of_device_is_compatible(np, "fsl,sec1.0"))
3356 priv->features |= TALITOS_FTR_SEC1;
3357
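	/*
	 * Execution-unit register offsets and the per-channel stride differ
	 * between the SEC1 and SEC2+ register maps, so pick them based on
	 * the compatible string.
	 */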
5fa7fa14
LC
3358 if (of_device_is_compatible(np, "fsl,sec1.2")) {
3359 priv->reg_deu = priv->reg + TALITOS12_DEU;
3360 priv->reg_aesu = priv->reg + TALITOS12_AESU;
3361 priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
3362 stride = TALITOS1_CH_STRIDE;
3363 } else if (of_device_is_compatible(np, "fsl,sec1.0")) {
3364 priv->reg_deu = priv->reg + TALITOS10_DEU;
3365 priv->reg_aesu = priv->reg + TALITOS10_AESU;
3366 priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
3367 priv->reg_afeu = priv->reg + TALITOS10_AFEU;
3368 priv->reg_rngu = priv->reg + TALITOS10_RNGU;
3369 priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
3370 stride = TALITOS1_CH_STRIDE;
3371 } else {
3372 priv->reg_deu = priv->reg + TALITOS2_DEU;
3373 priv->reg_aesu = priv->reg + TALITOS2_AESU;
3374 priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
3375 priv->reg_afeu = priv->reg + TALITOS2_AFEU;
3376 priv->reg_rngu = priv->reg + TALITOS2_RNGU;
3377 priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
3378 priv->reg_keu = priv->reg + TALITOS2_KEU;
3379 priv->reg_crcu = priv->reg + TALITOS2_CRCU;
3380 stride = TALITOS2_CH_STRIDE;
3381 }
3382
dd3c0987
LC
3383 err = talitos_probe_irq(ofdev);
3384 if (err)
3385 goto err_out;
3386
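	/*
	 * Completion handling is deferred to tasklets; the handler depends
	 * on the SEC generation, the channel count and whether a second
	 * IRQ line was mapped.
	 */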
c8c74647 3387 if (has_ftr_sec1(priv)) {
9c02e285
LC
3388 if (priv->num_channels == 1)
3389 tasklet_init(&priv->done_task[0], talitos1_done_ch0,
dd3c0987 3390 (unsigned long)dev);
9c02e285
LC
3391 else
3392 tasklet_init(&priv->done_task[0], talitos1_done_4ch,
3393 (unsigned long)dev);
3394 } else {
3395 if (priv->irq[1]) {
dd3c0987
LC
3396 tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
3397 (unsigned long)dev);
3398 tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
3399 (unsigned long)dev);
9c02e285
LC
3400 } else if (priv->num_channels == 1) {
3401 tasklet_init(&priv->done_task[0], talitos2_done_ch0,
3402 (unsigned long)dev);
3403 } else {
3404 tasklet_init(&priv->done_task[0], talitos2_done_4ch,
3405 (unsigned long)dev);
dd3c0987
LC
3406 }
3407 }
3408
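	/*
	 * Allocate per-channel state: each channel gets its own register
	 * window, head/tail locks, a power-of-two request FIFO and a
	 * submit counter used to throttle submissions.
	 */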
a86854d0
KC
3409 priv->chan = devm_kcalloc(dev,
3410 priv->num_channels,
3411 sizeof(struct talitos_channel),
3412 GFP_KERNEL);
4b992628
KP
3413 if (!priv->chan) {
3414 dev_err(dev, "failed to allocate channel management space\n");
9c4a7965
KP
3415 err = -ENOMEM;
3416 goto err_out;
3417 }
3418
f641dddd
MH
3419 priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);
3420
c3e337f8 3421 for (i = 0; i < priv->num_channels; i++) {
5fa7fa14 3422 priv->chan[i].reg = priv->reg + stride * (i + 1);
2cdba3cf 3423 if (!priv->irq[1] || !(i & 1))
c3e337f8 3424 priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;
ad42d5fc 3425
4b992628
KP
3426 spin_lock_init(&priv->chan[i].head_lock);
3427 spin_lock_init(&priv->chan[i].tail_lock);
9c4a7965 3428
a86854d0
KC
3429 priv->chan[i].fifo = devm_kcalloc(dev,
3430 priv->fifo_len,
3431 sizeof(struct talitos_request),
3432 GFP_KERNEL);
4b992628 3433 if (!priv->chan[i].fifo) {
9c4a7965
KP
3434 dev_err(dev, "failed to allocate request fifo %d\n", i);
3435 err = -ENOMEM;
3436 goto err_out;
3437 }
9c4a7965 3438
4b992628
KP
3439 atomic_set(&priv->chan[i].submit_count,
3440 -(priv->chfifo_len - 1));
f641dddd 3441 }
9c4a7965 3442
81eb024c
KP
3443 dma_set_mask(dev, DMA_BIT_MASK(36));
3444
9c4a7965
KP
3445 /* reset and initialize the h/w */
3446 err = init_device(dev);
3447 if (err) {
3448 dev_err(dev, "failed to initialize device\n");
3449 goto err_out;
3450 }
3451
3452 /* register the RNG, if available */
3453 if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
3454 err = talitos_register_rng(dev);
3455 if (err) {
3456 dev_err(dev, "failed to register hwrng: %d\n", err);
3457 goto err_out;
3458 } else
3459 dev_info(dev, "hwrng\n");
3460 }
3461
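	/*
	 * hw_supports() matches each template's descriptor header against
	 * the exec-unit and descriptor-type masks read from the device
	 * tree; templates rejected with -ENOTSUPP are silently skipped.
	 */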
3462 /* register crypto algorithms the device supports */
9c4a7965
KP
3463 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3464 if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
3465 struct talitos_crypto_alg *t_alg;
aeb4c132 3466 struct crypto_alg *alg = NULL;
9c4a7965
KP
3467
3468 t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
3469 if (IS_ERR(t_alg)) {
3470 err = PTR_ERR(t_alg);
0b2730d8 3471 if (err == -ENOTSUPP)
79b3a418 3472 continue;
9c4a7965
KP
3473 goto err_out;
3474 }
3475
acbf7c62 3476 switch (t_alg->algt.type) {
373960d7
AB
3477 case CRYPTO_ALG_TYPE_SKCIPHER:
3478 err = crypto_register_skcipher(
3479 &t_alg->algt.alg.skcipher);
3480 alg = &t_alg->algt.alg.skcipher.base;
acbf7c62 3481 break;
aeb4c132
HX
3482
3483 case CRYPTO_ALG_TYPE_AEAD:
3484 err = crypto_register_aead(
3485 &t_alg->algt.alg.aead);
3486 alg = &t_alg->algt.alg.aead.base;
3487 break;
3488
acbf7c62
LN
3489 case CRYPTO_ALG_TYPE_AHASH:
3490 err = crypto_register_ahash(
3491 &t_alg->algt.alg.hash);
aeb4c132 3492 alg = &t_alg->algt.alg.hash.halg.base;
acbf7c62
LN
3493 break;
3494 }
9c4a7965
KP
3495 if (err) {
3496 dev_err(dev, "%s alg registration failed\n",
aeb4c132 3497 alg->cra_driver_name);
24b92ff2 3498 devm_kfree(dev, t_alg);
991155ba 3499 } else
9c4a7965 3500 list_add_tail(&t_alg->entry, &priv->alg_list);
9c4a7965
KP
3501 }
3502 }
5b859b6e
KP
3503 if (!list_empty(&priv->alg_list))
3504 dev_info(dev, "%s algorithms registered in /proc/crypto\n",
3505 (char *)of_get_property(np, "compatible", NULL));
9c4a7965
KP
3506
3507 return 0;
3508
3509err_out:
3510 talitos_remove(ofdev);
9c4a7965
KP
3511
3512 return err;
3513}
3514
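/*
 * The driver binds to both SEC generations; each family can be compiled
 * out independently via CONFIG_CRYPTO_DEV_TALITOS1/2.
 */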
6c3f975a 3515static const struct of_device_id talitos_match[] = {
0635b7db
LC
3516#ifdef CONFIG_CRYPTO_DEV_TALITOS1
3517 {
3518 .compatible = "fsl,sec1.0",
3519 },
3520#endif
3521#ifdef CONFIG_CRYPTO_DEV_TALITOS2
9c4a7965
KP
3522 {
3523 .compatible = "fsl,sec2.0",
3524 },
0635b7db 3525#endif
9c4a7965
KP
3526 {},
3527};
3528MODULE_DEVICE_TABLE(of, talitos_match);
3529
1c48a5c9 3530static struct platform_driver talitos_driver = {
4018294b
GL
3531 .driver = {
3532 .name = "talitos",
4018294b
GL
3533 .of_match_table = talitos_match,
3534 },
9c4a7965 3535 .probe = talitos_probe,
596f1034 3536 .remove = talitos_remove,
9c4a7965
KP
3537};
3538
741e8c2d 3539module_platform_driver(talitos_driver);
9c4a7965
KP
3540
3541MODULE_LICENSE("GPL");
3542MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
3543MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");