/*
 * talitos - Freescale Integrated Security Engine (SEC) device driver
 *
 * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
 *
 * Scatterlist Crypto API glue code copied from files with the following:
 * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Crypto algorithm registration code copied from hifn driver:
 * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
 * All rights reserved.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307 USA
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/mod_devicetable.h>
#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/crypto.h>
#include <linux/hw_random.h>
#include <linux/of_address.h>
#include <linux/of_irq.h>
#include <linux/of_platform.h>
#include <linux/dma-mapping.h>
#include <linux/io.h>
#include <linux/spinlock.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>

#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/des.h>
#include <crypto/sha.h>
#include <crypto/md5.h>
#include <crypto/internal/aead.h>
#include <crypto/authenc.h>
#include <crypto/skcipher.h>
#include <crypto/hash.h>
#include <crypto/internal/hash.h>
#include <crypto/scatterwalk.h>

#include "talitos.h"

static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
			   unsigned int len, bool is_sec1)
{
	ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
	if (is_sec1) {
		ptr->len1 = cpu_to_be16(len);
	} else {
		ptr->len = cpu_to_be16(len);
		ptr->eptr = upper_32_bits(dma_addr);
	}
}

static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
			     struct talitos_ptr *src_ptr, bool is_sec1)
{
	dst_ptr->ptr = src_ptr->ptr;
	if (is_sec1) {
		dst_ptr->len1 = src_ptr->len1;
	} else {
		dst_ptr->len = src_ptr->len;
		dst_ptr->eptr = src_ptr->eptr;
	}
}

static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
					   bool is_sec1)
{
	if (is_sec1)
		return be16_to_cpu(ptr->len1);
	else
		return be16_to_cpu(ptr->len);
}

static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
				   bool is_sec1)
{
	if (!is_sec1)
		ptr->j_extent = val;
}

static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
{
	if (!is_sec1)
		ptr->j_extent |= val;
}

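/*
 * Illustrative note (added, not from the original sources): the helpers above
 * hide the layout difference between SEC1 and SEC2+ hardware pointers.  As a
 * sketch with hypothetical values, to_talitos_ptr() for a 64-byte buffer at
 * bus address 0x1_2345_6780 would fill in roughly:
 *
 *	SEC2+: ptr->ptr = 0x23456780, ptr->eptr = 0x1, ptr->len = 64
 *	SEC1:  ptr->ptr = 0x23456780, ptr->len1 = 64 (no eptr/j_extent field)
 */
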
/*
 * map virtual single (contiguous) pointer to h/w descriptor pointer
 */
static void map_single_talitos_ptr(struct device *dev,
				   struct talitos_ptr *ptr,
				   unsigned int len, void *data,
				   enum dma_data_direction dir)
{
	dma_addr_t dma_addr = dma_map_single(dev, data, len, dir);
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	to_talitos_ptr(ptr, dma_addr, len, is_sec1);
}

/*
 * unmap bus single (contiguous) h/w descriptor pointer
 */
static void unmap_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     enum dma_data_direction dir)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
			 from_talitos_ptr_len(ptr, is_sec1), dir);
}

static int reset_channel(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);

	if (is_sec1) {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS1_CCCR_LO_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
			TALITOS1_CCCR_LO_RESET) && --timeout)
			cpu_relax();
	} else {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR,
			  TALITOS2_CCCR_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			TALITOS2_CCCR_RESET) && --timeout)
			cpu_relax();
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset channel %d\n", ch);
		return -EIO;
	}

	/* set 36-bit addressing, done writeback enable and done IRQ enable */
	setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
		  TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
	/* enable chaining descriptors */
	if (is_sec1)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_NE);

	/* and ICCR writeback, if available */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_IWSE);

	return 0;
}

static int reset_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);
	u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;

	setbits32(priv->reg + TALITOS_MCR, mcr);

	while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
	       && --timeout)
		cpu_relax();

	if (priv->irq[1]) {
		mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
		setbits32(priv->reg + TALITOS_MCR, mcr);
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset device\n");
		return -EIO;
	}

	return 0;
}

/*
 * Reset and initialize the device
 */
static int init_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int ch, err;
	bool is_sec1 = has_ftr_sec1(priv);

	/*
	 * Master reset
	 * errata documentation: warning: certain SEC interrupts
	 * are not fully cleared by writing the MCR:SWR bit,
	 * set bit twice to completely reset
	 */
	err = reset_device(dev);
	if (err)
		return err;

	err = reset_device(dev);
	if (err)
		return err;

	/* reset channels */
	for (ch = 0; ch < priv->num_channels; ch++) {
		err = reset_channel(dev, ch);
		if (err)
			return err;
	}

	/* enable channel done and error interrupts */
	if (is_sec1) {
		clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
		clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
		/* disable parity error check in DEU (erroneous? test vect.) */
		setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
	} else {
		setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
		setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
	}

	/* disable integrity check error interrupts (use writeback instead) */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
			  TALITOS_MDEUICR_LO_ICE);

	return 0;
}

/**
 * talitos_submit - submits a descriptor to the device for processing
 * @dev: the SEC device to be used
 * @ch: the SEC device channel to be used
 * @desc: the descriptor to be processed by the device
 * @callback: whom to call when processing is complete
 * @context: a handle for use by caller (optional)
 *
 * desc must contain valid dma-mapped (bus physical) address pointers.
 * callback must check err and feedback in descriptor header
 * for device processing status.
 */
int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
		   void (*callback)(struct device *dev,
				    struct talitos_desc *desc,
				    void *context, int error),
		   void *context)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request;
	unsigned long flags;
	int head;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].head_lock, flags);

	if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
		/* h/w fifo is full */
		spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
		return -EAGAIN;
	}

	head = priv->chan[ch].head;
	request = &priv->chan[ch].fifo[head];

	/* map descriptor and save caller data */
	if (is_sec1) {
		desc->hdr1 = desc->hdr;
		request->dma_desc = dma_map_single(dev, &desc->hdr1,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	} else {
		request->dma_desc = dma_map_single(dev, desc,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	}
	request->callback = callback;
	request->context = context;

	/* increment fifo head */
	priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);

	smp_wmb();
	request->desc = desc;

	/* GO! */
	wmb();
	out_be32(priv->chan[ch].reg + TALITOS_FF,
		 upper_32_bits(request->dma_desc));
	out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
		 lower_32_bits(request->dma_desc));

	spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);

	return -EINPROGRESS;
}
EXPORT_SYMBOL(talitos_submit);

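/*
 * Illustrative (hypothetical) caller sketch, added for clarity and not part
 * of the original file: a client that has built a struct talitos_desc with
 * dma-mapped pointers would queue it roughly as follows and finish the work
 * in its callback.
 *
 *	static void my_done(struct device *dev, struct talitos_desc *desc,
 *			    void *context, int error)
 *	{
 *		// inspect error and the done/feedback bits in desc->hdr,
 *		// then unmap buffers and complete the request
 *	}
 *
 *	err = talitos_submit(dev, ch, desc, my_done, my_context);
 *	if (err != -EINPROGRESS)
 *		// -EAGAIN means the channel fifo was full; back off and retry
 */
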
static __be32 get_request_hdr(struct talitos_request *request, bool is_sec1)
{
	struct talitos_edesc *edesc;

	if (!is_sec1)
		return request->desc->hdr;

	if (!request->desc->next_desc)
		return request->desc->hdr1;

	edesc = container_of(request->desc, struct talitos_edesc, desc);

	return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;
}

/*
 * process what was done, notify callback of error if not
 */
static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request, saved_req;
	unsigned long flags;
	int tail, status;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);

	tail = priv->chan[ch].tail;
	while (priv->chan[ch].fifo[tail].desc) {
		__be32 hdr;

		request = &priv->chan[ch].fifo[tail];

		/* descriptors with their done bits set don't get the error */
		rmb();
		hdr = get_request_hdr(request, is_sec1);

		if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
			status = 0;
		else
			if (!error)
				break;
			else
				status = error;

		dma_unmap_single(dev, request->dma_desc,
				 TALITOS_DESC_SIZE,
				 DMA_BIDIRECTIONAL);

		/* copy entries so we can call callback outside lock */
		saved_req.desc = request->desc;
		saved_req.callback = request->callback;
		saved_req.context = request->context;

		/* release request entry in fifo */
		smp_wmb();
		request->desc = NULL;

		/* increment fifo tail */
		priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);

		spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);

		atomic_dec(&priv->chan[ch].submit_count);

		saved_req.callback(dev, saved_req.desc, saved_req.context,
				   status);
		/* channel may resume processing in single desc error case */
		if (error && !reset_ch && status == error)
			return;
		spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
		tail = priv->chan[ch].tail;
	}

	spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
}

/*
 * process completed requests for channels that have done status
 */
#define DEF_TALITOS1_DONE(name, ch_done_mask) \
static void talitos1_done_##name(unsigned long data) \
{ \
	struct device *dev = (struct device *)data; \
	struct talitos_private *priv = dev_get_drvdata(dev); \
	unsigned long flags; \
 \
	if (ch_done_mask & 0x10000000) \
		flush_channel(dev, 0, 0, 0); \
	if (ch_done_mask & 0x40000000) \
		flush_channel(dev, 1, 0, 0); \
	if (ch_done_mask & 0x00010000) \
		flush_channel(dev, 2, 0, 0); \
	if (ch_done_mask & 0x00040000) \
		flush_channel(dev, 3, 0, 0); \
 \
	/* At this point, all completed channels have been processed */ \
	/* Unmask done interrupts for channels completed later on. */ \
	spin_lock_irqsave(&priv->reg_lock, flags); \
	clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
	clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT); \
	spin_unlock_irqrestore(&priv->reg_lock, flags); \
}

DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)

#define DEF_TALITOS2_DONE(name, ch_done_mask) \
static void talitos2_done_##name(unsigned long data) \
{ \
	struct device *dev = (struct device *)data; \
	struct talitos_private *priv = dev_get_drvdata(dev); \
	unsigned long flags; \
 \
	if (ch_done_mask & 1) \
		flush_channel(dev, 0, 0, 0); \
	if (ch_done_mask & (1 << 2)) \
		flush_channel(dev, 1, 0, 0); \
	if (ch_done_mask & (1 << 4)) \
		flush_channel(dev, 2, 0, 0); \
	if (ch_done_mask & (1 << 6)) \
		flush_channel(dev, 3, 0, 0); \
 \
	/* At this point, all completed channels have been processed */ \
	/* Unmask done interrupts for channels completed later on. */ \
	spin_lock_irqsave(&priv->reg_lock, flags); \
	setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
	setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT); \
	spin_unlock_irqrestore(&priv->reg_lock, flags); \
}

DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)
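
/*
 * Illustrative note (added): each DEF_TALITOS*_DONE() invocation above
 * defines a tasklet body.  For example, DEF_TALITOS2_DONE(4ch, ...) expands
 * to a function named talitos2_done_4ch() that flushes channels 0-3 and then
 * re-enables the masked "done" interrupts under priv->reg_lock.
 */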

/*
 * locate current (offending) descriptor
 */
static u32 current_desc_hdr(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int tail, iter;
	dma_addr_t cur_desc;

	cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
	cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);

	if (!cur_desc) {
		dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
		return 0;
	}

	tail = priv->chan[ch].tail;

	iter = tail;
	while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
	       priv->chan[ch].fifo[iter].desc->next_desc != cur_desc) {
		iter = (iter + 1) & (priv->fifo_len - 1);
		if (iter == tail) {
			dev_err(dev, "couldn't locate current descriptor\n");
			return 0;
		}
	}

	if (priv->chan[ch].fifo[iter].desc->next_desc == cur_desc) {
		struct talitos_edesc *edesc;

		edesc = container_of(priv->chan[ch].fifo[iter].desc,
				     struct talitos_edesc, desc);
		return ((struct talitos_desc *)
			(edesc->buf + edesc->dma_len))->hdr;
	}

	return priv->chan[ch].fifo[iter].desc->hdr;
}

/*
 * user diagnostics; report root cause of error based on execution unit status
 */
static void report_eu_error(struct device *dev, int ch, u32 desc_hdr)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int i;

	if (!desc_hdr)
		desc_hdr = in_be32(priv->chan[ch].reg + TALITOS_DESCBUF);

	switch (desc_hdr & DESC_HDR_SEL0_MASK) {
	case DESC_HDR_SEL0_AFEU:
		dev_err(dev, "AFEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_afeu + TALITOS_EUISR),
			in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_DEU:
		dev_err(dev, "DEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_deu + TALITOS_EUISR),
			in_be32(priv->reg_deu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_MDEUA:
	case DESC_HDR_SEL0_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_RNG:
		dev_err(dev, "RNGUISR 0x%08x_%08x\n",
			in_be32(priv->reg_rngu + TALITOS_ISR),
			in_be32(priv->reg_rngu + TALITOS_ISR_LO));
		break;
	case DESC_HDR_SEL0_PKEU:
		dev_err(dev, "PKEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_AESU:
		dev_err(dev, "AESUISR 0x%08x_%08x\n",
			in_be32(priv->reg_aesu + TALITOS_EUISR),
			in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_KEU:
		dev_err(dev, "KEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	}

	switch (desc_hdr & DESC_HDR_SEL1_MASK) {
	case DESC_HDR_SEL1_MDEUA:
	case DESC_HDR_SEL1_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL1_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	}

	for (i = 0; i < 8; i++)
		dev_err(dev, "DESCBUF 0x%08x_%08x\n",
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
}

/*
 * recover from error interrupts
 */
static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	int ch, error, reset_dev = 0;
	u32 v_lo;
	bool is_sec1 = has_ftr_sec1(priv);
	int reset_ch = is_sec1 ? 1 : 0; /* only SEC2 supports continuation */

	for (ch = 0; ch < priv->num_channels; ch++) {
		/* skip channels without errors */
		if (is_sec1) {
			/* bits 29, 31, 17, 19 */
			if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
				continue;
		} else {
			if (!(isr & (1 << (ch * 2 + 1))))
				continue;
		}

		error = -EINVAL;

		v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);

		if (v_lo & TALITOS_CCPSR_LO_DOF) {
			dev_err(dev, "double fetch fifo overflow error\n");
			error = -EAGAIN;
			reset_ch = 1;
		}
		if (v_lo & TALITOS_CCPSR_LO_SOF) {
			/* h/w dropped descriptor */
			dev_err(dev, "single fetch fifo overflow error\n");
			error = -EAGAIN;
		}
		if (v_lo & TALITOS_CCPSR_LO_MDTE)
			dev_err(dev, "master data transfer error\n");
		if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
			dev_err(dev, is_sec1 ? "pointer not complete error\n"
					     : "s/g data length zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_FPZ)
			dev_err(dev, is_sec1 ? "parity error\n"
					     : "fetch pointer zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_IDH)
			dev_err(dev, "illegal descriptor header error\n");
		if (v_lo & TALITOS_CCPSR_LO_IEU)
			dev_err(dev, is_sec1 ? "static assignment error\n"
					     : "invalid exec unit error\n");
		if (v_lo & TALITOS_CCPSR_LO_EU)
			report_eu_error(dev, ch, current_desc_hdr(dev, ch));
		if (!is_sec1) {
			if (v_lo & TALITOS_CCPSR_LO_GB)
				dev_err(dev, "gather boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_GRL)
				dev_err(dev, "gather return/length error\n");
			if (v_lo & TALITOS_CCPSR_LO_SB)
				dev_err(dev, "scatter boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_SRL)
				dev_err(dev, "scatter return/length error\n");
		}

		flush_channel(dev, ch, error, reset_ch);

		if (reset_ch) {
			reset_channel(dev, ch);
		} else {
			setbits32(priv->chan[ch].reg + TALITOS_CCCR,
				  TALITOS2_CCCR_CONT);
			setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
			while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			       TALITOS2_CCCR_CONT) && --timeout)
				cpu_relax();
			if (timeout == 0) {
				dev_err(dev, "failed to restart channel %d\n",
					ch);
				reset_dev = 1;
			}
		}
	}
	if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
	    (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
		if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
			dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
				isr, isr_lo);
		else
			dev_err(dev, "done overflow, internal time out, or "
				"rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);

		/* purge request queues */
		for (ch = 0; ch < priv->num_channels; ch++)
			flush_channel(dev, ch, -EIO, 1);

		/* reset and reinitialize the device */
		init_device(dev);
	}
}

#define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
static irqreturn_t talitos1_interrupt_##name(int irq, void *data) \
{ \
	struct device *dev = data; \
	struct talitos_private *priv = dev_get_drvdata(dev); \
	u32 isr, isr_lo; \
	unsigned long flags; \
 \
	spin_lock_irqsave(&priv->reg_lock, flags); \
	isr = in_be32(priv->reg + TALITOS_ISR); \
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
	/* Acknowledge interrupt */ \
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
 \
	if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) { \
		spin_unlock_irqrestore(&priv->reg_lock, flags); \
		talitos_error(dev, isr & ch_err_mask, isr_lo); \
	} \
	else { \
		if (likely(isr & ch_done_mask)) { \
			/* mask further done interrupts. */ \
			setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
			/* done_task will unmask done interrupts at exit */ \
			tasklet_schedule(&priv->done_task[tlet]); \
		} \
		spin_unlock_irqrestore(&priv->reg_lock, flags); \
	} \
 \
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
								IRQ_NONE; \
}

DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)

#define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
static irqreturn_t talitos2_interrupt_##name(int irq, void *data) \
{ \
	struct device *dev = data; \
	struct talitos_private *priv = dev_get_drvdata(dev); \
	u32 isr, isr_lo; \
	unsigned long flags; \
 \
	spin_lock_irqsave(&priv->reg_lock, flags); \
	isr = in_be32(priv->reg + TALITOS_ISR); \
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
	/* Acknowledge interrupt */ \
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
 \
	if (unlikely(isr & ch_err_mask || isr_lo)) { \
		spin_unlock_irqrestore(&priv->reg_lock, flags); \
		talitos_error(dev, isr & ch_err_mask, isr_lo); \
	} \
	else { \
		if (likely(isr & ch_done_mask)) { \
			/* mask further done interrupts. */ \
			clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
			/* done_task will unmask done interrupts at exit */ \
			tasklet_schedule(&priv->done_task[tlet]); \
		} \
		spin_unlock_irqrestore(&priv->reg_lock, flags); \
	} \
 \
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
								IRQ_NONE; \
}

DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
		       0)
DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
		       1)

/*
 * hwrng
 */
static int talitos_rng_data_present(struct hwrng *rng, int wait)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	u32 ofl;
	int i;

	for (i = 0; i < 20; i++) {
		ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
		      TALITOS_RNGUSR_LO_OFL;
		if (ofl || !wait)
			break;
		udelay(10);
	}

	return !!ofl;
}

static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);

	/* rng fifo requires 64-bit accesses */
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);

	return sizeof(u32);
}

static int talitos_rng_init(struct hwrng *rng)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;

	setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
	while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
		 & TALITOS_RNGUSR_LO_RD)
	       && --timeout)
		cpu_relax();
	if (timeout == 0) {
		dev_err(dev, "failed to reset rng hw\n");
		return -ENODEV;
	}

	/* start generating */
	setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);

	return 0;
}

static int talitos_register_rng(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int err;

	priv->rng.name = dev_driver_string(dev),
	priv->rng.init = talitos_rng_init,
	priv->rng.data_present = talitos_rng_data_present,
	priv->rng.data_read = talitos_rng_data_read,
	priv->rng.priv = (unsigned long)dev;

	err = hwrng_register(&priv->rng);
	if (!err)
		priv->rng_registered = true;

	return err;
}

static void talitos_unregister_rng(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);

	if (!priv->rng_registered)
		return;

	hwrng_unregister(&priv->rng);
	priv->rng_registered = false;
}

/*
 * crypto alg
 */
#define TALITOS_CRA_PRIORITY		3000
/*
 * Defines a priority for doing AEAD with descriptors type
 * HMAC_SNOOP_NO_AFEA (HSNA) instead of type IPSEC_ESP
 */
#define TALITOS_CRA_PRIORITY_AEAD_HSNA	(TALITOS_CRA_PRIORITY - 1)
#define TALITOS_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
#define TALITOS_MAX_IV_LENGTH		16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */

struct talitos_ctx {
	struct device *dev;
	int ch;
	__be32 desc_hdr_template;
	u8 key[TALITOS_MAX_KEY_SIZE];
	u8 iv[TALITOS_MAX_IV_LENGTH];
	dma_addr_t dma_key;
	unsigned int keylen;
	unsigned int enckeylen;
	unsigned int authkeylen;
};

#define HASH_MAX_BLOCK_SIZE		SHA512_BLOCK_SIZE
#define TALITOS_MDEU_MAX_CONTEXT_SIZE	TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512

struct talitos_ahash_req_ctx {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	unsigned int hw_context_size;
	u8 buf[2][HASH_MAX_BLOCK_SIZE];
	int buf_idx;
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
	struct scatterlist bufsl[2];
	struct scatterlist *psrc;
};

struct talitos_export_state {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	u8 buf[HASH_MAX_BLOCK_SIZE];
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
};

static int aead_setkey(struct crypto_aead *authenc,
		       const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto badkey;

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

	return 0;

badkey:
	crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
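
/*
 * Illustrative note (added): after aead_setkey(), ctx->key holds the
 * concatenation [ authkey (ctx->authkeylen bytes) | enckey (ctx->enckeylen
 * bytes) ] and ctx->dma_key maps that whole buffer once; the descriptor code
 * below points the HMAC key pointer at dma_key and the cipher key pointer at
 * dma_key + authkeylen.
 */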

static void talitos_sg_unmap(struct device *dev,
			     struct talitos_edesc *edesc,
			     struct scatterlist *src,
			     struct scatterlist *dst,
			     unsigned int len, unsigned int offset)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	unsigned int src_nents = edesc->src_nents ? : 1;
	unsigned int dst_nents = edesc->dst_nents ? : 1;

	if (is_sec1 && dst && dst_nents > 1) {
		dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
					   len, DMA_FROM_DEVICE);
		sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
				     offset);
	}
	if (src != dst) {
		if (src_nents == 1 || !is_sec1)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);

		if (dst && (dst_nents == 1 || !is_sec1))
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else if (src_nents == 1 || !is_sec1) {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}

static void ipsec_esp_unmap(struct device *dev,
			    struct talitos_edesc *edesc,
			    struct aead_request *areq)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];

	if (is_ipsec_esp)
		unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
					 DMA_FROM_DEVICE);
	unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->cryptlen,
			 areq->assoclen);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	if (!is_ipsec_esp) {
		unsigned int dst_nents = edesc->dst_nents ? : 1;

		sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
				   areq->assoclen + areq->cryptlen - ivsize);
	}
}

/*
 * ipsec_esp descriptor callbacks
 */
static void ipsec_esp_encrypt_done(struct device *dev,
				   struct talitos_desc *desc, void *context,
				   int err)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	struct aead_request *areq = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(authenc);
	unsigned int ivsize = crypto_aead_ivsize(authenc);
	struct talitos_edesc *edesc;
	void *icvdata;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, areq);

	/* copy the generated ICV to dst */
	if (edesc->icv_ool) {
		if (is_sec1)
			icvdata = edesc->buf + areq->assoclen + areq->cryptlen;
		else
			icvdata = &edesc->link_tbl[edesc->src_nents +
						   edesc->dst_nents + 2];
		sg_pcopy_from_buffer(areq->dst, edesc->dst_nents ? : 1, icvdata,
				     authsize, areq->assoclen + areq->cryptlen);
	}

	dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);

	kfree(edesc);

	aead_request_complete(areq, err);
}

static void ipsec_esp_decrypt_swauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_edesc *edesc;
	char *oicv, *icv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req);

	if (!err) {
		char icvdata[SHA512_DIGEST_SIZE];
		int nents = edesc->dst_nents ? : 1;
		unsigned int len = req->assoclen + req->cryptlen;

		/* auth check */
		if (nents > 1) {
			sg_pcopy_to_buffer(req->dst, nents, icvdata, authsize,
					   len - authsize);
			icv = icvdata;
		} else {
			icv = (char *)sg_virt(req->dst) + len - authsize;
		}

		if (edesc->dma_len) {
			if (is_sec1)
				oicv = (char *)&edesc->dma_link_tbl +
				       req->assoclen + req->cryptlen;
			else
				oicv = (char *)
				       &edesc->link_tbl[edesc->src_nents +
							edesc->dst_nents + 2];
			if (edesc->icv_ool)
				icv = oicv + authsize;
		} else
			oicv = (char *)&edesc->link_tbl[0];

		err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
	}

	kfree(edesc);

	aead_request_complete(req, err);
}

static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req);

	/* check ICV auth status */
	if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
		     DESC_HDR_LO_ICCR1_PASS))
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}

/*
 * convert scatterlist to SEC h/w link table format
 * stop at cryptlen bytes
 */
static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
				 unsigned int offset, int cryptlen,
				 struct talitos_ptr *link_tbl_ptr)
{
	int n_sg = sg_count;
	int count = 0;

	while (cryptlen && sg && n_sg--) {
		unsigned int len = sg_dma_len(sg);

		if (offset >= len) {
			offset -= len;
			goto next;
		}

		len -= offset;

		if (len > cryptlen)
			len = cryptlen;

		to_talitos_ptr(link_tbl_ptr + count,
			       sg_dma_address(sg) + offset, len, 0);
		to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
		count++;
		cryptlen -= len;
		offset = 0;

next:
		sg = sg_next(sg);
	}

	/* tag end of link table */
	if (count > 0)
		to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
				       DESC_PTR_LNKTBL_RETURN, 0);

	return count;
}
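
/*
 * Illustrative note (added): for a dma-mapped scatterlist of, say, three
 * segments of 16, 32 and 16 bytes with cryptlen == 64, the loop above emits
 * three link-table entries (lengths 16/32/16) and tags the last one with
 * DESC_PTR_LNKTBL_RETURN so the SEC stops walking the table there.
 */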

static int talitos_sg_map_ext(struct device *dev, struct scatterlist *src,
			      unsigned int len, struct talitos_edesc *edesc,
			      struct talitos_ptr *ptr, int sg_count,
			      unsigned int offset, int tbl_off, int elen)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	if (!src) {
		to_talitos_ptr(ptr, 0, 0, is_sec1);
		return 1;
	}
	to_talitos_ptr_ext_set(ptr, elen, is_sec1);
	if (sg_count == 1) {
		to_talitos_ptr(ptr, sg_dma_address(src) + offset, len, is_sec1);
		return sg_count;
	}
	if (is_sec1) {
		to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, len, is_sec1);
		return sg_count;
	}
	sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len + elen,
					 &edesc->link_tbl[tbl_off]);
	if (sg_count == 1) {
		/* Only one segment now, so no link tbl needed*/
		copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
		return sg_count;
	}
	to_talitos_ptr(ptr, edesc->dma_link_tbl +
			    tbl_off * sizeof(struct talitos_ptr), len, is_sec1);
	to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);

	return sg_count;
}

static int talitos_sg_map(struct device *dev, struct scatterlist *src,
			  unsigned int len, struct talitos_edesc *edesc,
			  struct talitos_ptr *ptr, int sg_count,
			  unsigned int offset, int tbl_off)
{
	return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
				  tbl_off, 0);
}

/*
 * fill in and submit ipsec_esp descriptor
 */
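/*
 * Summary comment (added for readability, derived from the code below):
 * ipsec_esp() fills the seven descriptor pointers roughly as
 *	ptr[0] HMAC key, ptr[1] associated data, ptr[2]/ptr[3] cipher IV and
 *	cipher key (order depends on IPSEC_ESP vs. HSNA descriptor type),
 *	ptr[4] cipher in, ptr[5] cipher out, ptr[6] ICV / IV out,
 * and then hands the descriptor to talitos_submit().
 */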
static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
		     void (*callback)(struct device *dev,
				      struct talitos_desc *desc,
				      void *context, int error))
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	unsigned int cryptlen = areq->cryptlen;
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int tbl_off = 0;
	int sg_count, ret;
	int elen = 0;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
	struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];

	/* hmac key */
	to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  areq->assoclen + cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				      DMA_BIDIRECTIONAL : DMA_TO_DEVICE);

	/* hmac data */
	ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
			     &desc->ptr[1], sg_count, 0, tbl_off);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher iv */
	to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);

	/* cipher key */
	to_talitos_ptr(ckey_ptr, ctx->dma_key + ctx->authkeylen,
		       ctx->enckeylen, is_sec1);

	/*
	 * cipher in
	 * map and adjust cipher len to aead request cryptlen.
	 * extent is bytes of HMAC postpended to ciphertext,
	 * typically 12 for ipsec
	 */
	if (is_ipsec_esp && (desc->hdr & DESC_HDR_MODE1_MDEU_CICV))
		elen = authsize;

	ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
				 sg_count, areq->assoclen, tbl_off, elen);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
			     sg_count, areq->assoclen, tbl_off);

	if (is_ipsec_esp)
		to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);

	/* ICV data */
	if (ret > 1) {
		tbl_off += ret;
		edesc->icv_ool = true;
		sync_needed = true;

		if (is_ipsec_esp) {
			struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];
			int offset = (edesc->src_nents + edesc->dst_nents + 2) *
				     sizeof(struct talitos_ptr) + authsize;

			/* Add an entry to the link table for ICV data */
			to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
			to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RETURN,
					       is_sec1);

			/* icv data follows link tables */
			to_talitos_ptr(tbl_ptr, edesc->dma_link_tbl + offset,
				       authsize, is_sec1);
		} else {
			dma_addr_t addr = edesc->dma_link_tbl;

			if (is_sec1)
				addr += areq->assoclen + cryptlen;
			else
				addr += sizeof(struct talitos_ptr) * tbl_off;

			to_talitos_ptr(&desc->ptr[6], addr, authsize, is_sec1);
		}
	} else if (!is_ipsec_esp) {
		ret = talitos_sg_map(dev, areq->dst, authsize, edesc,
				     &desc->ptr[6], sg_count, areq->assoclen +
						    cryptlen,
				     tbl_off);
		if (ret > 1) {
			tbl_off += ret;
			edesc->icv_ool = true;
			sync_needed = true;
		} else {
			edesc->icv_ool = false;
		}
	} else {
		edesc->icv_ool = false;
	}

	/* iv out */
	if (is_ipsec_esp)
		map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
				       DMA_FROM_DEVICE);

	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len,
					   DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		ipsec_esp_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}

/*
 * allocate and map the extended descriptor
 */
static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
						 struct scatterlist *src,
						 struct scatterlist *dst,
						 u8 *iv,
						 unsigned int assoclen,
						 unsigned int cryptlen,
						 unsigned int authsize,
						 unsigned int ivsize,
						 int icv_stashing,
						 u32 cryptoflags,
						 bool encrypt)
{
	struct talitos_edesc *edesc;
	int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
	dma_addr_t iv_dma = 0;
	gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		      GFP_ATOMIC;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;

	if (cryptlen + authsize > max_len) {
		dev_err(dev, "length exceeds h/w max limit\n");
		return ERR_PTR(-EINVAL);
	}

	if (!dst || dst == src) {
		src_len = assoclen + cryptlen + authsize;
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_nents = dst ? src_nents : 0;
		dst_len = 0;
	} else { /* dst && dst != src*/
		src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
		dst_nents = sg_nents_for_len(dst, dst_len);
		if (dst_nents < 0) {
			dev_err(dev, "Invalid number of dst SG.\n");
			return ERR_PTR(-EINVAL);
		}
		dst_nents = (dst_nents == 1) ? 0 : dst_nents;
	}

	/*
	 * allocate space for base edesc plus the link tables,
	 * allowing for two separate entries for AD and generated ICV (+ 2),
	 * and space for two sets of ICVs (stashed and generated)
	 */
	alloc_len = sizeof(struct talitos_edesc);
	if (src_nents || dst_nents) {
		if (is_sec1)
			dma_len = (src_nents ? src_len : 0) +
				  (dst_nents ? dst_len : 0);
		else
			dma_len = (src_nents + dst_nents + 2) *
				  sizeof(struct talitos_ptr) + authsize * 2;
		alloc_len += dma_len;
	} else {
		dma_len = 0;
		alloc_len += icv_stashing ? authsize : 0;
	}

	/* if its a ahash, add space for a second desc next to the first one */
	if (is_sec1 && !dst)
		alloc_len += sizeof(struct talitos_desc);
	alloc_len += ivsize;

	edesc = kmalloc(alloc_len, GFP_DMA | flags);
	if (!edesc)
		return ERR_PTR(-ENOMEM);
	if (ivsize) {
		iv = memcpy(((u8 *)edesc) + alloc_len - ivsize, iv, ivsize);
		iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
	}
	memset(&edesc->desc, 0, sizeof(edesc->desc));

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->dma_len = dma_len;
	if (dma_len)
		edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0],
						     edesc->dma_len,
						     DMA_BIDIRECTIONAL);

	return edesc;
}

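/*
 * Illustrative note (added): the single kmalloc above packs, in order, the
 * struct talitos_edesc itself, then dma_len bytes of link table (used as a
 * bounce buffer on SEC1), optionally a second descriptor for ahash on SEC1,
 * and finally ivsize bytes at the tail into which the caller's IV is copied
 * and dma-mapped.
 */
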
static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
					      int icv_stashing, bool encrypt)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	unsigned int ivsize = crypto_aead_ivsize(authenc);

	return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
				   iv, areq->assoclen, areq->cryptlen,
				   authsize, ivsize, icv_stashing,
				   areq->base.flags, encrypt);
}

static int aead_encrypt(struct aead_request *req)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct talitos_edesc *edesc;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, req->iv, 0, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* set encrypt */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;

	return ipsec_esp(edesc, req, ipsec_esp_encrypt_done);
}

static int aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
	struct talitos_edesc *edesc;
	void *icvdata;

	req->cryptlen -= authsize;

	/* allocate extended descriptor */
	edesc = aead_edesc_alloc(req, req->iv, 1, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	if ((priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
	    ((!edesc->src_nents && !edesc->dst_nents) ||
	     priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {

		/* decrypt and check the ICV */
		edesc->desc.hdr = ctx->desc_hdr_template |
				  DESC_HDR_DIR_INBOUND |
				  DESC_HDR_MODE1_MDEU_CICV;

		/* reset integrity check result bits */

		return ipsec_esp(edesc, req, ipsec_esp_decrypt_hwauth_done);
	}

	/* Have to check the ICV with software */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;

	/* stash incoming ICV for later cmp with ICV generated by the h/w */
	if (edesc->dma_len)
		icvdata = (char *)&edesc->link_tbl[edesc->src_nents +
						   edesc->dst_nents + 2];
	else
		icvdata = &edesc->link_tbl[0];

	sg_pcopy_to_buffer(req->src, edesc->src_nents ? : 1, icvdata, authsize,
			   req->assoclen + req->cryptlen - authsize);

	return ipsec_esp(edesc, req, ipsec_esp_decrypt_swauth_done);
}

static int ablkcipher_setkey(struct crypto_ablkcipher *cipher,
			     const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	struct device *dev = ctx->dev;
	u32 tmp[DES_EXPKEY_WORDS];

	if (keylen > TALITOS_MAX_KEY_SIZE) {
		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	if (unlikely(crypto_ablkcipher_get_flags(cipher) &
		     CRYPTO_TFM_REQ_WEAK_KEY) &&
	    !des_ekey(tmp, key)) {
		crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_WEAK_KEY);
		return -EINVAL;
	}

	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(&ctx->key, key, keylen);
	ctx->keylen = keylen;

	ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);

	return 0;
}

static void common_nonsnoop_unmap(struct device *dev,
				  struct talitos_edesc *edesc,
				  struct ablkcipher_request *areq)
{
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->nbytes, 0);
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);
}

static void ablkcipher_done(struct device *dev,
			    struct talitos_desc *desc, void *context,
			    int err)
{
	struct ablkcipher_request *areq = context;
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	common_nonsnoop_unmap(dev, edesc, areq);
	memcpy(areq->info, ctx->iv, ivsize);

	kfree(edesc);

	areq->base.complete(&areq->base, err);
}

static int common_nonsnoop(struct talitos_edesc *edesc,
			   struct ablkcipher_request *areq,
			   void (*callback) (struct device *dev,
					     struct talitos_desc *desc,
					     void *context, int error))
{
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	unsigned int cryptlen = areq->nbytes;
	unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
	int sg_count, ret;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	/* first DWORD empty */

	/* cipher iv */
	to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);

	/* cipher key */
	to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				      DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
	/*
	 * cipher in
	 */
	sg_count = talitos_sg_map(dev, areq->src, cryptlen, edesc,
				  &desc->ptr[3], sg_count, 0, 0);
	if (sg_count > 1)
		sync_needed = true;

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
			     sg_count, 0, (edesc->src_nents + 1));
	if (ret > 1)
		sync_needed = true;

	/* iv out */
	map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
			       DMA_FROM_DEVICE);

	/* last DWORD empty */

	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len, DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		common_nonsnoop_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}

static struct talitos_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request *
						    areq, bool encrypt)
{
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);

	return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
				   areq->info, 0, areq->nbytes, 0, ivsize, 0,
				   areq->base.flags, encrypt);
}

static int ablkcipher_encrypt(struct ablkcipher_request *areq)
{
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	struct talitos_edesc *edesc;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(areq, true);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/* set encrypt */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;

	return common_nonsnoop(edesc, areq, ablkcipher_done);
}

static int ablkcipher_decrypt(struct ablkcipher_request *areq)
{
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	struct talitos_edesc *edesc;

	/* allocate extended descriptor */
	edesc = ablkcipher_edesc_alloc(areq, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;

	return common_nonsnoop(edesc, areq, ablkcipher_done);
}
1670
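/*
 * Unmap a hash request.  On SEC1 a request may have chained a second
 * descriptor through desc->next_desc; pointers shared between the two
 * descriptors (e.g. ptr[5]) must only be unmapped once, hence the
 * checks below.
 */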
497f2e6b
LN
1671static void common_nonsnoop_hash_unmap(struct device *dev,
1672 struct talitos_edesc *edesc,
1673 struct ahash_request *areq)
1674{
1675 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
9d7c9e2a
LC
1676 struct talitos_private *priv = dev_get_drvdata(dev);
1677 bool is_sec1 = has_ftr_sec1(priv);
1678 struct talitos_desc *desc = &edesc->desc;
bb9ade52
CL
1679 struct talitos_desc *desc2 = (struct talitos_desc *)
1680 (edesc->buf + edesc->dma_len);
9d7c9e2a
LC
1681
1682 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
1683 if (desc->next_desc &&
1684 desc->ptr[5].ptr != desc2->ptr[5].ptr)
1685 unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);
497f2e6b 1686
bb9ade52
CL
1687 if (req_ctx->psrc)
1688 talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);
032d197e 1689
9d7c9e2a
LC
1690 /* When using hashctx-in, must unmap it. */
1691 if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
1692 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
1693 DMA_TO_DEVICE);
1694 else if (desc->next_desc)
1695 unmap_single_talitos_ptr(dev, &desc2->ptr[1],
1696 DMA_TO_DEVICE);
1697
1698 if (is_sec1 && req_ctx->nbuf)
1699 unmap_single_talitos_ptr(dev, &desc->ptr[3],
1700 DMA_TO_DEVICE);
1701
497f2e6b
LN
1702 if (edesc->dma_len)
1703 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1704 DMA_BIDIRECTIONAL);
1705
37b5e889
LC
1706 if (edesc->desc.next_desc)
1707 dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
1708 TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
497f2e6b
LN
1709}
1710
1711static void ahash_done(struct device *dev,
1712 struct talitos_desc *desc, void *context,
1713 int err)
1714{
1715 struct ahash_request *areq = context;
1716 struct talitos_edesc *edesc =
1717 container_of(desc, struct talitos_edesc, desc);
1718 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1719
1720 if (!req_ctx->last && req_ctx->to_hash_later) {
1721 /* Position any partial block for next update/final/finup */
3c0dd190 1722 req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
5e833bc4 1723 req_ctx->nbuf = req_ctx->to_hash_later;
497f2e6b
LN
1724 }
1725 common_nonsnoop_hash_unmap(dev, edesc, areq);
1726
1727 kfree(edesc);
1728
1729 areq->base.complete(&areq->base, err);
1730}
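/*
 * req_ctx->buf[] is double buffered: the bytes held back for the next
 * update are staged in the buffer the current descriptor is not using,
 * and ahash_done() flips buf_idx to point at them on completion.
 */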
1731
2d02905e
LC
1732/*
 1733 * SEC1 doesn't like hashing a zero-sized message, so we do the padding
 1734 * ourselves and submit a padded block
1735 */
5b2cf268 1736static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
2d02905e
LC
1737 struct talitos_edesc *edesc,
1738 struct talitos_ptr *ptr)
1739{
1740 static u8 padded_hash[64] = {
1741 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1742 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1743 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1744 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1745 };
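	/*
	 * The block above is the MD-style padding of an empty message for
	 * the 64-byte-block hashes: a 0x80 terminator followed by zeros,
	 * including a zero 64-bit length field.
	 */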
1746
1747 pr_err_once("Bug in SEC1, padding ourself\n");
1748 edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1749 map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
1750 (char *)padded_hash, DMA_TO_DEVICE);
1751}
1752
497f2e6b
LN
1753static int common_nonsnoop_hash(struct talitos_edesc *edesc,
1754 struct ahash_request *areq, unsigned int length,
1755 void (*callback) (struct device *dev,
1756 struct talitos_desc *desc,
1757 void *context, int error))
1758{
1759 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1760 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1761 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1762 struct device *dev = ctx->dev;
1763 struct talitos_desc *desc = &edesc->desc;
032d197e 1764 int ret;
6a1e8d14 1765 bool sync_needed = false;
922f9dc8
LC
1766 struct talitos_private *priv = dev_get_drvdata(dev);
1767 bool is_sec1 = has_ftr_sec1(priv);
6a1e8d14 1768 int sg_count;
497f2e6b
LN
1769
1770 /* first DWORD empty */
497f2e6b 1771
60f208d7
KP
1772 /* hash context in */
1773 if (!req_ctx->first || req_ctx->swinit) {
9d7c9e2a
LC
1774 map_single_talitos_ptr(dev, &desc->ptr[1],
1775 req_ctx->hw_context_size,
1776 (char *)req_ctx->hw_context,
1777 DMA_TO_DEVICE);
60f208d7 1778 req_ctx->swinit = 0;
497f2e6b 1779 }
afd62fa2
LC
1780 /* Indicate next op is not the first. */
1781 req_ctx->first = 0;
497f2e6b
LN
1782
1783 /* HMAC key */
1784 if (ctx->keylen)
2e13ce08
LC
1785 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
1786 is_sec1);
497f2e6b 1787
37b5e889
LC
1788 if (is_sec1 && req_ctx->nbuf)
1789 length -= req_ctx->nbuf;
1790
6a1e8d14
LC
1791 sg_count = edesc->src_nents ?: 1;
1792 if (is_sec1 && sg_count > 1)
bb9ade52 1793 sg_copy_to_buffer(req_ctx->psrc, sg_count, edesc->buf, length);
37b5e889 1794 else if (length)
6a1e8d14
LC
1795 sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
1796 DMA_TO_DEVICE);
497f2e6b
LN
1797 /*
1798 * data in
1799 */
37b5e889 1800 if (is_sec1 && req_ctx->nbuf) {
9d7c9e2a
LC
1801 map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
1802 req_ctx->buf[req_ctx->buf_idx],
1803 DMA_TO_DEVICE);
37b5e889
LC
1804 } else {
1805 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
bb9ade52 1806 &desc->ptr[3], sg_count, 0, 0);
37b5e889
LC
1807 if (sg_count > 1)
1808 sync_needed = true;
1809 }
497f2e6b
LN
1810
1811 /* fifth DWORD empty */
497f2e6b
LN
1812
1813 /* hash/HMAC out -or- hash context out */
1814 if (req_ctx->last)
1815 map_single_talitos_ptr(dev, &desc->ptr[5],
1816 crypto_ahash_digestsize(tfm),
a2b35aa8 1817 areq->result, DMA_FROM_DEVICE);
497f2e6b 1818 else
9d7c9e2a
LC
1819 map_single_talitos_ptr(dev, &desc->ptr[5],
1820 req_ctx->hw_context_size,
1821 req_ctx->hw_context, DMA_FROM_DEVICE);
497f2e6b
LN
1822
1823 /* last DWORD empty */
497f2e6b 1824
2d02905e
LC
1825 if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
1826 talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);
1827
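	/*
	 * SEC1 only: when both previously buffered bytes (nbuf) and new
	 * scatterlist data must be hashed in one request, chain a second
	 * descriptor via next_desc.  The first descriptor hashes the
	 * buffered bytes with the MDEU continue bit set and with padding and
	 * done notification cleared; the second one (desc2, built below)
	 * processes the remaining data and keeps the original pad/notify
	 * settings.
	 */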
37b5e889 1828 if (is_sec1 && req_ctx->nbuf && length) {
bb9ade52
CL
1829 struct talitos_desc *desc2 = (struct talitos_desc *)
1830 (edesc->buf + edesc->dma_len);
37b5e889
LC
1831 dma_addr_t next_desc;
1832
1833 memset(desc2, 0, sizeof(*desc2));
1834 desc2->hdr = desc->hdr;
1835 desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
1836 desc2->hdr1 = desc2->hdr;
1837 desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1838 desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
1839 desc->hdr &= ~DESC_HDR_DONE_NOTIFY;
1840
9d7c9e2a
LC
1841 if (desc->ptr[1].ptr)
1842 copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
1843 is_sec1);
1844 else
1845 map_single_talitos_ptr(dev, &desc2->ptr[1],
1846 req_ctx->hw_context_size,
1847 req_ctx->hw_context,
1848 DMA_TO_DEVICE);
37b5e889
LC
1849 copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
1850 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
bb9ade52 1851 &desc2->ptr[3], sg_count, 0, 0);
37b5e889
LC
1852 if (sg_count > 1)
1853 sync_needed = true;
1854 copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
1855 if (req_ctx->last)
9d7c9e2a
LC
1856 map_single_talitos_ptr(dev, &desc->ptr[5],
1857 req_ctx->hw_context_size,
1858 req_ctx->hw_context,
1859 DMA_FROM_DEVICE);
37b5e889
LC
1860
1861 next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
1862 DMA_BIDIRECTIONAL);
1863 desc->next_desc = cpu_to_be32(next_desc);
1864 }
1865
6a1e8d14
LC
1866 if (sync_needed)
1867 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1868 edesc->dma_len, DMA_BIDIRECTIONAL);
1869
5228f0f7 1870 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
497f2e6b
LN
1871 if (ret != -EINPROGRESS) {
1872 common_nonsnoop_hash_unmap(dev, edesc, areq);
1873 kfree(edesc);
1874 }
1875 return ret;
1876}
1877
1878static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
1879 unsigned int nbytes)
1880{
1881 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1882 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1883 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
37b5e889
LC
1884 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1885 bool is_sec1 = has_ftr_sec1(priv);
1886
1887 if (is_sec1)
1888 nbytes -= req_ctx->nbuf;
497f2e6b 1889
aeb4c132 1890 return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
62293a37 1891 nbytes, 0, 0, 0, areq->base.flags, false);
497f2e6b
LN
1892}
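/*
 * On SEC1 the bytes already accumulated in req_ctx->buf are fed to the
 * engine through a separately mapped buffer rather than the scatterlist,
 * so they are subtracted from nbytes when sizing the extended descriptor.
 */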
1893
1894static int ahash_init(struct ahash_request *areq)
1895{
1896 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1897 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
49f9783b 1898 unsigned int size;
497f2e6b
LN
1899
1900 /* Initialize the context */
3c0dd190 1901 req_ctx->buf_idx = 0;
5e833bc4 1902 req_ctx->nbuf = 0;
60f208d7
KP
1903 req_ctx->first = 1; /* first indicates h/w must init its context */
1904 req_ctx->swinit = 0; /* assume h/w init of context */
49f9783b 1905 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
497f2e6b
LN
1906 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
1907 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
49f9783b 1908 req_ctx->hw_context_size = size;
497f2e6b
LN
1909
1910 return 0;
1911}
1912
60f208d7
KP
1913/*
1914 * on h/w without explicit sha224 support, we initialize h/w context
1915 * manually with sha224 constants, and tell it to run sha256.
1916 */
1917static int ahash_init_sha224_swinit(struct ahash_request *areq)
1918{
1919 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1920
1921 ahash_init(areq);
1922 req_ctx->swinit = 1;/* prevent h/w initting context with sha256 values*/
1923
a752447a
KP
1924 req_ctx->hw_context[0] = SHA224_H0;
1925 req_ctx->hw_context[1] = SHA224_H1;
1926 req_ctx->hw_context[2] = SHA224_H2;
1927 req_ctx->hw_context[3] = SHA224_H3;
1928 req_ctx->hw_context[4] = SHA224_H4;
1929 req_ctx->hw_context[5] = SHA224_H5;
1930 req_ctx->hw_context[6] = SHA224_H6;
1931 req_ctx->hw_context[7] = SHA224_H7;
60f208d7
KP
1932
1933 /* init 64-bit count */
1934 req_ctx->hw_context[8] = 0;
1935 req_ctx->hw_context[9] = 0;
1936
1937 return 0;
1938}
1939
497f2e6b
LN
1940static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
1941{
1942 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1943 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1944 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1945 struct talitos_edesc *edesc;
1946 unsigned int blocksize =
1947 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
1948 unsigned int nbytes_to_hash;
1949 unsigned int to_hash_later;
5e833bc4 1950 unsigned int nsg;
8e409fe1 1951 int nents;
37b5e889
LC
1952 struct device *dev = ctx->dev;
1953 struct talitos_private *priv = dev_get_drvdata(dev);
1954 bool is_sec1 = has_ftr_sec1(priv);
3c0dd190 1955 u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];
497f2e6b 1956
5e833bc4
LN
1957 if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
1958 /* Buffer up to one whole block */
8e409fe1
LC
1959 nents = sg_nents_for_len(areq->src, nbytes);
1960 if (nents < 0) {
1961 dev_err(ctx->dev, "Invalid number of src SG.\n");
1962 return nents;
1963 }
1964 sg_copy_to_buffer(areq->src, nents,
3c0dd190 1965 ctx_buf + req_ctx->nbuf, nbytes);
5e833bc4 1966 req_ctx->nbuf += nbytes;
497f2e6b
LN
1967 return 0;
1968 }
1969
5e833bc4
LN
1970 /* At least (blocksize + 1) bytes are available to hash */
1971 nbytes_to_hash = nbytes + req_ctx->nbuf;
1972 to_hash_later = nbytes_to_hash & (blocksize - 1);
1973
1974 if (req_ctx->last)
1975 to_hash_later = 0;
1976 else if (to_hash_later)
1977 /* There is a partial block. Hash the full block(s) now */
1978 nbytes_to_hash -= to_hash_later;
1979 else {
1980 /* Keep one block buffered */
1981 nbytes_to_hash -= blocksize;
1982 to_hash_later = blocksize;
1983 }
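	/*
	 * Example: blocksize 64, nbuf 10, nbytes 118 -> nbytes_to_hash = 128.
	 * That is block aligned, so unless this is the final request a whole
	 * block (64 bytes) is held back and only 64 bytes are hashed now.
	 */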
1984
1985 /* Chain in any previously buffered data */
37b5e889 1986 if (!is_sec1 && req_ctx->nbuf) {
5e833bc4
LN
1987 nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
1988 sg_init_table(req_ctx->bufsl, nsg);
3c0dd190 1989 sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
5e833bc4 1990 if (nsg > 1)
c56f6d12 1991 sg_chain(req_ctx->bufsl, 2, areq->src);
497f2e6b 1992 req_ctx->psrc = req_ctx->bufsl;
37b5e889 1993 } else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
bb9ade52
CL
1994 int offset;
1995
37b5e889
LC
1996 if (nbytes_to_hash > blocksize)
1997 offset = blocksize - req_ctx->nbuf;
1998 else
1999 offset = nbytes_to_hash - req_ctx->nbuf;
2000 nents = sg_nents_for_len(areq->src, offset);
2001 if (nents < 0) {
2002 dev_err(ctx->dev, "Invalid number of src SG.\n");
2003 return nents;
2004 }
2005 sg_copy_to_buffer(areq->src, nents,
3c0dd190 2006 ctx_buf + req_ctx->nbuf, offset);
37b5e889 2007 req_ctx->nbuf += offset;
bb9ade52
CL
2008 req_ctx->psrc = scatterwalk_ffwd(req_ctx->bufsl, areq->src,
2009 offset);
5e833bc4 2010 } else
497f2e6b 2011 req_ctx->psrc = areq->src;
5e833bc4
LN
2012
2013 if (to_hash_later) {
8e409fe1
LC
2014 nents = sg_nents_for_len(areq->src, nbytes);
2015 if (nents < 0) {
2016 dev_err(ctx->dev, "Invalid number of src SG.\n");
2017 return nents;
2018 }
d0525723 2019 sg_pcopy_to_buffer(areq->src, nents,
3c0dd190 2020 req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
5e833bc4
LN
2021 to_hash_later,
2022 nbytes - to_hash_later);
497f2e6b 2023 }
5e833bc4 2024 req_ctx->to_hash_later = to_hash_later;
497f2e6b 2025
5e833bc4 2026 /* Allocate extended descriptor */
497f2e6b
LN
2027 edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
2028 if (IS_ERR(edesc))
2029 return PTR_ERR(edesc);
2030
2031 edesc->desc.hdr = ctx->desc_hdr_template;
2032
2033 /* On last one, request SEC to pad; otherwise continue */
2034 if (req_ctx->last)
2035 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
2036 else
2037 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;
2038
60f208d7
KP
2039 /* request SEC to INIT hash. */
2040 if (req_ctx->first && !req_ctx->swinit)
497f2e6b
LN
2041 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;
2042
2043 /* When the tfm context has a keylen, it's an HMAC.
 2044 * A first or last (i.e. not middle) descriptor must request HMAC.
2045 */
2046 if (ctx->keylen && (req_ctx->first || req_ctx->last))
2047 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;
2048
bb9ade52 2049 return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, ahash_done);
497f2e6b
LN
2050}
2051
2052static int ahash_update(struct ahash_request *areq)
2053{
2054 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2055
2056 req_ctx->last = 0;
2057
2058 return ahash_process_req(areq, areq->nbytes);
2059}
2060
2061static int ahash_final(struct ahash_request *areq)
2062{
2063 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2064
2065 req_ctx->last = 1;
2066
2067 return ahash_process_req(areq, 0);
2068}
2069
2070static int ahash_finup(struct ahash_request *areq)
2071{
2072 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2073
2074 req_ctx->last = 1;
2075
2076 return ahash_process_req(areq, areq->nbytes);
2077}
2078
2079static int ahash_digest(struct ahash_request *areq)
2080{
2081 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
60f208d7 2082 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
497f2e6b 2083
60f208d7 2084 ahash->init(areq);
497f2e6b
LN
2085 req_ctx->last = 1;
2086
2087 return ahash_process_req(areq, areq->nbytes);
2088}
2089
3639ca84
HG
2090static int ahash_export(struct ahash_request *areq, void *out)
2091{
2092 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2093 struct talitos_export_state *export = out;
2094
2095 memcpy(export->hw_context, req_ctx->hw_context,
2096 req_ctx->hw_context_size);
3c0dd190 2097 memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
3639ca84
HG
2098 export->swinit = req_ctx->swinit;
2099 export->first = req_ctx->first;
2100 export->last = req_ctx->last;
2101 export->to_hash_later = req_ctx->to_hash_later;
2102 export->nbuf = req_ctx->nbuf;
2103
2104 return 0;
2105}
2106
2107static int ahash_import(struct ahash_request *areq, const void *in)
2108{
2109 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2110 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2111 const struct talitos_export_state *export = in;
49f9783b 2112 unsigned int size;
3639ca84
HG
2113
2114 memset(req_ctx, 0, sizeof(*req_ctx));
49f9783b 2115 size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
3639ca84
HG
2116 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2117 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
49f9783b 2118 req_ctx->hw_context_size = size;
49f9783b 2119 memcpy(req_ctx->hw_context, export->hw_context, size);
3c0dd190 2120 memcpy(req_ctx->buf[0], export->buf, export->nbuf);
3639ca84
HG
2121 req_ctx->swinit = export->swinit;
2122 req_ctx->first = export->first;
2123 req_ctx->last = export->last;
2124 req_ctx->to_hash_later = export->to_hash_later;
2125 req_ctx->nbuf = export->nbuf;
2126
2127 return 0;
2128}
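/*
 * export/import serialize the intermediate hardware hash context plus any
 * buffered partial block, so a partially hashed request can be saved and
 * resumed later.
 */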
2129
79b3a418
LN
2130static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
2131 u8 *hash)
2132{
2133 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2134
2135 struct scatterlist sg[1];
2136 struct ahash_request *req;
f1c90ac3 2137 struct crypto_wait wait;
79b3a418
LN
2138 int ret;
2139
f1c90ac3 2140 crypto_init_wait(&wait);
79b3a418
LN
2141
2142 req = ahash_request_alloc(tfm, GFP_KERNEL);
2143 if (!req)
2144 return -ENOMEM;
2145
2146 /* Keep tfm keylen == 0 during hash of the long key */
2147 ctx->keylen = 0;
2148 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
f1c90ac3 2149 crypto_req_done, &wait);
79b3a418
LN
2150
2151 sg_init_one(&sg[0], key, keylen);
2152
2153 ahash_request_set_crypt(req, sg, hash, keylen);
f1c90ac3
GBY
2154 ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
2155
79b3a418
LN
2156 ahash_request_free(req);
2157
2158 return ret;
2159}
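/*
 * Per the HMAC construction, keys longer than the block size are first
 * digested; ahash_setkey() below then uses that digest as the effective
 * key.
 */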
2160
2161static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2162 unsigned int keylen)
2163{
2164 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2e13ce08 2165 struct device *dev = ctx->dev;
79b3a418
LN
2166 unsigned int blocksize =
2167 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2168 unsigned int digestsize = crypto_ahash_digestsize(tfm);
2169 unsigned int keysize = keylen;
2170 u8 hash[SHA512_DIGEST_SIZE];
2171 int ret;
2172
2173 if (keylen <= blocksize)
2174 memcpy(ctx->key, key, keysize);
2175 else {
2176 /* Must get the hash of the long key */
2177 ret = keyhash(tfm, key, keylen, hash);
2178
2179 if (ret) {
2180 crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2181 return -EINVAL;
2182 }
2183
2184 keysize = digestsize;
2185 memcpy(ctx->key, hash, digestsize);
2186 }
2187
2e13ce08
LC
2188 if (ctx->keylen)
2189 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
2190
79b3a418 2191 ctx->keylen = keysize;
2e13ce08 2192 ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
79b3a418
LN
2193
2194 return 0;
2195}
2196
2197
9c4a7965 2198struct talitos_alg_template {
d5e4aaef 2199 u32 type;
b0057763 2200 u32 priority;
d5e4aaef
LN
2201 union {
2202 struct crypto_alg crypto;
acbf7c62 2203 struct ahash_alg hash;
aeb4c132 2204 struct aead_alg aead;
d5e4aaef 2205 } alg;
9c4a7965
KP
2206 __be32 desc_hdr_template;
2207};
2208
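/*
 * desc_hdr_template encodes the descriptor type and the execution units an
 * algorithm needs; hw_supports() checks it against the capabilities read
 * from the device tree before the algorithm is registered.
 */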
2209static struct talitos_alg_template driver_algs[] = {
991155ba 2210 /* AEAD algorithms. These use a single-pass ipsec_esp descriptor */
d5e4aaef 2211 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2212 .alg.aead = {
2213 .base = {
2214 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2215 .cra_driver_name = "authenc-hmac-sha1-"
2a781f54 2216 "cbc-aes-talitos-hsna",
aeb4c132
HX
2217 .cra_blocksize = AES_BLOCK_SIZE,
2218 .cra_flags = CRYPTO_ALG_ASYNC,
2219 },
2220 .ivsize = AES_BLOCK_SIZE,
2221 .maxauthsize = SHA1_DIGEST_SIZE,
56af8cd4 2222 },
9c4a7965
KP
2223 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2224 DESC_HDR_SEL0_AESU |
2225 DESC_HDR_MODE0_AESU_CBC |
2226 DESC_HDR_SEL1_MDEUA |
2227 DESC_HDR_MODE1_MDEU_INIT |
2228 DESC_HDR_MODE1_MDEU_PAD |
2229 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
70bcaca7 2230 },
7405c8d7
LC
2231 { .type = CRYPTO_ALG_TYPE_AEAD,
2232 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2233 .alg.aead = {
2234 .base = {
2235 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2236 .cra_driver_name = "authenc-hmac-sha1-"
2237 "cbc-aes-talitos",
2238 .cra_blocksize = AES_BLOCK_SIZE,
2239 .cra_flags = CRYPTO_ALG_ASYNC,
2240 },
2241 .ivsize = AES_BLOCK_SIZE,
2242 .maxauthsize = SHA1_DIGEST_SIZE,
2243 },
2244 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2245 DESC_HDR_SEL0_AESU |
2246 DESC_HDR_MODE0_AESU_CBC |
2247 DESC_HDR_SEL1_MDEUA |
2248 DESC_HDR_MODE1_MDEU_INIT |
2249 DESC_HDR_MODE1_MDEU_PAD |
2250 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2251 },
d5e4aaef 2252 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2253 .alg.aead = {
2254 .base = {
2255 .cra_name = "authenc(hmac(sha1),"
2256 "cbc(des3_ede))",
2257 .cra_driver_name = "authenc-hmac-sha1-"
2a781f54 2258 "cbc-3des-talitos-hsna",
aeb4c132
HX
2259 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2260 .cra_flags = CRYPTO_ALG_ASYNC,
2261 },
2262 .ivsize = DES3_EDE_BLOCK_SIZE,
2263 .maxauthsize = SHA1_DIGEST_SIZE,
56af8cd4 2264 },
70bcaca7
LN
2265 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2266 DESC_HDR_SEL0_DEU |
2267 DESC_HDR_MODE0_DEU_CBC |
2268 DESC_HDR_MODE0_DEU_3DES |
2269 DESC_HDR_SEL1_MDEUA |
2270 DESC_HDR_MODE1_MDEU_INIT |
2271 DESC_HDR_MODE1_MDEU_PAD |
2272 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
3952f17e 2273 },
7405c8d7
LC
2274 { .type = CRYPTO_ALG_TYPE_AEAD,
2275 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2276 .alg.aead = {
2277 .base = {
2278 .cra_name = "authenc(hmac(sha1),"
2279 "cbc(des3_ede))",
2280 .cra_driver_name = "authenc-hmac-sha1-"
2281 "cbc-3des-talitos",
2282 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2283 .cra_flags = CRYPTO_ALG_ASYNC,
2284 },
2285 .ivsize = DES3_EDE_BLOCK_SIZE,
2286 .maxauthsize = SHA1_DIGEST_SIZE,
2287 },
2288 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2289 DESC_HDR_SEL0_DEU |
2290 DESC_HDR_MODE0_DEU_CBC |
2291 DESC_HDR_MODE0_DEU_3DES |
2292 DESC_HDR_SEL1_MDEUA |
2293 DESC_HDR_MODE1_MDEU_INIT |
2294 DESC_HDR_MODE1_MDEU_PAD |
2295 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2296 },
357fb605 2297 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2298 .alg.aead = {
2299 .base = {
2300 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2301 .cra_driver_name = "authenc-hmac-sha224-"
2a781f54 2302 "cbc-aes-talitos-hsna",
aeb4c132
HX
2303 .cra_blocksize = AES_BLOCK_SIZE,
2304 .cra_flags = CRYPTO_ALG_ASYNC,
2305 },
2306 .ivsize = AES_BLOCK_SIZE,
2307 .maxauthsize = SHA224_DIGEST_SIZE,
357fb605
HG
2308 },
2309 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2310 DESC_HDR_SEL0_AESU |
2311 DESC_HDR_MODE0_AESU_CBC |
2312 DESC_HDR_SEL1_MDEUA |
2313 DESC_HDR_MODE1_MDEU_INIT |
2314 DESC_HDR_MODE1_MDEU_PAD |
2315 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2316 },
7405c8d7
LC
2317 { .type = CRYPTO_ALG_TYPE_AEAD,
2318 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2319 .alg.aead = {
2320 .base = {
2321 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2322 .cra_driver_name = "authenc-hmac-sha224-"
2323 "cbc-aes-talitos",
2324 .cra_blocksize = AES_BLOCK_SIZE,
2325 .cra_flags = CRYPTO_ALG_ASYNC,
2326 },
2327 .ivsize = AES_BLOCK_SIZE,
2328 .maxauthsize = SHA224_DIGEST_SIZE,
2329 },
2330 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2331 DESC_HDR_SEL0_AESU |
2332 DESC_HDR_MODE0_AESU_CBC |
2333 DESC_HDR_SEL1_MDEUA |
2334 DESC_HDR_MODE1_MDEU_INIT |
2335 DESC_HDR_MODE1_MDEU_PAD |
2336 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2337 },
357fb605 2338 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2339 .alg.aead = {
2340 .base = {
2341 .cra_name = "authenc(hmac(sha224),"
2342 "cbc(des3_ede))",
2343 .cra_driver_name = "authenc-hmac-sha224-"
2a781f54 2344 "cbc-3des-talitos-hsna",
aeb4c132
HX
2345 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2346 .cra_flags = CRYPTO_ALG_ASYNC,
2347 },
2348 .ivsize = DES3_EDE_BLOCK_SIZE,
2349 .maxauthsize = SHA224_DIGEST_SIZE,
357fb605
HG
2350 },
2351 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2352 DESC_HDR_SEL0_DEU |
2353 DESC_HDR_MODE0_DEU_CBC |
2354 DESC_HDR_MODE0_DEU_3DES |
2355 DESC_HDR_SEL1_MDEUA |
2356 DESC_HDR_MODE1_MDEU_INIT |
2357 DESC_HDR_MODE1_MDEU_PAD |
2358 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2359 },
7405c8d7
LC
2360 { .type = CRYPTO_ALG_TYPE_AEAD,
2361 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2362 .alg.aead = {
2363 .base = {
2364 .cra_name = "authenc(hmac(sha224),"
2365 "cbc(des3_ede))",
2366 .cra_driver_name = "authenc-hmac-sha224-"
2367 "cbc-3des-talitos",
2368 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2369 .cra_flags = CRYPTO_ALG_ASYNC,
2370 },
2371 .ivsize = DES3_EDE_BLOCK_SIZE,
2372 .maxauthsize = SHA224_DIGEST_SIZE,
2373 },
2374 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2375 DESC_HDR_SEL0_DEU |
2376 DESC_HDR_MODE0_DEU_CBC |
2377 DESC_HDR_MODE0_DEU_3DES |
2378 DESC_HDR_SEL1_MDEUA |
2379 DESC_HDR_MODE1_MDEU_INIT |
2380 DESC_HDR_MODE1_MDEU_PAD |
2381 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2382 },
d5e4aaef 2383 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2384 .alg.aead = {
2385 .base = {
2386 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2387 .cra_driver_name = "authenc-hmac-sha256-"
2a781f54 2388 "cbc-aes-talitos-hsna",
aeb4c132
HX
2389 .cra_blocksize = AES_BLOCK_SIZE,
2390 .cra_flags = CRYPTO_ALG_ASYNC,
2391 },
2392 .ivsize = AES_BLOCK_SIZE,
2393 .maxauthsize = SHA256_DIGEST_SIZE,
56af8cd4 2394 },
3952f17e
LN
2395 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2396 DESC_HDR_SEL0_AESU |
2397 DESC_HDR_MODE0_AESU_CBC |
2398 DESC_HDR_SEL1_MDEUA |
2399 DESC_HDR_MODE1_MDEU_INIT |
2400 DESC_HDR_MODE1_MDEU_PAD |
2401 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2402 },
7405c8d7
LC
2403 { .type = CRYPTO_ALG_TYPE_AEAD,
2404 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2405 .alg.aead = {
2406 .base = {
2407 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2408 .cra_driver_name = "authenc-hmac-sha256-"
2409 "cbc-aes-talitos",
2410 .cra_blocksize = AES_BLOCK_SIZE,
2411 .cra_flags = CRYPTO_ALG_ASYNC,
2412 },
2413 .ivsize = AES_BLOCK_SIZE,
2414 .maxauthsize = SHA256_DIGEST_SIZE,
2415 },
2416 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2417 DESC_HDR_SEL0_AESU |
2418 DESC_HDR_MODE0_AESU_CBC |
2419 DESC_HDR_SEL1_MDEUA |
2420 DESC_HDR_MODE1_MDEU_INIT |
2421 DESC_HDR_MODE1_MDEU_PAD |
2422 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2423 },
d5e4aaef 2424 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2425 .alg.aead = {
2426 .base = {
2427 .cra_name = "authenc(hmac(sha256),"
2428 "cbc(des3_ede))",
2429 .cra_driver_name = "authenc-hmac-sha256-"
2a781f54 2430 "cbc-3des-talitos-hsna",
aeb4c132
HX
2431 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2432 .cra_flags = CRYPTO_ALG_ASYNC,
2433 },
2434 .ivsize = DES3_EDE_BLOCK_SIZE,
2435 .maxauthsize = SHA256_DIGEST_SIZE,
56af8cd4 2436 },
3952f17e
LN
2437 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2438 DESC_HDR_SEL0_DEU |
2439 DESC_HDR_MODE0_DEU_CBC |
2440 DESC_HDR_MODE0_DEU_3DES |
2441 DESC_HDR_SEL1_MDEUA |
2442 DESC_HDR_MODE1_MDEU_INIT |
2443 DESC_HDR_MODE1_MDEU_PAD |
2444 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2445 },
7405c8d7
LC
2446 { .type = CRYPTO_ALG_TYPE_AEAD,
2447 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2448 .alg.aead = {
2449 .base = {
2450 .cra_name = "authenc(hmac(sha256),"
2451 "cbc(des3_ede))",
2452 .cra_driver_name = "authenc-hmac-sha256-"
2453 "cbc-3des-talitos",
2454 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2455 .cra_flags = CRYPTO_ALG_ASYNC,
2456 },
2457 .ivsize = DES3_EDE_BLOCK_SIZE,
2458 .maxauthsize = SHA256_DIGEST_SIZE,
2459 },
2460 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2461 DESC_HDR_SEL0_DEU |
2462 DESC_HDR_MODE0_DEU_CBC |
2463 DESC_HDR_MODE0_DEU_3DES |
2464 DESC_HDR_SEL1_MDEUA |
2465 DESC_HDR_MODE1_MDEU_INIT |
2466 DESC_HDR_MODE1_MDEU_PAD |
2467 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2468 },
d5e4aaef 2469 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2470 .alg.aead = {
2471 .base = {
2472 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2473 .cra_driver_name = "authenc-hmac-sha384-"
2474 "cbc-aes-talitos",
2475 .cra_blocksize = AES_BLOCK_SIZE,
2476 .cra_flags = CRYPTO_ALG_ASYNC,
2477 },
2478 .ivsize = AES_BLOCK_SIZE,
2479 .maxauthsize = SHA384_DIGEST_SIZE,
357fb605
HG
2480 },
2481 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2482 DESC_HDR_SEL0_AESU |
2483 DESC_HDR_MODE0_AESU_CBC |
2484 DESC_HDR_SEL1_MDEUB |
2485 DESC_HDR_MODE1_MDEU_INIT |
2486 DESC_HDR_MODE1_MDEU_PAD |
2487 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2488 },
2489 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2490 .alg.aead = {
2491 .base = {
2492 .cra_name = "authenc(hmac(sha384),"
2493 "cbc(des3_ede))",
2494 .cra_driver_name = "authenc-hmac-sha384-"
2495 "cbc-3des-talitos",
2496 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2497 .cra_flags = CRYPTO_ALG_ASYNC,
2498 },
2499 .ivsize = DES3_EDE_BLOCK_SIZE,
2500 .maxauthsize = SHA384_DIGEST_SIZE,
357fb605
HG
2501 },
2502 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2503 DESC_HDR_SEL0_DEU |
2504 DESC_HDR_MODE0_DEU_CBC |
2505 DESC_HDR_MODE0_DEU_3DES |
2506 DESC_HDR_SEL1_MDEUB |
2507 DESC_HDR_MODE1_MDEU_INIT |
2508 DESC_HDR_MODE1_MDEU_PAD |
2509 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2510 },
2511 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2512 .alg.aead = {
2513 .base = {
2514 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2515 .cra_driver_name = "authenc-hmac-sha512-"
2516 "cbc-aes-talitos",
2517 .cra_blocksize = AES_BLOCK_SIZE,
2518 .cra_flags = CRYPTO_ALG_ASYNC,
2519 },
2520 .ivsize = AES_BLOCK_SIZE,
2521 .maxauthsize = SHA512_DIGEST_SIZE,
357fb605
HG
2522 },
2523 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2524 DESC_HDR_SEL0_AESU |
2525 DESC_HDR_MODE0_AESU_CBC |
2526 DESC_HDR_SEL1_MDEUB |
2527 DESC_HDR_MODE1_MDEU_INIT |
2528 DESC_HDR_MODE1_MDEU_PAD |
2529 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2530 },
2531 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2532 .alg.aead = {
2533 .base = {
2534 .cra_name = "authenc(hmac(sha512),"
2535 "cbc(des3_ede))",
2536 .cra_driver_name = "authenc-hmac-sha512-"
2537 "cbc-3des-talitos",
2538 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2539 .cra_flags = CRYPTO_ALG_ASYNC,
2540 },
2541 .ivsize = DES3_EDE_BLOCK_SIZE,
2542 .maxauthsize = SHA512_DIGEST_SIZE,
357fb605
HG
2543 },
2544 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2545 DESC_HDR_SEL0_DEU |
2546 DESC_HDR_MODE0_DEU_CBC |
2547 DESC_HDR_MODE0_DEU_3DES |
2548 DESC_HDR_SEL1_MDEUB |
2549 DESC_HDR_MODE1_MDEU_INIT |
2550 DESC_HDR_MODE1_MDEU_PAD |
2551 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2552 },
2553 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2554 .alg.aead = {
2555 .base = {
2556 .cra_name = "authenc(hmac(md5),cbc(aes))",
2557 .cra_driver_name = "authenc-hmac-md5-"
2a781f54 2558 "cbc-aes-talitos-hsna",
aeb4c132
HX
2559 .cra_blocksize = AES_BLOCK_SIZE,
2560 .cra_flags = CRYPTO_ALG_ASYNC,
2561 },
2562 .ivsize = AES_BLOCK_SIZE,
2563 .maxauthsize = MD5_DIGEST_SIZE,
56af8cd4 2564 },
3952f17e
LN
2565 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2566 DESC_HDR_SEL0_AESU |
2567 DESC_HDR_MODE0_AESU_CBC |
2568 DESC_HDR_SEL1_MDEUA |
2569 DESC_HDR_MODE1_MDEU_INIT |
2570 DESC_HDR_MODE1_MDEU_PAD |
2571 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2572 },
7405c8d7
LC
2573 { .type = CRYPTO_ALG_TYPE_AEAD,
2574 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2575 .alg.aead = {
2576 .base = {
2577 .cra_name = "authenc(hmac(md5),cbc(aes))",
2578 .cra_driver_name = "authenc-hmac-md5-"
2579 "cbc-aes-talitos",
2580 .cra_blocksize = AES_BLOCK_SIZE,
2581 .cra_flags = CRYPTO_ALG_ASYNC,
2582 },
2583 .ivsize = AES_BLOCK_SIZE,
2584 .maxauthsize = MD5_DIGEST_SIZE,
2585 },
2586 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2587 DESC_HDR_SEL0_AESU |
2588 DESC_HDR_MODE0_AESU_CBC |
2589 DESC_HDR_SEL1_MDEUA |
2590 DESC_HDR_MODE1_MDEU_INIT |
2591 DESC_HDR_MODE1_MDEU_PAD |
2592 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2593 },
d5e4aaef 2594 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2595 .alg.aead = {
2596 .base = {
2597 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2598 .cra_driver_name = "authenc-hmac-md5-"
2a781f54 2599 "cbc-3des-talitos-hsna",
aeb4c132
HX
2600 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2601 .cra_flags = CRYPTO_ALG_ASYNC,
2602 },
2603 .ivsize = DES3_EDE_BLOCK_SIZE,
2604 .maxauthsize = MD5_DIGEST_SIZE,
56af8cd4 2605 },
3952f17e
LN
2606 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2607 DESC_HDR_SEL0_DEU |
2608 DESC_HDR_MODE0_DEU_CBC |
2609 DESC_HDR_MODE0_DEU_3DES |
2610 DESC_HDR_SEL1_MDEUA |
2611 DESC_HDR_MODE1_MDEU_INIT |
2612 DESC_HDR_MODE1_MDEU_PAD |
2613 DESC_HDR_MODE1_MDEU_MD5_HMAC,
4de9d0b5 2614 },
7405c8d7
LC
2615 { .type = CRYPTO_ALG_TYPE_AEAD,
2616 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2617 .alg.aead = {
2618 .base = {
2619 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2620 .cra_driver_name = "authenc-hmac-md5-"
2621 "cbc-3des-talitos",
2622 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2623 .cra_flags = CRYPTO_ALG_ASYNC,
2624 },
2625 .ivsize = DES3_EDE_BLOCK_SIZE,
2626 .maxauthsize = MD5_DIGEST_SIZE,
2627 },
2628 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2629 DESC_HDR_SEL0_DEU |
2630 DESC_HDR_MODE0_DEU_CBC |
2631 DESC_HDR_MODE0_DEU_3DES |
2632 DESC_HDR_SEL1_MDEUA |
2633 DESC_HDR_MODE1_MDEU_INIT |
2634 DESC_HDR_MODE1_MDEU_PAD |
2635 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2636 },
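	/*
	 * Most of the authenc() templates above are listed twice: once with
	 * the single-pass IPSEC_ESP descriptor and once with the
	 * HMAC_SNOOP_NO_AFEU descriptor, registered at
	 * TALITOS_CRA_PRIORITY_AEAD_HSNA.
	 */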
4de9d0b5 2637 /* ABLKCIPHER algorithms. */
5e75ae1b
LC
2638 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2639 .alg.crypto = {
2640 .cra_name = "ecb(aes)",
2641 .cra_driver_name = "ecb-aes-talitos",
2642 .cra_blocksize = AES_BLOCK_SIZE,
2643 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2644 CRYPTO_ALG_ASYNC,
2645 .cra_ablkcipher = {
2646 .min_keysize = AES_MIN_KEY_SIZE,
2647 .max_keysize = AES_MAX_KEY_SIZE,
2648 .ivsize = AES_BLOCK_SIZE,
2649 }
2650 },
2651 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2652 DESC_HDR_SEL0_AESU,
2653 },
d5e4aaef
LN
2654 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2655 .alg.crypto = {
4de9d0b5
LN
2656 .cra_name = "cbc(aes)",
2657 .cra_driver_name = "cbc-aes-talitos",
2658 .cra_blocksize = AES_BLOCK_SIZE,
2659 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2660 CRYPTO_ALG_ASYNC,
4de9d0b5 2661 .cra_ablkcipher = {
4de9d0b5
LN
2662 .min_keysize = AES_MIN_KEY_SIZE,
2663 .max_keysize = AES_MAX_KEY_SIZE,
2664 .ivsize = AES_BLOCK_SIZE,
2665 }
2666 },
2667 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2668 DESC_HDR_SEL0_AESU |
2669 DESC_HDR_MODE0_AESU_CBC,
2670 },
5e75ae1b
LC
2671 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2672 .alg.crypto = {
2673 .cra_name = "ctr(aes)",
2674 .cra_driver_name = "ctr-aes-talitos",
2675 .cra_blocksize = AES_BLOCK_SIZE,
2676 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2677 CRYPTO_ALG_ASYNC,
2678 .cra_ablkcipher = {
2679 .min_keysize = AES_MIN_KEY_SIZE,
2680 .max_keysize = AES_MAX_KEY_SIZE,
2681 .ivsize = AES_BLOCK_SIZE,
2682 }
2683 },
70d355cc 2684 .desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
5e75ae1b
LC
2685 DESC_HDR_SEL0_AESU |
2686 DESC_HDR_MODE0_AESU_CTR,
2687 },
2688 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2689 .alg.crypto = {
2690 .cra_name = "ecb(des)",
2691 .cra_driver_name = "ecb-des-talitos",
2692 .cra_blocksize = DES_BLOCK_SIZE,
2693 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2694 CRYPTO_ALG_ASYNC,
2695 .cra_ablkcipher = {
2696 .min_keysize = DES_KEY_SIZE,
2697 .max_keysize = DES_KEY_SIZE,
2698 .ivsize = DES_BLOCK_SIZE,
2699 }
2700 },
2701 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2702 DESC_HDR_SEL0_DEU,
2703 },
2704 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2705 .alg.crypto = {
2706 .cra_name = "cbc(des)",
2707 .cra_driver_name = "cbc-des-talitos",
2708 .cra_blocksize = DES_BLOCK_SIZE,
2709 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2710 CRYPTO_ALG_ASYNC,
2711 .cra_ablkcipher = {
2712 .min_keysize = DES_KEY_SIZE,
2713 .max_keysize = DES_KEY_SIZE,
2714 .ivsize = DES_BLOCK_SIZE,
2715 }
2716 },
2717 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2718 DESC_HDR_SEL0_DEU |
2719 DESC_HDR_MODE0_DEU_CBC,
2720 },
2721 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2722 .alg.crypto = {
2723 .cra_name = "ecb(des3_ede)",
2724 .cra_driver_name = "ecb-3des-talitos",
2725 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2726 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2727 CRYPTO_ALG_ASYNC,
2728 .cra_ablkcipher = {
2729 .min_keysize = DES3_EDE_KEY_SIZE,
2730 .max_keysize = DES3_EDE_KEY_SIZE,
2731 .ivsize = DES3_EDE_BLOCK_SIZE,
2732 }
2733 },
2734 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2735 DESC_HDR_SEL0_DEU |
2736 DESC_HDR_MODE0_DEU_3DES,
2737 },
d5e4aaef
LN
2738 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2739 .alg.crypto = {
4de9d0b5
LN
2740 .cra_name = "cbc(des3_ede)",
2741 .cra_driver_name = "cbc-3des-talitos",
2742 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2743 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2744 CRYPTO_ALG_ASYNC,
4de9d0b5 2745 .cra_ablkcipher = {
4de9d0b5
LN
2746 .min_keysize = DES3_EDE_KEY_SIZE,
2747 .max_keysize = DES3_EDE_KEY_SIZE,
2748 .ivsize = DES3_EDE_BLOCK_SIZE,
2749 }
2750 },
2751 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2752 DESC_HDR_SEL0_DEU |
2753 DESC_HDR_MODE0_DEU_CBC |
2754 DESC_HDR_MODE0_DEU_3DES,
497f2e6b
LN
2755 },
2756 /* AHASH algorithms. */
2757 { .type = CRYPTO_ALG_TYPE_AHASH,
2758 .alg.hash = {
497f2e6b 2759 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2760 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2761 .halg.base = {
2762 .cra_name = "md5",
2763 .cra_driver_name = "md5-talitos",
b3988618 2764 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
497f2e6b
LN
2765 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2766 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2767 }
2768 },
2769 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2770 DESC_HDR_SEL0_MDEUA |
2771 DESC_HDR_MODE0_MDEU_MD5,
2772 },
2773 { .type = CRYPTO_ALG_TYPE_AHASH,
2774 .alg.hash = {
497f2e6b 2775 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2776 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2777 .halg.base = {
2778 .cra_name = "sha1",
2779 .cra_driver_name = "sha1-talitos",
2780 .cra_blocksize = SHA1_BLOCK_SIZE,
2781 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2782 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2783 }
2784 },
2785 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2786 DESC_HDR_SEL0_MDEUA |
2787 DESC_HDR_MODE0_MDEU_SHA1,
2788 },
60f208d7
KP
2789 { .type = CRYPTO_ALG_TYPE_AHASH,
2790 .alg.hash = {
60f208d7 2791 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2792 .halg.statesize = sizeof(struct talitos_export_state),
60f208d7
KP
2793 .halg.base = {
2794 .cra_name = "sha224",
2795 .cra_driver_name = "sha224-talitos",
2796 .cra_blocksize = SHA224_BLOCK_SIZE,
2797 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2798 CRYPTO_ALG_ASYNC,
60f208d7
KP
2799 }
2800 },
2801 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2802 DESC_HDR_SEL0_MDEUA |
2803 DESC_HDR_MODE0_MDEU_SHA224,
2804 },
497f2e6b
LN
2805 { .type = CRYPTO_ALG_TYPE_AHASH,
2806 .alg.hash = {
497f2e6b 2807 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 2808 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2809 .halg.base = {
2810 .cra_name = "sha256",
2811 .cra_driver_name = "sha256-talitos",
2812 .cra_blocksize = SHA256_BLOCK_SIZE,
2813 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2814 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2815 }
2816 },
2817 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2818 DESC_HDR_SEL0_MDEUA |
2819 DESC_HDR_MODE0_MDEU_SHA256,
2820 },
2821 { .type = CRYPTO_ALG_TYPE_AHASH,
2822 .alg.hash = {
497f2e6b 2823 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 2824 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2825 .halg.base = {
2826 .cra_name = "sha384",
2827 .cra_driver_name = "sha384-talitos",
2828 .cra_blocksize = SHA384_BLOCK_SIZE,
2829 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2830 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2831 }
2832 },
2833 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2834 DESC_HDR_SEL0_MDEUB |
2835 DESC_HDR_MODE0_MDEUB_SHA384,
2836 },
2837 { .type = CRYPTO_ALG_TYPE_AHASH,
2838 .alg.hash = {
497f2e6b 2839 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 2840 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2841 .halg.base = {
2842 .cra_name = "sha512",
2843 .cra_driver_name = "sha512-talitos",
2844 .cra_blocksize = SHA512_BLOCK_SIZE,
2845 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2846 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2847 }
2848 },
2849 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2850 DESC_HDR_SEL0_MDEUB |
2851 DESC_HDR_MODE0_MDEUB_SHA512,
2852 },
79b3a418
LN
2853 { .type = CRYPTO_ALG_TYPE_AHASH,
2854 .alg.hash = {
79b3a418 2855 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2856 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2857 .halg.base = {
2858 .cra_name = "hmac(md5)",
2859 .cra_driver_name = "hmac-md5-talitos",
b3988618 2860 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
79b3a418
LN
2861 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2862 CRYPTO_ALG_ASYNC,
79b3a418
LN
2863 }
2864 },
2865 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2866 DESC_HDR_SEL0_MDEUA |
2867 DESC_HDR_MODE0_MDEU_MD5,
2868 },
2869 { .type = CRYPTO_ALG_TYPE_AHASH,
2870 .alg.hash = {
79b3a418 2871 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2872 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2873 .halg.base = {
2874 .cra_name = "hmac(sha1)",
2875 .cra_driver_name = "hmac-sha1-talitos",
2876 .cra_blocksize = SHA1_BLOCK_SIZE,
2877 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2878 CRYPTO_ALG_ASYNC,
79b3a418
LN
2879 }
2880 },
2881 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2882 DESC_HDR_SEL0_MDEUA |
2883 DESC_HDR_MODE0_MDEU_SHA1,
2884 },
2885 { .type = CRYPTO_ALG_TYPE_AHASH,
2886 .alg.hash = {
79b3a418 2887 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2888 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2889 .halg.base = {
2890 .cra_name = "hmac(sha224)",
2891 .cra_driver_name = "hmac-sha224-talitos",
2892 .cra_blocksize = SHA224_BLOCK_SIZE,
2893 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2894 CRYPTO_ALG_ASYNC,
79b3a418
LN
2895 }
2896 },
2897 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2898 DESC_HDR_SEL0_MDEUA |
2899 DESC_HDR_MODE0_MDEU_SHA224,
2900 },
2901 { .type = CRYPTO_ALG_TYPE_AHASH,
2902 .alg.hash = {
79b3a418 2903 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 2904 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2905 .halg.base = {
2906 .cra_name = "hmac(sha256)",
2907 .cra_driver_name = "hmac-sha256-talitos",
2908 .cra_blocksize = SHA256_BLOCK_SIZE,
2909 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2910 CRYPTO_ALG_ASYNC,
79b3a418
LN
2911 }
2912 },
2913 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2914 DESC_HDR_SEL0_MDEUA |
2915 DESC_HDR_MODE0_MDEU_SHA256,
2916 },
2917 { .type = CRYPTO_ALG_TYPE_AHASH,
2918 .alg.hash = {
79b3a418 2919 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 2920 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2921 .halg.base = {
2922 .cra_name = "hmac(sha384)",
2923 .cra_driver_name = "hmac-sha384-talitos",
2924 .cra_blocksize = SHA384_BLOCK_SIZE,
2925 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2926 CRYPTO_ALG_ASYNC,
79b3a418
LN
2927 }
2928 },
2929 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2930 DESC_HDR_SEL0_MDEUB |
2931 DESC_HDR_MODE0_MDEUB_SHA384,
2932 },
2933 { .type = CRYPTO_ALG_TYPE_AHASH,
2934 .alg.hash = {
79b3a418 2935 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 2936 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2937 .halg.base = {
2938 .cra_name = "hmac(sha512)",
2939 .cra_driver_name = "hmac-sha512-talitos",
2940 .cra_blocksize = SHA512_BLOCK_SIZE,
2941 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2942 CRYPTO_ALG_ASYNC,
79b3a418
LN
2943 }
2944 },
2945 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2946 DESC_HDR_SEL0_MDEUB |
2947 DESC_HDR_MODE0_MDEUB_SHA512,
2948 }
9c4a7965
KP
2949};
2950
2951struct talitos_crypto_alg {
2952 struct list_head entry;
2953 struct device *dev;
acbf7c62 2954 struct talitos_alg_template algt;
9c4a7965
KP
2955};
2956
89d124cb
JE
2957static int talitos_init_common(struct talitos_ctx *ctx,
2958 struct talitos_crypto_alg *talitos_alg)
9c4a7965 2959{
5228f0f7 2960 struct talitos_private *priv;
9c4a7965
KP
2961
2962 /* update context with ptr to dev */
2963 ctx->dev = talitos_alg->dev;
19bbbc63 2964
5228f0f7
KP
2965 /* assign SEC channel to tfm in round-robin fashion */
2966 priv = dev_get_drvdata(ctx->dev);
2967 ctx->ch = atomic_inc_return(&priv->last_chan) &
2968 (priv->num_channels - 1);
2969
9c4a7965 2970 /* copy descriptor header template value */
acbf7c62 2971 ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
9c4a7965 2972
602dba5a
KP
2973 /* select done notification */
2974 ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
2975
497f2e6b
LN
2976 return 0;
2977}
2978
89d124cb
JE
2979static int talitos_cra_init(struct crypto_tfm *tfm)
2980{
2981 struct crypto_alg *alg = tfm->__crt_alg;
2982 struct talitos_crypto_alg *talitos_alg;
2983 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
2984
2985 if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_AHASH)
2986 talitos_alg = container_of(__crypto_ahash_alg(alg),
2987 struct talitos_crypto_alg,
2988 algt.alg.hash);
2989 else
2990 talitos_alg = container_of(alg, struct talitos_crypto_alg,
2991 algt.alg.crypto);
2992
2993 return talitos_init_common(ctx, talitos_alg);
2994}
2995
aeb4c132 2996static int talitos_cra_init_aead(struct crypto_aead *tfm)
497f2e6b 2997{
89d124cb
JE
2998 struct aead_alg *alg = crypto_aead_alg(tfm);
2999 struct talitos_crypto_alg *talitos_alg;
3000 struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
3001
3002 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3003 algt.alg.aead);
3004
3005 return talitos_init_common(ctx, talitos_alg);
9c4a7965
KP
3006}
3007
497f2e6b
LN
3008static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3009{
3010 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3011
3012 talitos_cra_init(tfm);
3013
3014 ctx->keylen = 0;
3015 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3016 sizeof(struct talitos_ahash_req_ctx));
3017
3018 return 0;
3019}
3020
2e13ce08
LC
3021static void talitos_cra_exit(struct crypto_tfm *tfm)
3022{
3023 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3024 struct device *dev = ctx->dev;
3025
3026 if (ctx->keylen)
3027 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3028}
3029
9c4a7965
KP
3030/*
3031 * given the alg's descriptor header template, determine whether descriptor
3032 * type and primary/secondary execution units required match the hw
3033 * capabilities description provided in the device tree node.
3034 */
3035static int hw_supports(struct device *dev, __be32 desc_hdr_template)
3036{
3037 struct talitos_private *priv = dev_get_drvdata(dev);
3038 int ret;
3039
3040 ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
3041 (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
3042
3043 if (SECONDARY_EU(desc_hdr_template))
3044 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
3045 & priv->exec_units);
3046
3047 return ret;
3048}
3049
2dc11581 3050static int talitos_remove(struct platform_device *ofdev)
9c4a7965
KP
3051{
3052 struct device *dev = &ofdev->dev;
3053 struct talitos_private *priv = dev_get_drvdata(dev);
3054 struct talitos_crypto_alg *t_alg, *n;
3055 int i;
3056
3057 list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
acbf7c62
LN
3058 switch (t_alg->algt.type) {
3059 case CRYPTO_ALG_TYPE_ABLKCIPHER:
acbf7c62 3060 break;
aeb4c132
HX
3061 case CRYPTO_ALG_TYPE_AEAD:
 3062 crypto_unregister_aead(&t_alg->algt.alg.aead);
			break; /* without this, the AEAD entry falls through and is also passed to crypto_unregister_ahash() */
acbf7c62
LN
3063 case CRYPTO_ALG_TYPE_AHASH:
3064 crypto_unregister_ahash(&t_alg->algt.alg.hash);
3065 break;
3066 }
9c4a7965 3067 list_del(&t_alg->entry);
9c4a7965
KP
3068 }
3069
3070 if (hw_supports(dev, DESC_HDR_SEL0_RNG))
3071 talitos_unregister_rng(dev);
3072
c3e337f8 3073 for (i = 0; i < 2; i++)
2cdba3cf 3074 if (priv->irq[i]) {
c3e337f8
KP
3075 free_irq(priv->irq[i], dev);
3076 irq_dispose_mapping(priv->irq[i]);
3077 }
9c4a7965 3078
c3e337f8 3079 tasklet_kill(&priv->done_task[0]);
2cdba3cf 3080 if (priv->irq[1])
c3e337f8 3081 tasklet_kill(&priv->done_task[1]);
9c4a7965 3082
9c4a7965
KP
3083 return 0;
3084}
3085
3086static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
3087 struct talitos_alg_template
3088 *template)
3089{
60f208d7 3090 struct talitos_private *priv = dev_get_drvdata(dev);
9c4a7965
KP
3091 struct talitos_crypto_alg *t_alg;
3092 struct crypto_alg *alg;
3093
24b92ff2
LC
3094 t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
3095 GFP_KERNEL);
9c4a7965
KP
3096 if (!t_alg)
3097 return ERR_PTR(-ENOMEM);
3098
acbf7c62
LN
3099 t_alg->algt = *template;
3100
3101 switch (t_alg->algt.type) {
3102 case CRYPTO_ALG_TYPE_ABLKCIPHER:
497f2e6b
LN
3103 alg = &t_alg->algt.alg.crypto;
3104 alg->cra_init = talitos_cra_init;
2e13ce08 3105 alg->cra_exit = talitos_cra_exit;
d4cd3283 3106 alg->cra_type = &crypto_ablkcipher_type;
b286e003
KP
3107 alg->cra_ablkcipher.setkey = ablkcipher_setkey;
3108 alg->cra_ablkcipher.encrypt = ablkcipher_encrypt;
3109 alg->cra_ablkcipher.decrypt = ablkcipher_decrypt;
3110 alg->cra_ablkcipher.geniv = "eseqiv";
497f2e6b 3111 break;
acbf7c62 3112 case CRYPTO_ALG_TYPE_AEAD:
aeb4c132 3113 alg = &t_alg->algt.alg.aead.base;
2e13ce08 3114 alg->cra_exit = talitos_cra_exit;
aeb4c132
HX
3115 t_alg->algt.alg.aead.init = talitos_cra_init_aead;
3116 t_alg->algt.alg.aead.setkey = aead_setkey;
3117 t_alg->algt.alg.aead.encrypt = aead_encrypt;
3118 t_alg->algt.alg.aead.decrypt = aead_decrypt;
6cda075a
LC
3119 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
3120 !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
24b92ff2 3121 devm_kfree(dev, t_alg);
6cda075a
LC
3122 return ERR_PTR(-ENOTSUPP);
3123 }
acbf7c62
LN
3124 break;
3125 case CRYPTO_ALG_TYPE_AHASH:
3126 alg = &t_alg->algt.alg.hash.halg.base;
497f2e6b 3127 alg->cra_init = talitos_cra_init_ahash;
9d7c9e2a 3128 alg->cra_exit = talitos_cra_exit;
d4cd3283 3129 alg->cra_type = &crypto_ahash_type;
b286e003
KP
3130 t_alg->algt.alg.hash.init = ahash_init;
3131 t_alg->algt.alg.hash.update = ahash_update;
3132 t_alg->algt.alg.hash.final = ahash_final;
3133 t_alg->algt.alg.hash.finup = ahash_finup;
3134 t_alg->algt.alg.hash.digest = ahash_digest;
56136631
LC
3135 if (!strncmp(alg->cra_name, "hmac", 4))
3136 t_alg->algt.alg.hash.setkey = ahash_setkey;
3639ca84
HG
3137 t_alg->algt.alg.hash.import = ahash_import;
3138 t_alg->algt.alg.hash.export = ahash_export;
b286e003 3139
79b3a418 3140 if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
0b2730d8 3141 !strncmp(alg->cra_name, "hmac", 4)) {
24b92ff2 3142 devm_kfree(dev, t_alg);
79b3a418 3143 return ERR_PTR(-ENOTSUPP);
0b2730d8 3144 }
60f208d7 3145 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
79b3a418
LN
3146 (!strcmp(alg->cra_name, "sha224") ||
3147 !strcmp(alg->cra_name, "hmac(sha224)"))) {
60f208d7
KP
3148 t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
3149 t_alg->algt.desc_hdr_template =
3150 DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3151 DESC_HDR_SEL0_MDEUA |
3152 DESC_HDR_MODE0_MDEU_SHA256;
3153 }
497f2e6b 3154 break;
1d11911a
KP
3155 default:
3156 dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
24b92ff2 3157 devm_kfree(dev, t_alg);
1d11911a 3158 return ERR_PTR(-EINVAL);
acbf7c62 3159 }
9c4a7965 3160
9c4a7965 3161 alg->cra_module = THIS_MODULE;
b0057763
LC
3162 if (t_alg->algt.priority)
3163 alg->cra_priority = t_alg->algt.priority;
3164 else
3165 alg->cra_priority = TALITOS_CRA_PRIORITY;
72c104d6
CL
3166 if (has_ftr_sec1(priv))
3167 alg->cra_alignmask = 3;
3168 else
3169 alg->cra_alignmask = 0;
9c4a7965 3170 alg->cra_ctxsize = sizeof(struct talitos_ctx);
d912bb76 3171 alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;
9c4a7965 3172
9c4a7965
KP
3173 t_alg->dev = dev;
3174
3175 return t_alg;
3176}
3177
c3e337f8
KP
3178static int talitos_probe_irq(struct platform_device *ofdev)
3179{
3180 struct device *dev = &ofdev->dev;
3181 struct device_node *np = ofdev->dev.of_node;
3182 struct talitos_private *priv = dev_get_drvdata(dev);
3183 int err;
dd3c0987 3184 bool is_sec1 = has_ftr_sec1(priv);
c3e337f8
KP
3185
3186 priv->irq[0] = irq_of_parse_and_map(np, 0);
2cdba3cf 3187 if (!priv->irq[0]) {
c3e337f8
KP
3188 dev_err(dev, "failed to map irq\n");
3189 return -EINVAL;
3190 }
dd3c0987
LC
3191 if (is_sec1) {
3192 err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
3193 dev_driver_string(dev), dev);
3194 goto primary_out;
3195 }
c3e337f8
KP
3196
3197 priv->irq[1] = irq_of_parse_and_map(np, 1);
3198
3199 /* get the primary irq line */
2cdba3cf 3200 if (!priv->irq[1]) {
dd3c0987 3201 err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
c3e337f8
KP
3202 dev_driver_string(dev), dev);
3203 goto primary_out;
3204 }
3205
dd3c0987 3206 err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
c3e337f8
KP
3207 dev_driver_string(dev), dev);
3208 if (err)
3209 goto primary_out;
3210
3211 /* get the secondary irq line */
dd3c0987 3212 err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
c3e337f8
KP
3213 dev_driver_string(dev), dev);
3214 if (err) {
3215 dev_err(dev, "failed to request secondary irq\n");
3216 irq_dispose_mapping(priv->irq[1]);
2cdba3cf 3217 priv->irq[1] = 0;
c3e337f8
KP
3218 }
3219
3220 return err;
3221
3222primary_out:
3223 if (err) {
3224 dev_err(dev, "failed to request primary irq\n");
3225 irq_dispose_mapping(priv->irq[0]);
2cdba3cf 3226 priv->irq[0] = 0;
c3e337f8
KP
3227 }
3228
3229 return err;
3230}
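/*
 * SEC1 devices use one interrupt line for all four channels.  SEC2+ may
 * provide a second line, in which case channels 0/2 and 1/3 are split
 * across irq[0] and irq[1] and talitos_probe() sets up two tasklets.
 */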
3231
1c48a5c9 3232static int talitos_probe(struct platform_device *ofdev)
9c4a7965
KP
3233{
3234 struct device *dev = &ofdev->dev;
61c7a080 3235 struct device_node *np = ofdev->dev.of_node;
9c4a7965 3236 struct talitos_private *priv;
9c4a7965 3237 int i, err;
5fa7fa14 3238 int stride;
fd5ea7f0 3239 struct resource *res;
9c4a7965 3240
24b92ff2 3241 priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
9c4a7965
KP
3242 if (!priv)
3243 return -ENOMEM;
3244
f3de9cb1
KH
3245 INIT_LIST_HEAD(&priv->alg_list);
3246
9c4a7965
KP
3247 dev_set_drvdata(dev, priv);
3248
3249 priv->ofdev = ofdev;
3250
511d63cb
HG
3251 spin_lock_init(&priv->reg_lock);
3252
fd5ea7f0
LC
3253 res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
3254 if (!res)
3255 return -ENXIO;
3256 priv->reg = devm_ioremap(dev, res->start, resource_size(res));
9c4a7965
KP
3257 if (!priv->reg) {
3258 dev_err(dev, "failed to of_iomap\n");
3259 err = -ENOMEM;
3260 goto err_out;
3261 }
3262
3263 /* get SEC version capabilities from device tree */
fa14c6cf
LC
3264 of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
3265 of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
3266 of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
3267 of_property_read_u32(np, "fsl,descriptor-types-mask",
3268 &priv->desc_types);
9c4a7965
KP
3269
3270 if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
3271 !priv->exec_units || !priv->desc_types) {
3272 dev_err(dev, "invalid property data in device tree node\n");
3273 err = -EINVAL;
3274 goto err_out;
3275 }
3276
f3c85bc1
LN
3277 if (of_device_is_compatible(np, "fsl,sec3.0"))
3278 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
3279
fe5720e2 3280 if (of_device_is_compatible(np, "fsl,sec2.1"))
60f208d7 3281 priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
79b3a418
LN
3282 TALITOS_FTR_SHA224_HWINIT |
3283 TALITOS_FTR_HMAC_OK;
fe5720e2 3284
21590888
LC
3285 if (of_device_is_compatible(np, "fsl,sec1.0"))
3286 priv->features |= TALITOS_FTR_SEC1;
3287
5fa7fa14
LC
3288 if (of_device_is_compatible(np, "fsl,sec1.2")) {
3289 priv->reg_deu = priv->reg + TALITOS12_DEU;
3290 priv->reg_aesu = priv->reg + TALITOS12_AESU;
3291 priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
3292 stride = TALITOS1_CH_STRIDE;
3293 } else if (of_device_is_compatible(np, "fsl,sec1.0")) {
3294 priv->reg_deu = priv->reg + TALITOS10_DEU;
3295 priv->reg_aesu = priv->reg + TALITOS10_AESU;
3296 priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
3297 priv->reg_afeu = priv->reg + TALITOS10_AFEU;
3298 priv->reg_rngu = priv->reg + TALITOS10_RNGU;
3299 priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
3300 stride = TALITOS1_CH_STRIDE;
3301 } else {
3302 priv->reg_deu = priv->reg + TALITOS2_DEU;
3303 priv->reg_aesu = priv->reg + TALITOS2_AESU;
3304 priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
3305 priv->reg_afeu = priv->reg + TALITOS2_AFEU;
3306 priv->reg_rngu = priv->reg + TALITOS2_RNGU;
3307 priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
3308 priv->reg_keu = priv->reg + TALITOS2_KEU;
3309 priv->reg_crcu = priv->reg + TALITOS2_CRCU;
3310 stride = TALITOS2_CH_STRIDE;
3311 }
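The ladder above only records the execution-unit offsets and the channel stride; the per-channel register windows are derived further down as priv->reg + stride * (i + 1) (with TALITOS_CH_BASE_OFFSET added for some channels, as the loop below shows). A hedged sketch of that address arithmetic; example_channel_base() is an illustrative helper, not a function in this driver:

/* Sketch: channel MMIO windows sit one stride apart above the global block. */
static void __iomem *example_channel_base(struct talitos_private *priv,
					  int stride, int ch)
{
	return priv->reg + stride * (ch + 1);
}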
3312
dd3c0987
LC
3313 err = talitos_probe_irq(ofdev);
3314 if (err)
3315 goto err_out;
3316
3317 if (of_device_is_compatible(np, "fsl,sec1.0")) {
9c02e285
LC
3318 if (priv->num_channels == 1)
3319 tasklet_init(&priv->done_task[0], talitos1_done_ch0,
dd3c0987 3320 (unsigned long)dev);
9c02e285
LC
3321 else
3322 tasklet_init(&priv->done_task[0], talitos1_done_4ch,
3323 (unsigned long)dev);
3324 } else {
3325 if (priv->irq[1]) {
dd3c0987
LC
3326 tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
3327 (unsigned long)dev);
3328 tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
3329 (unsigned long)dev);
9c02e285
LC
3330 } else if (priv->num_channels == 1) {
3331 tasklet_init(&priv->done_task[0], talitos2_done_ch0,
3332 (unsigned long)dev);
3333 } else {
3334 tasklet_init(&priv->done_task[0], talitos2_done_4ch,
3335 (unsigned long)dev);
dd3c0987
LC
3336 }
3337 }
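Completion handling is split between the hard interrupt and a tasklet: the ISR acknowledges the hardware and schedules the matching done tasklet, which later walks the channel FIFOs in softirq context, which is why probe selects a tasklet variant here. A minimal sketch of that hand-off with placeholder names (example_*), not the driver's actual handlers:

/* Sketch: deferring completion work from the IRQ handler to a tasklet. */
static struct tasklet_struct example_done_task;

static void example_done(unsigned long data)
{
	struct device *dev = (struct device *)data;

	dev_dbg(dev, "draining completed descriptors\n");
}

static irqreturn_t example_isr(int irq, void *data)
{
	/* ack/mask the interrupt in hardware, then defer the real work */
	tasklet_schedule(&example_done_task);
	return IRQ_HANDLED;
}

/* during probe: tasklet_init(&example_done_task, example_done,
 *			      (unsigned long)dev);
 */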
3338
24b92ff2
LC
3339 priv->chan = devm_kzalloc(dev, sizeof(struct talitos_channel) *
3340 priv->num_channels, GFP_KERNEL);
4b992628
KP
3341 if (!priv->chan) {
3342 dev_err(dev, "failed to allocate channel management space\n");
9c4a7965
KP
3343 err = -ENOMEM;
3344 goto err_out;
3345 }
3346
f641dddd
MH
3347 priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);
3348
c3e337f8 3349 for (i = 0; i < priv->num_channels; i++) {
5fa7fa14 3350 priv->chan[i].reg = priv->reg + stride * (i + 1);
2cdba3cf 3351 if (!priv->irq[1] || !(i & 1))
c3e337f8 3352 priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;
ad42d5fc 3353
4b992628
KP
3354 spin_lock_init(&priv->chan[i].head_lock);
3355 spin_lock_init(&priv->chan[i].tail_lock);
9c4a7965 3356
24b92ff2
LC
3357 priv->chan[i].fifo = devm_kzalloc(dev,
3358 sizeof(struct talitos_request) *
3359 priv->fifo_len, GFP_KERNEL);
4b992628 3360 if (!priv->chan[i].fifo) {
9c4a7965
KP
3361 dev_err(dev, "failed to allocate request fifo %d\n", i);
3362 err = -ENOMEM;
3363 goto err_out;
3364 }
9c4a7965 3365
4b992628
KP
3366 atomic_set(&priv->chan[i].submit_count,
3367 -(priv->chfifo_len - 1));
f641dddd 3368 }
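Priming submit_count with -(chfifo_len - 1) is how per-channel FIFO headroom is tracked: submissions step the counter up and completions step it back down, so once it climbs back to zero the channel is treated as full and at most chfifo_len - 1 requests stay outstanding. A hedged sketch of one way to implement that check with a standalone counter (an 8-entry FIFO is assumed; this is not a copy of the driver's submit path):

/* Sketch: negative-primed counter; reaching zero means "FIFO full". */
static atomic_t example_count = ATOMIC_INIT(-(8 - 1));

static int example_submit(void)
{
	if (!atomic_inc_not_zero(&example_count))
		return -EAGAIN;		/* counter already at zero: full */
	/* ... write the descriptor to the channel ... */
	return 0;
}

static void example_complete(void)
{
	atomic_dec(&example_count);	/* a slot was freed */
}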
9c4a7965 3369
81eb024c
KP
3370 dma_set_mask(dev, DMA_BIT_MASK(36));
3371
9c4a7965
KP
3372 /* reset and initialize the h/w */
3373 err = init_device(dev);
3374 if (err) {
3375 dev_err(dev, "failed to initialize device\n");
3376 goto err_out;
3377 }
3378
3379 /* register the RNG, if available */
3380 if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
3381 err = talitos_register_rng(dev);
3382 if (err) {
3383 dev_err(dev, "failed to register hwrng: %d\n", err);
3384 goto err_out;
3385 } else
3386 dev_info(dev, "hwrng\n");
3387 }
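talitos_register_rng() (earlier in this file) plugs the SEC's RNG execution unit into the kernel's hw_random framework via hwrng_register(). The sketch below only shows the general shape of such a registration, using the generic .read callback and placeholder names; the driver's own struct hwrng callbacks differ:

/* Sketch only: generic hwrng registration shape (placeholder names). */
static int example_rng_read(struct hwrng *rng, void *buf, size_t max, bool wait)
{
	/* copy up to 'max' bytes of entropy into 'buf' here */
	return 0;	/* number of bytes produced */
}

static struct hwrng example_rng = {
	.name = "example-rng",
	.read = example_rng_read,
};

/* err = hwrng_register(&example_rng);
 * ...
 * hwrng_unregister(&example_rng);
 */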
3388
3389 /* register crypto algorithms the device supports */
9c4a7965
KP
3390 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3391 if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
3392 struct talitos_crypto_alg *t_alg;
aeb4c132 3393 struct crypto_alg *alg = NULL;
9c4a7965
KP
3394
3395 t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
3396 if (IS_ERR(t_alg)) {
3397 err = PTR_ERR(t_alg);
0b2730d8 3398 if (err == -ENOTSUPP)
79b3a418 3399 continue;
9c4a7965
KP
3400 goto err_out;
3401 }
3402
acbf7c62
LN
3403 switch (t_alg->algt.type) {
3404 case CRYPTO_ALG_TYPE_ABLKCIPHER:
acbf7c62
LN
3405 err = crypto_register_alg(
3406 &t_alg->algt.alg.crypto);
aeb4c132 3407 alg = &t_alg->algt.alg.crypto;
acbf7c62 3408 break;
aeb4c132
HX
3409
3410 case CRYPTO_ALG_TYPE_AEAD:
3411 err = crypto_register_aead(
3412 &t_alg->algt.alg.aead);
3413 alg = &t_alg->algt.alg.aead.base;
3414 break;
3415
acbf7c62
LN
3416 case CRYPTO_ALG_TYPE_AHASH:
3417 err = crypto_register_ahash(
3418 &t_alg->algt.alg.hash);
aeb4c132 3419 alg = &t_alg->algt.alg.hash.halg.base;
acbf7c62
LN
3420 break;
3421 }
9c4a7965
KP
3422 if (err) {
3423 dev_err(dev, "%s alg registration failed\n",
aeb4c132 3424 alg->cra_driver_name);
24b92ff2 3425 devm_kfree(dev, t_alg);
991155ba 3426 } else
9c4a7965 3427 list_add_tail(&t_alg->entry, &priv->alg_list);
9c4a7965
KP
3428 }
3429 }
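Each entry that survives registration is queued on priv->alg_list, which lets the teardown path unregister by type. A hedged sketch of the matching cleanup, mirroring the switch above (this approximates what talitos_remove() is expected to do; it is not copied from it):

/* Sketch: per-type unregistration mirroring the registration switch. */
struct talitos_crypto_alg *t_alg, *n;

list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
	switch (t_alg->algt.type) {
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		crypto_unregister_alg(&t_alg->algt.alg.crypto);
		break;
	case CRYPTO_ALG_TYPE_AEAD:
		crypto_unregister_aead(&t_alg->algt.alg.aead);
		break;
	case CRYPTO_ALG_TYPE_AHASH:
		crypto_unregister_ahash(&t_alg->algt.alg.hash);
		break;
	}
	list_del(&t_alg->entry);
}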
5b859b6e
KP
3430 if (!list_empty(&priv->alg_list))
3431 dev_info(dev, "%s algorithms registered in /proc/crypto\n",
3432 (char *)of_get_property(np, "compatible", NULL));
9c4a7965
KP
3433
3434 return 0;
3435
3436err_out:
3437 talitos_remove(ofdev);
9c4a7965
KP
3438
3439 return err;
3440}
3441
6c3f975a 3442static const struct of_device_id talitos_match[] = {
0635b7db
LC
3443#ifdef CONFIG_CRYPTO_DEV_TALITOS1
3444 {
3445 .compatible = "fsl,sec1.0",
3446 },
3447#endif
3448#ifdef CONFIG_CRYPTO_DEV_TALITOS2
9c4a7965
KP
3449 {
3450 .compatible = "fsl,sec2.0",
3451 },
0635b7db 3452#endif
9c4a7965
KP
3453 {},
3454};
3455MODULE_DEVICE_TABLE(of, talitos_match);
3456
1c48a5c9 3457static struct platform_driver talitos_driver = {
4018294b
GL
3458 .driver = {
3459 .name = "talitos",
4018294b
GL
3460 .of_match_table = talitos_match,
3461 },
9c4a7965 3462 .probe = talitos_probe,
596f1034 3463 .remove = talitos_remove,
9c4a7965
KP
3464};
3465
741e8c2d 3466module_platform_driver(talitos_driver);
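module_platform_driver(talitos_driver) stands in for the usual module_init()/module_exit() boilerplate; it is roughly equivalent to the expansion sketched below (standard kernel macro behaviour, shown only for orientation):

/* Roughly what module_platform_driver(talitos_driver) expands to. */
static int __init talitos_driver_init(void)
{
	return platform_driver_register(&talitos_driver);
}
module_init(talitos_driver_init);

static void __exit talitos_driver_exit(void)
{
	platform_driver_unregister(&talitos_driver);
}
module_exit(talitos_driver_exit);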
9c4a7965
KP
3467
3468MODULE_LICENSE("GPL");
3469MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
3470MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");