]> git.proxmox.com Git - mirror_ubuntu-artful-kernel.git/blame - drivers/crypto/talitos.c
UBUNTU: Start new release
[mirror_ubuntu-artful-kernel.git] / drivers / crypto / talitos.c
CommitLineData
9c4a7965
KP
1/*
2 * talitos - Freescale Integrated Security Engine (SEC) device driver
3 *
5228f0f7 4 * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
9c4a7965
KP
5 *
6 * Scatterlist Crypto API glue code copied from files with the following:
7 * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * Crypto algorithm registration code copied from hifn driver:
10 * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
11 * All rights reserved.
12 *
13 * This program is free software; you can redistribute it and/or modify
14 * it under the terms of the GNU General Public License as published by
15 * the Free Software Foundation; either version 2 of the License, or
16 * (at your option) any later version.
17 *
18 * This program is distributed in the hope that it will be useful,
19 * but WITHOUT ANY WARRANTY; without even the implied warranty of
20 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
21 * GNU General Public License for more details.
22 *
23 * You should have received a copy of the GNU General Public License
24 * along with this program; if not, write to the Free Software
25 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
26 */
27
28#include <linux/kernel.h>
29#include <linux/module.h>
30#include <linux/mod_devicetable.h>
31#include <linux/device.h>
32#include <linux/interrupt.h>
33#include <linux/crypto.h>
34#include <linux/hw_random.h>
5af50730
RH
35#include <linux/of_address.h>
36#include <linux/of_irq.h>
9c4a7965
KP
37#include <linux/of_platform.h>
38#include <linux/dma-mapping.h>
39#include <linux/io.h>
40#include <linux/spinlock.h>
41#include <linux/rtnetlink.h>
5a0e3ad6 42#include <linux/slab.h>
9c4a7965
KP
43
44#include <crypto/algapi.h>
45#include <crypto/aes.h>
3952f17e 46#include <crypto/des.h>
9c4a7965 47#include <crypto/sha.h>
497f2e6b 48#include <crypto/md5.h>
e98014ab 49#include <crypto/internal/aead.h>
9c4a7965 50#include <crypto/authenc.h>
4de9d0b5 51#include <crypto/skcipher.h>
acbf7c62
LN
52#include <crypto/hash.h>
53#include <crypto/internal/hash.h>
4de9d0b5 54#include <crypto/scatterwalk.h>
9c4a7965
KP
55
56#include "talitos.h"
57
922f9dc8
LC
58static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
59 bool is_sec1)
81eb024c 60{
edc6bd69 61 ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
922f9dc8
LC
62 if (!is_sec1)
63 ptr->eptr = upper_32_bits(dma_addr);
81eb024c
KP
64}
65
340ff60a
HG
66static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
67 struct talitos_ptr *src_ptr, bool is_sec1)
68{
69 dst_ptr->ptr = src_ptr->ptr;
70 if (!is_sec1)
71 dst_ptr->eptr = src_ptr->eptr;
72}
73
42e8b0d7 74static void to_talitos_ptr_len(struct talitos_ptr *ptr, unsigned int len,
922f9dc8 75 bool is_sec1)
538caf83 76{
922f9dc8
LC
77 if (is_sec1) {
78 ptr->res = 0;
79 ptr->len1 = cpu_to_be16(len);
80 } else {
81 ptr->len = cpu_to_be16(len);
82 }
538caf83
LC
83}
84
922f9dc8
LC
85static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
86 bool is_sec1)
538caf83 87{
922f9dc8
LC
88 if (is_sec1)
89 return be16_to_cpu(ptr->len1);
90 else
91 return be16_to_cpu(ptr->len);
538caf83
LC
92}
93
b096b544
LC
94static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
95 bool is_sec1)
185eb79f 96{
922f9dc8 97 if (!is_sec1)
b096b544
LC
98 ptr->j_extent = val;
99}
100
101static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
102{
103 if (!is_sec1)
104 ptr->j_extent |= val;
185eb79f
LC
105}
106
9c4a7965
KP
107/*
108 * map virtual single (contiguous) pointer to h/w descriptor pointer
109 */
110static void map_single_talitos_ptr(struct device *dev,
edc6bd69 111 struct talitos_ptr *ptr,
42e8b0d7 112 unsigned int len, void *data,
9c4a7965
KP
113 enum dma_data_direction dir)
114{
81eb024c 115 dma_addr_t dma_addr = dma_map_single(dev, data, len, dir);
922f9dc8
LC
116 struct talitos_private *priv = dev_get_drvdata(dev);
117 bool is_sec1 = has_ftr_sec1(priv);
81eb024c 118
922f9dc8
LC
119 to_talitos_ptr_len(ptr, len, is_sec1);
120 to_talitos_ptr(ptr, dma_addr, is_sec1);
b096b544 121 to_talitos_ptr_ext_set(ptr, 0, is_sec1);
9c4a7965
KP
122}
123
124/*
125 * unmap bus single (contiguous) h/w descriptor pointer
126 */
127static void unmap_single_talitos_ptr(struct device *dev,
edc6bd69 128 struct talitos_ptr *ptr,
9c4a7965
KP
129 enum dma_data_direction dir)
130{
922f9dc8
LC
131 struct talitos_private *priv = dev_get_drvdata(dev);
132 bool is_sec1 = has_ftr_sec1(priv);
133
edc6bd69 134 dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
922f9dc8 135 from_talitos_ptr_len(ptr, is_sec1), dir);
9c4a7965
KP
136}
137
138static int reset_channel(struct device *dev, int ch)
139{
140 struct talitos_private *priv = dev_get_drvdata(dev);
141 unsigned int timeout = TALITOS_TIMEOUT;
dd3c0987 142 bool is_sec1 = has_ftr_sec1(priv);
9c4a7965 143
dd3c0987
LC
144 if (is_sec1) {
145 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
146 TALITOS1_CCCR_LO_RESET);
9c4a7965 147
dd3c0987
LC
148 while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
149 TALITOS1_CCCR_LO_RESET) && --timeout)
150 cpu_relax();
151 } else {
152 setbits32(priv->chan[ch].reg + TALITOS_CCCR,
153 TALITOS2_CCCR_RESET);
154
155 while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
156 TALITOS2_CCCR_RESET) && --timeout)
157 cpu_relax();
158 }
9c4a7965
KP
159
160 if (timeout == 0) {
161 dev_err(dev, "failed to reset channel %d\n", ch);
162 return -EIO;
163 }
164
81eb024c 165 /* set 36-bit addressing, done writeback enable and done IRQ enable */
ad42d5fc 166 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
81eb024c 167 TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
9c4a7965 168
fe5720e2
KP
169 /* and ICCR writeback, if available */
170 if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
ad42d5fc 171 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
fe5720e2
KP
172 TALITOS_CCCR_LO_IWSE);
173
9c4a7965
KP
174 return 0;
175}
176
177static int reset_device(struct device *dev)
178{
179 struct talitos_private *priv = dev_get_drvdata(dev);
180 unsigned int timeout = TALITOS_TIMEOUT;
dd3c0987
LC
181 bool is_sec1 = has_ftr_sec1(priv);
182 u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;
9c4a7965 183
c3e337f8 184 setbits32(priv->reg + TALITOS_MCR, mcr);
9c4a7965 185
dd3c0987 186 while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
9c4a7965
KP
187 && --timeout)
188 cpu_relax();
189
2cdba3cf 190 if (priv->irq[1]) {
c3e337f8
KP
191 mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
192 setbits32(priv->reg + TALITOS_MCR, mcr);
193 }
194
9c4a7965
KP
195 if (timeout == 0) {
196 dev_err(dev, "failed to reset device\n");
197 return -EIO;
198 }
199
200 return 0;
201}
202
203/*
204 * Reset and initialize the device
205 */
206static int init_device(struct device *dev)
207{
208 struct talitos_private *priv = dev_get_drvdata(dev);
209 int ch, err;
dd3c0987 210 bool is_sec1 = has_ftr_sec1(priv);
9c4a7965
KP
211
212 /*
213 * Master reset
214 * errata documentation: warning: certain SEC interrupts
215 * are not fully cleared by writing the MCR:SWR bit,
216 * set bit twice to completely reset
217 */
218 err = reset_device(dev);
219 if (err)
220 return err;
221
222 err = reset_device(dev);
223 if (err)
224 return err;
225
226 /* reset channels */
227 for (ch = 0; ch < priv->num_channels; ch++) {
228 err = reset_channel(dev, ch);
229 if (err)
230 return err;
231 }
232
233 /* enable channel done and error interrupts */
dd3c0987
LC
234 if (is_sec1) {
235 clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
236 clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
237 /* disable parity error check in DEU (erroneous? test vect.) */
238 setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
239 } else {
240 setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
241 setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
242 }
9c4a7965 243
fe5720e2
KP
244 /* disable integrity check error interrupts (use writeback instead) */
245 if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
5fa7fa14 246 setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
fe5720e2
KP
247 TALITOS_MDEUICR_LO_ICE);
248
9c4a7965
KP
249 return 0;
250}
251
252/**
253 * talitos_submit - submits a descriptor to the device for processing
254 * @dev: the SEC device to be used
5228f0f7 255 * @ch: the SEC device channel to be used
9c4a7965
KP
256 * @desc: the descriptor to be processed by the device
257 * @callback: whom to call when processing is complete
258 * @context: a handle for use by caller (optional)
259 *
260 * desc must contain valid dma-mapped (bus physical) address pointers.
261 * callback must check err and feedback in descriptor header
262 * for device processing status.
263 */
865d5061
HG
264int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
265 void (*callback)(struct device *dev,
266 struct talitos_desc *desc,
267 void *context, int error),
268 void *context)
9c4a7965
KP
269{
270 struct talitos_private *priv = dev_get_drvdata(dev);
271 struct talitos_request *request;
5228f0f7 272 unsigned long flags;
9c4a7965 273 int head;
7d607c6a 274 bool is_sec1 = has_ftr_sec1(priv);
9c4a7965 275
4b992628 276 spin_lock_irqsave(&priv->chan[ch].head_lock, flags);
9c4a7965 277
4b992628 278 if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
ec6644d6 279 /* h/w fifo is full */
4b992628 280 spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
9c4a7965
KP
281 return -EAGAIN;
282 }
283
4b992628
KP
284 head = priv->chan[ch].head;
285 request = &priv->chan[ch].fifo[head];
ec6644d6 286
9c4a7965 287 /* map descriptor and save caller data */
7d607c6a
LC
288 if (is_sec1) {
289 desc->hdr1 = desc->hdr;
290 desc->next_desc = 0;
291 request->dma_desc = dma_map_single(dev, &desc->hdr1,
292 TALITOS_DESC_SIZE,
293 DMA_BIDIRECTIONAL);
294 } else {
295 request->dma_desc = dma_map_single(dev, desc,
296 TALITOS_DESC_SIZE,
297 DMA_BIDIRECTIONAL);
298 }
9c4a7965
KP
299 request->callback = callback;
300 request->context = context;
301
302 /* increment fifo head */
4b992628 303 priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);
9c4a7965
KP
304
305 smp_wmb();
306 request->desc = desc;
307
308 /* GO! */
309 wmb();
ad42d5fc
KP
310 out_be32(priv->chan[ch].reg + TALITOS_FF,
311 upper_32_bits(request->dma_desc));
312 out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
a752447a 313 lower_32_bits(request->dma_desc));
9c4a7965 314
4b992628 315 spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
9c4a7965
KP
316
317 return -EINPROGRESS;
318}
865d5061 319EXPORT_SYMBOL(talitos_submit);
9c4a7965
KP
320
321/*
322 * process what was done, notify callback of error if not
323 */
324static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
325{
326 struct talitos_private *priv = dev_get_drvdata(dev);
327 struct talitos_request *request, saved_req;
328 unsigned long flags;
329 int tail, status;
7d607c6a 330 bool is_sec1 = has_ftr_sec1(priv);
9c4a7965 331
4b992628 332 spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
9c4a7965 333
4b992628
KP
334 tail = priv->chan[ch].tail;
335 while (priv->chan[ch].fifo[tail].desc) {
7d607c6a
LC
336 __be32 hdr;
337
4b992628 338 request = &priv->chan[ch].fifo[tail];
9c4a7965
KP
339
340 /* descriptors with their done bits set don't get the error */
341 rmb();
7d607c6a
LC
342 hdr = is_sec1 ? request->desc->hdr1 : request->desc->hdr;
343
344 if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
9c4a7965 345 status = 0;
ca38a814 346 else
9c4a7965
KP
347 if (!error)
348 break;
349 else
350 status = error;
351
352 dma_unmap_single(dev, request->dma_desc,
7d607c6a 353 TALITOS_DESC_SIZE,
e938e465 354 DMA_BIDIRECTIONAL);
9c4a7965
KP
355
356 /* copy entries so we can call callback outside lock */
357 saved_req.desc = request->desc;
358 saved_req.callback = request->callback;
359 saved_req.context = request->context;
360
361 /* release request entry in fifo */
362 smp_wmb();
363 request->desc = NULL;
364
365 /* increment fifo tail */
4b992628 366 priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);
9c4a7965 367
4b992628 368 spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
ec6644d6 369
4b992628 370 atomic_dec(&priv->chan[ch].submit_count);
ec6644d6 371
9c4a7965
KP
372 saved_req.callback(dev, saved_req.desc, saved_req.context,
373 status);
374 /* channel may resume processing in single desc error case */
375 if (error && !reset_ch && status == error)
376 return;
4b992628
KP
377 spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
378 tail = priv->chan[ch].tail;
9c4a7965
KP
379 }
380
4b992628 381 spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
9c4a7965
KP
382}
383
384/*
385 * process completed requests for channels that have done status
386 */
dd3c0987
LC
387#define DEF_TALITOS1_DONE(name, ch_done_mask) \
388static void talitos1_done_##name(unsigned long data) \
389{ \
390 struct device *dev = (struct device *)data; \
391 struct talitos_private *priv = dev_get_drvdata(dev); \
392 unsigned long flags; \
393 \
394 if (ch_done_mask & 0x10000000) \
395 flush_channel(dev, 0, 0, 0); \
396 if (priv->num_channels == 1) \
397 goto out; \
398 if (ch_done_mask & 0x40000000) \
399 flush_channel(dev, 1, 0, 0); \
400 if (ch_done_mask & 0x00010000) \
401 flush_channel(dev, 2, 0, 0); \
402 if (ch_done_mask & 0x00040000) \
403 flush_channel(dev, 3, 0, 0); \
404 \
405out: \
406 /* At this point, all completed channels have been processed */ \
407 /* Unmask done interrupts for channels completed later on. */ \
408 spin_lock_irqsave(&priv->reg_lock, flags); \
409 clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
410 clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT); \
411 spin_unlock_irqrestore(&priv->reg_lock, flags); \
412}
413
414DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
415
416#define DEF_TALITOS2_DONE(name, ch_done_mask) \
417static void talitos2_done_##name(unsigned long data) \
c3e337f8
KP
418{ \
419 struct device *dev = (struct device *)data; \
420 struct talitos_private *priv = dev_get_drvdata(dev); \
511d63cb 421 unsigned long flags; \
c3e337f8
KP
422 \
423 if (ch_done_mask & 1) \
424 flush_channel(dev, 0, 0, 0); \
425 if (priv->num_channels == 1) \
426 goto out; \
427 if (ch_done_mask & (1 << 2)) \
428 flush_channel(dev, 1, 0, 0); \
429 if (ch_done_mask & (1 << 4)) \
430 flush_channel(dev, 2, 0, 0); \
431 if (ch_done_mask & (1 << 6)) \
432 flush_channel(dev, 3, 0, 0); \
433 \
434out: \
435 /* At this point, all completed channels have been processed */ \
436 /* Unmask done interrupts for channels completed later on. */ \
511d63cb 437 spin_lock_irqsave(&priv->reg_lock, flags); \
c3e337f8 438 setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
dd3c0987 439 setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT); \
511d63cb 440 spin_unlock_irqrestore(&priv->reg_lock, flags); \
9c4a7965 441}
dd3c0987
LC
442
443DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
444DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
445DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)
9c4a7965
KP
446
447/*
448 * locate current (offending) descriptor
449 */
3e721aeb 450static u32 current_desc_hdr(struct device *dev, int ch)
9c4a7965
KP
451{
452 struct talitos_private *priv = dev_get_drvdata(dev);
b62ffd8c 453 int tail, iter;
9c4a7965
KP
454 dma_addr_t cur_desc;
455
b62ffd8c
HG
456 cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
457 cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);
9c4a7965 458
b62ffd8c
HG
459 if (!cur_desc) {
460 dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
461 return 0;
462 }
463
464 tail = priv->chan[ch].tail;
465
466 iter = tail;
467 while (priv->chan[ch].fifo[iter].dma_desc != cur_desc) {
468 iter = (iter + 1) & (priv->fifo_len - 1);
469 if (iter == tail) {
9c4a7965 470 dev_err(dev, "couldn't locate current descriptor\n");
3e721aeb 471 return 0;
9c4a7965
KP
472 }
473 }
474
b62ffd8c 475 return priv->chan[ch].fifo[iter].desc->hdr;
9c4a7965
KP
476}
477
478/*
479 * user diagnostics; report root cause of error based on execution unit status
480 */
3e721aeb 481static void report_eu_error(struct device *dev, int ch, u32 desc_hdr)
9c4a7965
KP
482{
483 struct talitos_private *priv = dev_get_drvdata(dev);
484 int i;
485
3e721aeb 486 if (!desc_hdr)
ad42d5fc 487 desc_hdr = in_be32(priv->chan[ch].reg + TALITOS_DESCBUF);
3e721aeb
KP
488
489 switch (desc_hdr & DESC_HDR_SEL0_MASK) {
9c4a7965
KP
490 case DESC_HDR_SEL0_AFEU:
491 dev_err(dev, "AFEUISR 0x%08x_%08x\n",
5fa7fa14
LC
492 in_be32(priv->reg_afeu + TALITOS_EUISR),
493 in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
9c4a7965
KP
494 break;
495 case DESC_HDR_SEL0_DEU:
496 dev_err(dev, "DEUISR 0x%08x_%08x\n",
5fa7fa14
LC
497 in_be32(priv->reg_deu + TALITOS_EUISR),
498 in_be32(priv->reg_deu + TALITOS_EUISR_LO));
9c4a7965
KP
499 break;
500 case DESC_HDR_SEL0_MDEUA:
501 case DESC_HDR_SEL0_MDEUB:
502 dev_err(dev, "MDEUISR 0x%08x_%08x\n",
5fa7fa14
LC
503 in_be32(priv->reg_mdeu + TALITOS_EUISR),
504 in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
9c4a7965
KP
505 break;
506 case DESC_HDR_SEL0_RNG:
507 dev_err(dev, "RNGUISR 0x%08x_%08x\n",
5fa7fa14
LC
508 in_be32(priv->reg_rngu + TALITOS_ISR),
509 in_be32(priv->reg_rngu + TALITOS_ISR_LO));
9c4a7965
KP
510 break;
511 case DESC_HDR_SEL0_PKEU:
512 dev_err(dev, "PKEUISR 0x%08x_%08x\n",
5fa7fa14
LC
513 in_be32(priv->reg_pkeu + TALITOS_EUISR),
514 in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
9c4a7965
KP
515 break;
516 case DESC_HDR_SEL0_AESU:
517 dev_err(dev, "AESUISR 0x%08x_%08x\n",
5fa7fa14
LC
518 in_be32(priv->reg_aesu + TALITOS_EUISR),
519 in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
9c4a7965
KP
520 break;
521 case DESC_HDR_SEL0_CRCU:
522 dev_err(dev, "CRCUISR 0x%08x_%08x\n",
5fa7fa14
LC
523 in_be32(priv->reg_crcu + TALITOS_EUISR),
524 in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
9c4a7965
KP
525 break;
526 case DESC_HDR_SEL0_KEU:
527 dev_err(dev, "KEUISR 0x%08x_%08x\n",
5fa7fa14
LC
528 in_be32(priv->reg_pkeu + TALITOS_EUISR),
529 in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
9c4a7965
KP
530 break;
531 }
532
3e721aeb 533 switch (desc_hdr & DESC_HDR_SEL1_MASK) {
9c4a7965
KP
534 case DESC_HDR_SEL1_MDEUA:
535 case DESC_HDR_SEL1_MDEUB:
536 dev_err(dev, "MDEUISR 0x%08x_%08x\n",
5fa7fa14
LC
537 in_be32(priv->reg_mdeu + TALITOS_EUISR),
538 in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
9c4a7965
KP
539 break;
540 case DESC_HDR_SEL1_CRCU:
541 dev_err(dev, "CRCUISR 0x%08x_%08x\n",
5fa7fa14
LC
542 in_be32(priv->reg_crcu + TALITOS_EUISR),
543 in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
9c4a7965
KP
544 break;
545 }
546
547 for (i = 0; i < 8; i++)
548 dev_err(dev, "DESCBUF 0x%08x_%08x\n",
ad42d5fc
KP
549 in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
550 in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
9c4a7965
KP
551}
552
553/*
554 * recover from error interrupts
555 */
5e718a09 556static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
9c4a7965 557{
9c4a7965
KP
558 struct talitos_private *priv = dev_get_drvdata(dev);
559 unsigned int timeout = TALITOS_TIMEOUT;
dd3c0987 560 int ch, error, reset_dev = 0;
42e8b0d7 561 u32 v_lo;
dd3c0987
LC
562 bool is_sec1 = has_ftr_sec1(priv);
563 int reset_ch = is_sec1 ? 1 : 0; /* only SEC2 supports continuation */
9c4a7965
KP
564
565 for (ch = 0; ch < priv->num_channels; ch++) {
566 /* skip channels without errors */
dd3c0987
LC
567 if (is_sec1) {
568 /* bits 29, 31, 17, 19 */
569 if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
570 continue;
571 } else {
572 if (!(isr & (1 << (ch * 2 + 1))))
573 continue;
574 }
9c4a7965
KP
575
576 error = -EINVAL;
577
ad42d5fc 578 v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);
9c4a7965
KP
579
580 if (v_lo & TALITOS_CCPSR_LO_DOF) {
581 dev_err(dev, "double fetch fifo overflow error\n");
582 error = -EAGAIN;
583 reset_ch = 1;
584 }
585 if (v_lo & TALITOS_CCPSR_LO_SOF) {
586 /* h/w dropped descriptor */
587 dev_err(dev, "single fetch fifo overflow error\n");
588 error = -EAGAIN;
589 }
590 if (v_lo & TALITOS_CCPSR_LO_MDTE)
591 dev_err(dev, "master data transfer error\n");
592 if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
4d9b3a5b 593 dev_err(dev, is_sec1 ? "pointer not complete error\n"
dd3c0987 594 : "s/g data length zero error\n");
9c4a7965 595 if (v_lo & TALITOS_CCPSR_LO_FPZ)
dd3c0987
LC
596 dev_err(dev, is_sec1 ? "parity error\n"
597 : "fetch pointer zero error\n");
9c4a7965
KP
598 if (v_lo & TALITOS_CCPSR_LO_IDH)
599 dev_err(dev, "illegal descriptor header error\n");
600 if (v_lo & TALITOS_CCPSR_LO_IEU)
dd3c0987
LC
601 dev_err(dev, is_sec1 ? "static assignment error\n"
602 : "invalid exec unit error\n");
9c4a7965 603 if (v_lo & TALITOS_CCPSR_LO_EU)
3e721aeb 604 report_eu_error(dev, ch, current_desc_hdr(dev, ch));
dd3c0987
LC
605 if (!is_sec1) {
606 if (v_lo & TALITOS_CCPSR_LO_GB)
607 dev_err(dev, "gather boundary error\n");
608 if (v_lo & TALITOS_CCPSR_LO_GRL)
609 dev_err(dev, "gather return/length error\n");
610 if (v_lo & TALITOS_CCPSR_LO_SB)
611 dev_err(dev, "scatter boundary error\n");
612 if (v_lo & TALITOS_CCPSR_LO_SRL)
613 dev_err(dev, "scatter return/length error\n");
614 }
9c4a7965
KP
615
616 flush_channel(dev, ch, error, reset_ch);
617
618 if (reset_ch) {
619 reset_channel(dev, ch);
620 } else {
ad42d5fc 621 setbits32(priv->chan[ch].reg + TALITOS_CCCR,
dd3c0987 622 TALITOS2_CCCR_CONT);
ad42d5fc
KP
623 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
624 while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
dd3c0987 625 TALITOS2_CCCR_CONT) && --timeout)
9c4a7965
KP
626 cpu_relax();
627 if (timeout == 0) {
628 dev_err(dev, "failed to restart channel %d\n",
629 ch);
630 reset_dev = 1;
631 }
632 }
633 }
dd3c0987
LC
634 if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
635 (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
636 if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
637 dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
638 isr, isr_lo);
639 else
640 dev_err(dev, "done overflow, internal time out, or "
641 "rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);
9c4a7965
KP
642
643 /* purge request queues */
644 for (ch = 0; ch < priv->num_channels; ch++)
645 flush_channel(dev, ch, -EIO, 1);
646
647 /* reset and reinitialize the device */
648 init_device(dev);
649 }
650}
651
dd3c0987
LC
652#define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
653static irqreturn_t talitos1_interrupt_##name(int irq, void *data) \
654{ \
655 struct device *dev = data; \
656 struct talitos_private *priv = dev_get_drvdata(dev); \
657 u32 isr, isr_lo; \
658 unsigned long flags; \
659 \
660 spin_lock_irqsave(&priv->reg_lock, flags); \
661 isr = in_be32(priv->reg + TALITOS_ISR); \
662 isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
663 /* Acknowledge interrupt */ \
664 out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
665 out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
666 \
667 if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) { \
668 spin_unlock_irqrestore(&priv->reg_lock, flags); \
669 talitos_error(dev, isr & ch_err_mask, isr_lo); \
670 } \
671 else { \
672 if (likely(isr & ch_done_mask)) { \
673 /* mask further done interrupts. */ \
674 setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
675 /* done_task will unmask done interrupts at exit */ \
676 tasklet_schedule(&priv->done_task[tlet]); \
677 } \
678 spin_unlock_irqrestore(&priv->reg_lock, flags); \
679 } \
680 \
681 return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
682 IRQ_NONE; \
683}
684
685DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)
686
687#define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet) \
688static irqreturn_t talitos2_interrupt_##name(int irq, void *data) \
c3e337f8
KP
689{ \
690 struct device *dev = data; \
691 struct talitos_private *priv = dev_get_drvdata(dev); \
692 u32 isr, isr_lo; \
511d63cb 693 unsigned long flags; \
c3e337f8 694 \
511d63cb 695 spin_lock_irqsave(&priv->reg_lock, flags); \
c3e337f8
KP
696 isr = in_be32(priv->reg + TALITOS_ISR); \
697 isr_lo = in_be32(priv->reg + TALITOS_ISR_LO); \
698 /* Acknowledge interrupt */ \
699 out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
700 out_be32(priv->reg + TALITOS_ICR_LO, isr_lo); \
701 \
511d63cb
HG
702 if (unlikely(isr & ch_err_mask || isr_lo)) { \
703 spin_unlock_irqrestore(&priv->reg_lock, flags); \
704 talitos_error(dev, isr & ch_err_mask, isr_lo); \
705 } \
706 else { \
c3e337f8
KP
707 if (likely(isr & ch_done_mask)) { \
708 /* mask further done interrupts. */ \
709 clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
710 /* done_task will unmask done interrupts at exit */ \
711 tasklet_schedule(&priv->done_task[tlet]); \
712 } \
511d63cb
HG
713 spin_unlock_irqrestore(&priv->reg_lock, flags); \
714 } \
c3e337f8
KP
715 \
716 return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
717 IRQ_NONE; \
9c4a7965 718}
dd3c0987
LC
719
720DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
721DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
722 0)
723DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
724 1)
9c4a7965
KP
725
726/*
727 * hwrng
728 */
729static int talitos_rng_data_present(struct hwrng *rng, int wait)
730{
731 struct device *dev = (struct device *)rng->priv;
732 struct talitos_private *priv = dev_get_drvdata(dev);
733 u32 ofl;
734 int i;
735
736 for (i = 0; i < 20; i++) {
5fa7fa14 737 ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
9c4a7965
KP
738 TALITOS_RNGUSR_LO_OFL;
739 if (ofl || !wait)
740 break;
741 udelay(10);
742 }
743
744 return !!ofl;
745}
746
747static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
748{
749 struct device *dev = (struct device *)rng->priv;
750 struct talitos_private *priv = dev_get_drvdata(dev);
751
752 /* rng fifo requires 64-bit accesses */
5fa7fa14
LC
753 *data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
754 *data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);
9c4a7965
KP
755
756 return sizeof(u32);
757}
758
759static int talitos_rng_init(struct hwrng *rng)
760{
761 struct device *dev = (struct device *)rng->priv;
762 struct talitos_private *priv = dev_get_drvdata(dev);
763 unsigned int timeout = TALITOS_TIMEOUT;
764
5fa7fa14
LC
765 setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
766 while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
767 & TALITOS_RNGUSR_LO_RD)
9c4a7965
KP
768 && --timeout)
769 cpu_relax();
770 if (timeout == 0) {
771 dev_err(dev, "failed to reset rng hw\n");
772 return -ENODEV;
773 }
774
775 /* start generating */
5fa7fa14 776 setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);
9c4a7965
KP
777
778 return 0;
779}
780
781static int talitos_register_rng(struct device *dev)
782{
783 struct talitos_private *priv = dev_get_drvdata(dev);
35a3bb3d 784 int err;
9c4a7965
KP
785
786 priv->rng.name = dev_driver_string(dev),
787 priv->rng.init = talitos_rng_init,
788 priv->rng.data_present = talitos_rng_data_present,
789 priv->rng.data_read = talitos_rng_data_read,
790 priv->rng.priv = (unsigned long)dev;
791
35a3bb3d
AS
792 err = hwrng_register(&priv->rng);
793 if (!err)
794 priv->rng_registered = true;
795
796 return err;
9c4a7965
KP
797}
798
799static void talitos_unregister_rng(struct device *dev)
800{
801 struct talitos_private *priv = dev_get_drvdata(dev);
802
35a3bb3d
AS
803 if (!priv->rng_registered)
804 return;
805
9c4a7965 806 hwrng_unregister(&priv->rng);
35a3bb3d 807 priv->rng_registered = false;
9c4a7965
KP
808}
809
810/*
811 * crypto alg
812 */
813#define TALITOS_CRA_PRIORITY 3000
7405c8d7
LC
814/*
815 * Defines a priority for doing AEAD with descriptors type
816 * HMAC_SNOOP_NO_AFEA (HSNA) instead of type IPSEC_ESP
817 */
818#define TALITOS_CRA_PRIORITY_AEAD_HSNA (TALITOS_CRA_PRIORITY - 1)
03d2c511 819#define TALITOS_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
3952f17e 820#define TALITOS_MAX_IV_LENGTH 16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
70bcaca7 821
9c4a7965
KP
822struct talitos_ctx {
823 struct device *dev;
5228f0f7 824 int ch;
9c4a7965
KP
825 __be32 desc_hdr_template;
826 u8 key[TALITOS_MAX_KEY_SIZE];
70bcaca7 827 u8 iv[TALITOS_MAX_IV_LENGTH];
9c4a7965
KP
828 unsigned int keylen;
829 unsigned int enckeylen;
830 unsigned int authkeylen;
9c4a7965
KP
831};
832
497f2e6b
LN
833#define HASH_MAX_BLOCK_SIZE SHA512_BLOCK_SIZE
834#define TALITOS_MDEU_MAX_CONTEXT_SIZE TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512
835
836struct talitos_ahash_req_ctx {
60f208d7 837 u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
497f2e6b
LN
838 unsigned int hw_context_size;
839 u8 buf[HASH_MAX_BLOCK_SIZE];
840 u8 bufnext[HASH_MAX_BLOCK_SIZE];
60f208d7 841 unsigned int swinit;
497f2e6b
LN
842 unsigned int first;
843 unsigned int last;
844 unsigned int to_hash_later;
42e8b0d7 845 unsigned int nbuf;
497f2e6b
LN
846 struct scatterlist bufsl[2];
847 struct scatterlist *psrc;
848};
849
3639ca84
HG
850struct talitos_export_state {
851 u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
852 u8 buf[HASH_MAX_BLOCK_SIZE];
853 unsigned int swinit;
854 unsigned int first;
855 unsigned int last;
856 unsigned int to_hash_later;
857 unsigned int nbuf;
858};
859
56af8cd4
LN
860static int aead_setkey(struct crypto_aead *authenc,
861 const u8 *key, unsigned int keylen)
9c4a7965
KP
862{
863 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
c306a98d 864 struct crypto_authenc_keys keys;
9c4a7965 865
c306a98d 866 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
9c4a7965
KP
867 goto badkey;
868
c306a98d 869 if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
9c4a7965
KP
870 goto badkey;
871
c306a98d
MK
872 memcpy(ctx->key, keys.authkey, keys.authkeylen);
873 memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);
9c4a7965 874
c306a98d
MK
875 ctx->keylen = keys.authkeylen + keys.enckeylen;
876 ctx->enckeylen = keys.enckeylen;
877 ctx->authkeylen = keys.authkeylen;
9c4a7965
KP
878
879 return 0;
880
881badkey:
882 crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
883 return -EINVAL;
884}
885
886/*
56af8cd4 887 * talitos_edesc - s/w-extended descriptor
9c4a7965
KP
888 * @src_nents: number of segments in input scatterlist
889 * @dst_nents: number of segments in output scatterlist
aeb4c132 890 * @icv_ool: whether ICV is out-of-line
79fd31d3 891 * @iv_dma: dma address of iv for checking continuity and link table
9c4a7965 892 * @dma_len: length of dma mapped link_tbl space
6f65f6ac 893 * @dma_link_tbl: bus physical address of link_tbl/buf
9c4a7965 894 * @desc: h/w descriptor
6f65f6ac
LC
895 * @link_tbl: input and output h/w link tables (if {src,dst}_nents > 1) (SEC2)
896 * @buf: input and output buffeur (if {src,dst}_nents > 1) (SEC1)
9c4a7965
KP
897 *
898 * if decrypting (with authcheck), or either one of src_nents or dst_nents
899 * is greater than 1, an integrity check value is concatenated to the end
900 * of link_tbl data
901 */
56af8cd4 902struct talitos_edesc {
9c4a7965
KP
903 int src_nents;
904 int dst_nents;
aeb4c132 905 bool icv_ool;
79fd31d3 906 dma_addr_t iv_dma;
9c4a7965
KP
907 int dma_len;
908 dma_addr_t dma_link_tbl;
909 struct talitos_desc desc;
6f65f6ac
LC
910 union {
911 struct talitos_ptr link_tbl[0];
912 u8 buf[0];
913 };
9c4a7965
KP
914};
915
4de9d0b5
LN
916static void talitos_sg_unmap(struct device *dev,
917 struct talitos_edesc *edesc,
918 struct scatterlist *src,
6a1e8d14
LC
919 struct scatterlist *dst,
920 unsigned int len, unsigned int offset)
4de9d0b5 921{
6a1e8d14
LC
922 struct talitos_private *priv = dev_get_drvdata(dev);
923 bool is_sec1 = has_ftr_sec1(priv);
4de9d0b5
LN
924 unsigned int src_nents = edesc->src_nents ? : 1;
925 unsigned int dst_nents = edesc->dst_nents ? : 1;
926
6a1e8d14
LC
927 if (is_sec1 && dst && dst_nents > 1) {
928 dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
929 len, DMA_FROM_DEVICE);
930 sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
931 offset);
932 }
4de9d0b5 933 if (src != dst) {
6a1e8d14
LC
934 if (src_nents == 1 || !is_sec1)
935 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
4de9d0b5 936
6a1e8d14 937 if (dst && (dst_nents == 1 || !is_sec1))
b8a011d4 938 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
6a1e8d14 939 } else if (src_nents == 1 || !is_sec1) {
b8a011d4 940 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
246a87cd
LC
941 }
942}
943
9c4a7965 944static void ipsec_esp_unmap(struct device *dev,
56af8cd4 945 struct talitos_edesc *edesc,
9c4a7965
KP
946 struct aead_request *areq)
947{
549bd8bc
LC
948 struct crypto_aead *aead = crypto_aead_reqtfm(areq);
949 struct talitos_ctx *ctx = crypto_aead_ctx(aead);
950 unsigned int ivsize = crypto_aead_ivsize(aead);
951
952 if (edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP)
953 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
954 DMA_FROM_DEVICE);
9c4a7965
KP
955 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[3], DMA_TO_DEVICE);
956 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[2], DMA_TO_DEVICE);
957 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[0], DMA_TO_DEVICE);
958
6a1e8d14
LC
959 talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->cryptlen,
960 areq->assoclen);
9c4a7965
KP
961
962 if (edesc->dma_len)
963 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
964 DMA_BIDIRECTIONAL);
549bd8bc
LC
965
966 if (!(edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP)) {
967 unsigned int dst_nents = edesc->dst_nents ? : 1;
968
969 sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
970 areq->assoclen + areq->cryptlen - ivsize);
971 }
9c4a7965
KP
972}
973
974/*
975 * ipsec_esp descriptor callbacks
976 */
977static void ipsec_esp_encrypt_done(struct device *dev,
978 struct talitos_desc *desc, void *context,
979 int err)
980{
549bd8bc
LC
981 struct talitos_private *priv = dev_get_drvdata(dev);
982 bool is_sec1 = has_ftr_sec1(priv);
9c4a7965 983 struct aead_request *areq = context;
9c4a7965 984 struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
aeb4c132 985 unsigned int authsize = crypto_aead_authsize(authenc);
19bbbc63 986 struct talitos_edesc *edesc;
9c4a7965
KP
987 struct scatterlist *sg;
988 void *icvdata;
989
19bbbc63
KP
990 edesc = container_of(desc, struct talitos_edesc, desc);
991
9c4a7965
KP
992 ipsec_esp_unmap(dev, edesc, areq);
993
994 /* copy the generated ICV to dst */
aeb4c132 995 if (edesc->icv_ool) {
549bd8bc
LC
996 if (is_sec1)
997 icvdata = edesc->buf + areq->assoclen + areq->cryptlen;
998 else
999 icvdata = &edesc->link_tbl[edesc->src_nents +
1000 edesc->dst_nents + 2];
9c4a7965 1001 sg = sg_last(areq->dst, edesc->dst_nents);
aeb4c132
HX
1002 memcpy((char *)sg_virt(sg) + sg->length - authsize,
1003 icvdata, authsize);
9c4a7965
KP
1004 }
1005
1006 kfree(edesc);
1007
1008 aead_request_complete(areq, err);
1009}
1010
fe5720e2 1011static void ipsec_esp_decrypt_swauth_done(struct device *dev,
e938e465
KP
1012 struct talitos_desc *desc,
1013 void *context, int err)
9c4a7965
KP
1014{
1015 struct aead_request *req = context;
9c4a7965 1016 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
aeb4c132 1017 unsigned int authsize = crypto_aead_authsize(authenc);
19bbbc63 1018 struct talitos_edesc *edesc;
9c4a7965 1019 struct scatterlist *sg;
aeb4c132 1020 char *oicv, *icv;
549bd8bc
LC
1021 struct talitos_private *priv = dev_get_drvdata(dev);
1022 bool is_sec1 = has_ftr_sec1(priv);
9c4a7965 1023
19bbbc63
KP
1024 edesc = container_of(desc, struct talitos_edesc, desc);
1025
9c4a7965
KP
1026 ipsec_esp_unmap(dev, edesc, req);
1027
1028 if (!err) {
1029 /* auth check */
9c4a7965 1030 sg = sg_last(req->dst, edesc->dst_nents ? : 1);
aeb4c132
HX
1031 icv = (char *)sg_virt(sg) + sg->length - authsize;
1032
1033 if (edesc->dma_len) {
549bd8bc
LC
1034 if (is_sec1)
1035 oicv = (char *)&edesc->dma_link_tbl +
1036 req->assoclen + req->cryptlen;
1037 else
1038 oicv = (char *)
1039 &edesc->link_tbl[edesc->src_nents +
aeb4c132
HX
1040 edesc->dst_nents + 2];
1041 if (edesc->icv_ool)
1042 icv = oicv + authsize;
1043 } else
1044 oicv = (char *)&edesc->link_tbl[0];
1045
79960943 1046 err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
9c4a7965
KP
1047 }
1048
1049 kfree(edesc);
1050
1051 aead_request_complete(req, err);
1052}
1053
fe5720e2 1054static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
e938e465
KP
1055 struct talitos_desc *desc,
1056 void *context, int err)
fe5720e2
KP
1057{
1058 struct aead_request *req = context;
19bbbc63
KP
1059 struct talitos_edesc *edesc;
1060
1061 edesc = container_of(desc, struct talitos_edesc, desc);
fe5720e2
KP
1062
1063 ipsec_esp_unmap(dev, edesc, req);
1064
1065 /* check ICV auth status */
e938e465
KP
1066 if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
1067 DESC_HDR_LO_ICCR1_PASS))
1068 err = -EBADMSG;
fe5720e2
KP
1069
1070 kfree(edesc);
1071
1072 aead_request_complete(req, err);
1073}
1074
9c4a7965
KP
1075/*
1076 * convert scatterlist to SEC h/w link table format
1077 * stop at cryptlen bytes
1078 */
aeb4c132
HX
1079static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
1080 unsigned int offset, int cryptlen,
1081 struct talitos_ptr *link_tbl_ptr)
9c4a7965 1082{
70bcaca7 1083 int n_sg = sg_count;
aeb4c132 1084 int count = 0;
70bcaca7 1085
aeb4c132
HX
1086 while (cryptlen && sg && n_sg--) {
1087 unsigned int len = sg_dma_len(sg);
9c4a7965 1088
aeb4c132
HX
1089 if (offset >= len) {
1090 offset -= len;
1091 goto next;
1092 }
1093
1094 len -= offset;
1095
1096 if (len > cryptlen)
1097 len = cryptlen;
1098
1099 to_talitos_ptr(link_tbl_ptr + count,
1100 sg_dma_address(sg) + offset, 0);
b096b544
LC
1101 to_talitos_ptr_len(link_tbl_ptr + count, len, 0);
1102 to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
aeb4c132
HX
1103 count++;
1104 cryptlen -= len;
1105 offset = 0;
1106
1107next:
1108 sg = sg_next(sg);
70bcaca7 1109 }
9c4a7965
KP
1110
1111 /* tag end of link table */
aeb4c132 1112 if (count > 0)
b096b544
LC
1113 to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
1114 DESC_PTR_LNKTBL_RETURN, 0);
70bcaca7 1115
aeb4c132
HX
1116 return count;
1117}
1118
6a1e8d14
LC
1119int talitos_sg_map(struct device *dev, struct scatterlist *src,
1120 unsigned int len, struct talitos_edesc *edesc,
1121 struct talitos_ptr *ptr,
1122 int sg_count, unsigned int offset, int tbl_off)
246a87cd 1123{
246a87cd
LC
1124 struct talitos_private *priv = dev_get_drvdata(dev);
1125 bool is_sec1 = has_ftr_sec1(priv);
1126
1127 to_talitos_ptr_len(ptr, len, is_sec1);
6a1e8d14 1128 to_talitos_ptr_ext_set(ptr, 0, is_sec1);
246a87cd 1129
6a1e8d14
LC
1130 if (sg_count == 1) {
1131 to_talitos_ptr(ptr, sg_dma_address(src) + offset, is_sec1);
1132 return sg_count;
246a87cd 1133 }
246a87cd 1134 if (is_sec1) {
6a1e8d14
LC
1135 to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, is_sec1);
1136 return sg_count;
246a87cd 1137 }
6a1e8d14
LC
1138 sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len,
1139 &edesc->link_tbl[tbl_off]);
1140 if (sg_count == 1) {
1141 /* Only one segment now, so no link tbl needed*/
1142 copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
1143 return sg_count;
1144 }
1145 to_talitos_ptr(ptr, edesc->dma_link_tbl +
1146 tbl_off * sizeof(struct talitos_ptr), is_sec1);
1147 to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);
1148
1149 return sg_count;
246a87cd
LC
1150}
1151
9c4a7965
KP
1152/*
1153 * fill in and submit ipsec_esp descriptor
1154 */
56af8cd4 1155static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
aeb4c132
HX
1156 void (*callback)(struct device *dev,
1157 struct talitos_desc *desc,
1158 void *context, int error))
9c4a7965
KP
1159{
1160 struct crypto_aead *aead = crypto_aead_reqtfm(areq);
aeb4c132 1161 unsigned int authsize = crypto_aead_authsize(aead);
9c4a7965
KP
1162 struct talitos_ctx *ctx = crypto_aead_ctx(aead);
1163 struct device *dev = ctx->dev;
1164 struct talitos_desc *desc = &edesc->desc;
1165 unsigned int cryptlen = areq->cryptlen;
e41256f1 1166 unsigned int ivsize = crypto_aead_ivsize(aead);
aeb4c132 1167 int tbl_off = 0;
fa86a267 1168 int sg_count, ret;
fe5720e2 1169 int sg_link_tbl_len;
549bd8bc
LC
1170 bool sync_needed = false;
1171 struct talitos_private *priv = dev_get_drvdata(dev);
1172 bool is_sec1 = has_ftr_sec1(priv);
9c4a7965
KP
1173
1174 /* hmac key */
1175 map_single_talitos_ptr(dev, &desc->ptr[0], ctx->authkeylen, &ctx->key,
a2b35aa8 1176 DMA_TO_DEVICE);
79fd31d3 1177
549bd8bc
LC
1178 sg_count = edesc->src_nents ?: 1;
1179 if (is_sec1 && sg_count > 1)
1180 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1181 areq->assoclen + cryptlen);
1182 else
1183 sg_count = dma_map_sg(dev, areq->src, sg_count,
1184 (areq->src == areq->dst) ?
1185 DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
79fd31d3 1186
549bd8bc
LC
1187 /* hmac data */
1188 ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
1189 &desc->ptr[1], sg_count, 0, tbl_off);
340ff60a 1190
549bd8bc 1191 if (ret > 1) {
340ff60a 1192 tbl_off += ret;
549bd8bc 1193 sync_needed = true;
79fd31d3
HG
1194 }
1195
9c4a7965 1196 /* cipher iv */
549bd8bc
LC
1197 if (desc->hdr & DESC_HDR_TYPE_IPSEC_ESP) {
1198 to_talitos_ptr(&desc->ptr[2], edesc->iv_dma, is_sec1);
1199 to_talitos_ptr_len(&desc->ptr[2], ivsize, is_sec1);
1200 to_talitos_ptr_ext_set(&desc->ptr[2], 0, is_sec1);
1201 } else {
1202 to_talitos_ptr(&desc->ptr[3], edesc->iv_dma, is_sec1);
1203 to_talitos_ptr_len(&desc->ptr[3], ivsize, is_sec1);
1204 to_talitos_ptr_ext_set(&desc->ptr[3], 0, is_sec1);
1205 }
9c4a7965
KP
1206
1207 /* cipher key */
549bd8bc
LC
1208 if (desc->hdr & DESC_HDR_TYPE_IPSEC_ESP)
1209 map_single_talitos_ptr(dev, &desc->ptr[3], ctx->enckeylen,
1210 (char *)&ctx->key + ctx->authkeylen,
1211 DMA_TO_DEVICE);
1212 else
1213 map_single_talitos_ptr(dev, &desc->ptr[2], ctx->enckeylen,
1214 (char *)&ctx->key + ctx->authkeylen,
1215 DMA_TO_DEVICE);
9c4a7965
KP
1216
1217 /*
1218 * cipher in
1219 * map and adjust cipher len to aead request cryptlen.
1220 * extent is bytes of HMAC postpended to ciphertext,
1221 * typically 12 for ipsec
1222 */
549bd8bc
LC
1223 to_talitos_ptr_len(&desc->ptr[4], cryptlen, is_sec1);
1224 to_talitos_ptr_ext_set(&desc->ptr[4], 0, is_sec1);
9c4a7965 1225
aeb4c132 1226 sg_link_tbl_len = cryptlen;
aeb4c132 1227
549bd8bc
LC
1228 if (desc->hdr & DESC_HDR_TYPE_IPSEC_ESP) {
1229 to_talitos_ptr_ext_set(&desc->ptr[4], authsize, is_sec1);
1230
1231 if (edesc->desc.hdr & DESC_HDR_MODE1_MDEU_CICV)
1232 sg_link_tbl_len += authsize;
340ff60a 1233 }
9c4a7965 1234
549bd8bc
LC
1235 sg_count = talitos_sg_map(dev, areq->src, cryptlen, edesc,
1236 &desc->ptr[4], sg_count, areq->assoclen,
1237 tbl_off);
1238
1239 if (sg_count > 1) {
1240 tbl_off += sg_count;
1241 sync_needed = true;
1242 }
9c4a7965 1243
549bd8bc
LC
1244 /* cipher out */
1245 if (areq->src != areq->dst) {
1246 sg_count = edesc->dst_nents ? : 1;
1247 if (!is_sec1 || sg_count == 1)
1248 dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1249 }
9c4a7965 1250
549bd8bc
LC
1251 sg_count = talitos_sg_map(dev, areq->dst, cryptlen, edesc,
1252 &desc->ptr[5], sg_count, areq->assoclen,
1253 tbl_off);
aeb4c132 1254
549bd8bc
LC
1255 if (desc->hdr & DESC_HDR_TYPE_IPSEC_ESP)
1256 to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);
aeb4c132 1257
549bd8bc 1258 if (sg_count > 1) {
aeb4c132 1259 edesc->icv_ool = true;
549bd8bc
LC
1260 sync_needed = true;
1261
1262 if (desc->hdr & DESC_HDR_TYPE_IPSEC_ESP) {
1263 struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];
1264 int offset = (edesc->src_nents + edesc->dst_nents + 2) *
1265 sizeof(struct talitos_ptr) + authsize;
1266
1267 /* Add an entry to the link table for ICV data */
1268 tbl_ptr += sg_count - 1;
1269 to_talitos_ptr_ext_set(tbl_ptr, 0, is_sec1);
1270 tbl_ptr++;
1271 to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RETURN,
1272 is_sec1);
1273 to_talitos_ptr_len(tbl_ptr, authsize, is_sec1);
1274
1275 /* icv data follows link tables */
1276 to_talitos_ptr(tbl_ptr, edesc->dma_link_tbl + offset,
1277 is_sec1);
1278 }
340ff60a 1279 } else {
549bd8bc
LC
1280 edesc->icv_ool = false;
1281 }
1282
1283 /* ICV data */
1284 if (!(desc->hdr & DESC_HDR_TYPE_IPSEC_ESP)) {
1285 to_talitos_ptr_len(&desc->ptr[6], authsize, is_sec1);
1286 to_talitos_ptr(&desc->ptr[6], edesc->dma_link_tbl +
1287 areq->assoclen + cryptlen, is_sec1);
340ff60a 1288 }
9c4a7965
KP
1289
1290 /* iv out */
549bd8bc
LC
1291 if (desc->hdr & DESC_HDR_TYPE_IPSEC_ESP)
1292 map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
1293 DMA_FROM_DEVICE);
1294
1295 if (sync_needed)
1296 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1297 edesc->dma_len,
1298 DMA_BIDIRECTIONAL);
9c4a7965 1299
5228f0f7 1300 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
fa86a267
KP
1301 if (ret != -EINPROGRESS) {
1302 ipsec_esp_unmap(dev, edesc, areq);
1303 kfree(edesc);
1304 }
1305 return ret;
9c4a7965
KP
1306}
1307
9c4a7965 1308/*
56af8cd4 1309 * allocate and map the extended descriptor
9c4a7965 1310 */
4de9d0b5
LN
1311static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
1312 struct scatterlist *src,
1313 struct scatterlist *dst,
79fd31d3
HG
1314 u8 *iv,
1315 unsigned int assoclen,
4de9d0b5
LN
1316 unsigned int cryptlen,
1317 unsigned int authsize,
79fd31d3 1318 unsigned int ivsize,
4de9d0b5 1319 int icv_stashing,
62293a37
HG
1320 u32 cryptoflags,
1321 bool encrypt)
9c4a7965 1322{
56af8cd4 1323 struct talitos_edesc *edesc;
6a1e8d14 1324 int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
79fd31d3 1325 dma_addr_t iv_dma = 0;
4de9d0b5 1326 gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
586725f8 1327 GFP_ATOMIC;
6f65f6ac
LC
1328 struct talitos_private *priv = dev_get_drvdata(dev);
1329 bool is_sec1 = has_ftr_sec1(priv);
1330 int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;
8e409fe1 1331 void *err;
9c4a7965 1332
6f65f6ac 1333 if (cryptlen + authsize > max_len) {
4de9d0b5 1334 dev_err(dev, "length exceeds h/w max limit\n");
9c4a7965
KP
1335 return ERR_PTR(-EINVAL);
1336 }
1337
935e99a3 1338 if (ivsize)
79fd31d3
HG
1339 iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
1340
62293a37 1341 if (!dst || dst == src) {
6a1e8d14
LC
1342 src_len = assoclen + cryptlen + authsize;
1343 src_nents = sg_nents_for_len(src, src_len);
8e409fe1
LC
1344 if (src_nents < 0) {
1345 dev_err(dev, "Invalid number of src SG.\n");
1346 err = ERR_PTR(-EINVAL);
1347 goto error_sg;
1348 }
62293a37
HG
1349 src_nents = (src_nents == 1) ? 0 : src_nents;
1350 dst_nents = dst ? src_nents : 0;
6a1e8d14 1351 dst_len = 0;
62293a37 1352 } else { /* dst && dst != src*/
6a1e8d14
LC
1353 src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
1354 src_nents = sg_nents_for_len(src, src_len);
8e409fe1
LC
1355 if (src_nents < 0) {
1356 dev_err(dev, "Invalid number of src SG.\n");
1357 err = ERR_PTR(-EINVAL);
1358 goto error_sg;
1359 }
62293a37 1360 src_nents = (src_nents == 1) ? 0 : src_nents;
6a1e8d14
LC
1361 dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
1362 dst_nents = sg_nents_for_len(dst, dst_len);
8e409fe1
LC
1363 if (dst_nents < 0) {
1364 dev_err(dev, "Invalid number of dst SG.\n");
1365 err = ERR_PTR(-EINVAL);
1366 goto error_sg;
1367 }
62293a37 1368 dst_nents = (dst_nents == 1) ? 0 : dst_nents;
9c4a7965
KP
1369 }
1370
1371 /*
1372 * allocate space for base edesc plus the link tables,
aeb4c132
HX
1373 * allowing for two separate entries for AD and generated ICV (+ 2),
1374 * and space for two sets of ICVs (stashed and generated)
9c4a7965 1375 */
56af8cd4 1376 alloc_len = sizeof(struct talitos_edesc);
aeb4c132 1377 if (src_nents || dst_nents) {
6f65f6ac 1378 if (is_sec1)
6a1e8d14
LC
1379 dma_len = (src_nents ? src_len : 0) +
1380 (dst_nents ? dst_len : 0);
6f65f6ac 1381 else
aeb4c132
HX
1382 dma_len = (src_nents + dst_nents + 2) *
1383 sizeof(struct talitos_ptr) + authsize * 2;
9c4a7965
KP
1384 alloc_len += dma_len;
1385 } else {
1386 dma_len = 0;
4de9d0b5 1387 alloc_len += icv_stashing ? authsize : 0;
9c4a7965
KP
1388 }
1389
586725f8 1390 edesc = kmalloc(alloc_len, GFP_DMA | flags);
9c4a7965 1391 if (!edesc) {
4de9d0b5 1392 dev_err(dev, "could not allocate edescriptor\n");
8e409fe1
LC
1393 err = ERR_PTR(-ENOMEM);
1394 goto error_sg;
9c4a7965
KP
1395 }
1396
1397 edesc->src_nents = src_nents;
1398 edesc->dst_nents = dst_nents;
79fd31d3 1399 edesc->iv_dma = iv_dma;
9c4a7965 1400 edesc->dma_len = dma_len;
497f2e6b
LN
1401 if (dma_len)
1402 edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0],
1403 edesc->dma_len,
1404 DMA_BIDIRECTIONAL);
9c4a7965
KP
1405
1406 return edesc;
8e409fe1
LC
1407error_sg:
1408 if (iv_dma)
1409 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
1410 return err;
9c4a7965
KP
1411}
1412
79fd31d3 1413static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
62293a37 1414 int icv_stashing, bool encrypt)
4de9d0b5
LN
1415{
1416 struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
aeb4c132 1417 unsigned int authsize = crypto_aead_authsize(authenc);
4de9d0b5 1418 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
79fd31d3 1419 unsigned int ivsize = crypto_aead_ivsize(authenc);
4de9d0b5 1420
aeb4c132 1421 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
79fd31d3 1422 iv, areq->assoclen, areq->cryptlen,
aeb4c132 1423 authsize, ivsize, icv_stashing,
62293a37 1424 areq->base.flags, encrypt);
4de9d0b5
LN
1425}
1426
56af8cd4 1427static int aead_encrypt(struct aead_request *req)
9c4a7965
KP
1428{
1429 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1430 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
56af8cd4 1431 struct talitos_edesc *edesc;
9c4a7965
KP
1432
1433 /* allocate extended descriptor */
62293a37 1434 edesc = aead_edesc_alloc(req, req->iv, 0, true);
9c4a7965
KP
1435 if (IS_ERR(edesc))
1436 return PTR_ERR(edesc);
1437
1438 /* set encrypt */
70bcaca7 1439 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
9c4a7965 1440
aeb4c132 1441 return ipsec_esp(edesc, req, ipsec_esp_encrypt_done);
9c4a7965
KP
1442}
1443
56af8cd4 1444static int aead_decrypt(struct aead_request *req)
9c4a7965
KP
1445{
1446 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
aeb4c132 1447 unsigned int authsize = crypto_aead_authsize(authenc);
9c4a7965 1448 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
fe5720e2 1449 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
56af8cd4 1450 struct talitos_edesc *edesc;
9c4a7965
KP
1451 struct scatterlist *sg;
1452 void *icvdata;
1453
1454 req->cryptlen -= authsize;
1455
1456 /* allocate extended descriptor */
62293a37 1457 edesc = aead_edesc_alloc(req, req->iv, 1, false);
9c4a7965
KP
1458 if (IS_ERR(edesc))
1459 return PTR_ERR(edesc);
1460
fe5720e2 1461 if ((priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
e938e465
KP
1462 ((!edesc->src_nents && !edesc->dst_nents) ||
1463 priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {
9c4a7965 1464
fe5720e2 1465 /* decrypt and check the ICV */
e938e465
KP
1466 edesc->desc.hdr = ctx->desc_hdr_template |
1467 DESC_HDR_DIR_INBOUND |
fe5720e2 1468 DESC_HDR_MODE1_MDEU_CICV;
9c4a7965 1469
fe5720e2
KP
1470 /* reset integrity check result bits */
1471 edesc->desc.hdr_lo = 0;
9c4a7965 1472
aeb4c132 1473 return ipsec_esp(edesc, req, ipsec_esp_decrypt_hwauth_done);
e938e465 1474 }
fe5720e2 1475
e938e465
KP
1476 /* Have to check the ICV with software */
1477 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
fe5720e2 1478
e938e465
KP
1479 /* stash incoming ICV for later cmp with ICV generated by the h/w */
1480 if (edesc->dma_len)
aeb4c132
HX
1481 icvdata = (char *)&edesc->link_tbl[edesc->src_nents +
1482 edesc->dst_nents + 2];
e938e465
KP
1483 else
1484 icvdata = &edesc->link_tbl[0];
fe5720e2 1485
e938e465 1486 sg = sg_last(req->src, edesc->src_nents ? : 1);
fe5720e2 1487
aeb4c132 1488 memcpy(icvdata, (char *)sg_virt(sg) + sg->length - authsize, authsize);
9c4a7965 1489
aeb4c132 1490 return ipsec_esp(edesc, req, ipsec_esp_decrypt_swauth_done);
9c4a7965
KP
1491}
1492
4de9d0b5
LN
1493static int ablkcipher_setkey(struct crypto_ablkcipher *cipher,
1494 const u8 *key, unsigned int keylen)
1495{
1496 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
4de9d0b5 1497
03d2c511
MH
1498 if (keylen > TALITOS_MAX_KEY_SIZE) {
1499 crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
1500 return -EINVAL;
1501 }
1502
4de9d0b5
LN
1503 memcpy(&ctx->key, key, keylen);
1504 ctx->keylen = keylen;
1505
1506 return 0;
4de9d0b5
LN
1507}
1508
1509static void common_nonsnoop_unmap(struct device *dev,
1510 struct talitos_edesc *edesc,
1511 struct ablkcipher_request *areq)
1512{
1513 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
032d197e 1514
6a1e8d14 1515 talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->nbytes, 0);
4de9d0b5
LN
1516 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[2], DMA_TO_DEVICE);
1517 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);
1518
4de9d0b5
LN
1519 if (edesc->dma_len)
1520 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1521 DMA_BIDIRECTIONAL);
1522}
1523
1524static void ablkcipher_done(struct device *dev,
1525 struct talitos_desc *desc, void *context,
1526 int err)
1527{
1528 struct ablkcipher_request *areq = context;
19bbbc63
KP
1529 struct talitos_edesc *edesc;
1530
1531 edesc = container_of(desc, struct talitos_edesc, desc);
4de9d0b5
LN
1532
1533 common_nonsnoop_unmap(dev, edesc, areq);
1534
1535 kfree(edesc);
1536
1537 areq->base.complete(&areq->base, err);
1538}
1539
1540static int common_nonsnoop(struct talitos_edesc *edesc,
1541 struct ablkcipher_request *areq,
4de9d0b5
LN
1542 void (*callback) (struct device *dev,
1543 struct talitos_desc *desc,
1544 void *context, int error))
1545{
1546 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1547 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1548 struct device *dev = ctx->dev;
1549 struct talitos_desc *desc = &edesc->desc;
1550 unsigned int cryptlen = areq->nbytes;
79fd31d3 1551 unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
4de9d0b5 1552 int sg_count, ret;
6a1e8d14 1553 bool sync_needed = false;
922f9dc8
LC
1554 struct talitos_private *priv = dev_get_drvdata(dev);
1555 bool is_sec1 = has_ftr_sec1(priv);
4de9d0b5
LN
1556
1557 /* first DWORD empty */
2529bc37 1558 desc->ptr[0] = zero_entry;
4de9d0b5
LN
1559
1560 /* cipher iv */
922f9dc8
LC
1561 to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, is_sec1);
1562 to_talitos_ptr_len(&desc->ptr[1], ivsize, is_sec1);
b096b544 1563 to_talitos_ptr_ext_set(&desc->ptr[1], 0, is_sec1);
4de9d0b5
LN
1564
1565 /* cipher key */
1566 map_single_talitos_ptr(dev, &desc->ptr[2], ctx->keylen,
a2b35aa8 1567 (char *)&ctx->key, DMA_TO_DEVICE);
4de9d0b5 1568
6a1e8d14
LC
1569 sg_count = edesc->src_nents ?: 1;
1570 if (is_sec1 && sg_count > 1)
1571 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1572 cryptlen);
1573 else
1574 sg_count = dma_map_sg(dev, areq->src, sg_count,
1575 (areq->src == areq->dst) ?
1576 DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
4de9d0b5
LN
1577 /*
1578 * cipher in
1579 */
6a1e8d14
LC
1580 sg_count = talitos_sg_map(dev, areq->src, cryptlen, edesc,
1581 &desc->ptr[3], sg_count, 0, 0);
1582 if (sg_count > 1)
1583 sync_needed = true;
4de9d0b5
LN
1584
1585 /* cipher out */
6a1e8d14
LC
1586 if (areq->src != areq->dst) {
1587 sg_count = edesc->dst_nents ? : 1;
1588 if (!is_sec1 || sg_count == 1)
1589 dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1590 }
1591
1592 ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
1593 sg_count, 0, (edesc->src_nents + 1));
1594 if (ret > 1)
1595 sync_needed = true;
4de9d0b5
LN
1596
1597 /* iv out */
a2b35aa8 1598 map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
4de9d0b5
LN
1599 DMA_FROM_DEVICE);
1600
1601 /* last DWORD empty */
2529bc37 1602 desc->ptr[6] = zero_entry;
4de9d0b5 1603
6a1e8d14
LC
1604 if (sync_needed)
1605 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1606 edesc->dma_len, DMA_BIDIRECTIONAL);
1607
5228f0f7 1608 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
4de9d0b5
LN
1609 if (ret != -EINPROGRESS) {
1610 common_nonsnoop_unmap(dev, edesc, areq);
1611 kfree(edesc);
1612 }
1613 return ret;
1614}
1615
e938e465 1616static struct talitos_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request *
62293a37 1617 areq, bool encrypt)
4de9d0b5
LN
1618{
1619 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1620 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
79fd31d3 1621 unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
4de9d0b5 1622
aeb4c132 1623 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
79fd31d3 1624 areq->info, 0, areq->nbytes, 0, ivsize, 0,
62293a37 1625 areq->base.flags, encrypt);
4de9d0b5
LN
1626}
1627
1628static int ablkcipher_encrypt(struct ablkcipher_request *areq)
1629{
1630 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1631 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1632 struct talitos_edesc *edesc;
1633
1634 /* allocate extended descriptor */
62293a37 1635 edesc = ablkcipher_edesc_alloc(areq, true);
4de9d0b5
LN
1636 if (IS_ERR(edesc))
1637 return PTR_ERR(edesc);
1638
1639 /* set encrypt */
1640 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1641
febec542 1642 return common_nonsnoop(edesc, areq, ablkcipher_done);
4de9d0b5
LN
1643}
1644
1645static int ablkcipher_decrypt(struct ablkcipher_request *areq)
1646{
1647 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1648 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1649 struct talitos_edesc *edesc;
1650
1651 /* allocate extended descriptor */
62293a37 1652 edesc = ablkcipher_edesc_alloc(areq, false);
4de9d0b5
LN
1653 if (IS_ERR(edesc))
1654 return PTR_ERR(edesc);
1655
1656 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1657
febec542 1658 return common_nonsnoop(edesc, areq, ablkcipher_done);
4de9d0b5
LN
1659}
1660
497f2e6b
LN
1661static void common_nonsnoop_hash_unmap(struct device *dev,
1662 struct talitos_edesc *edesc,
1663 struct ahash_request *areq)
1664{
1665 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
922f9dc8
LC
1666 struct talitos_private *priv = dev_get_drvdata(dev);
1667 bool is_sec1 = has_ftr_sec1(priv);
497f2e6b
LN
1668
1669 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
1670
6a1e8d14 1671 talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);
032d197e 1672
497f2e6b 1673 /* When using hashctx-in, must unmap it. */
922f9dc8 1674 if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
497f2e6b
LN
1675 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
1676 DMA_TO_DEVICE);
1677
922f9dc8 1678 if (from_talitos_ptr_len(&edesc->desc.ptr[2], is_sec1))
497f2e6b
LN
1679 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[2],
1680 DMA_TO_DEVICE);
1681
497f2e6b
LN
1682 if (edesc->dma_len)
1683 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1684 DMA_BIDIRECTIONAL);
1685
1686}
1687
1688static void ahash_done(struct device *dev,
1689 struct talitos_desc *desc, void *context,
1690 int err)
1691{
1692 struct ahash_request *areq = context;
1693 struct talitos_edesc *edesc =
1694 container_of(desc, struct talitos_edesc, desc);
1695 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1696
1697 if (!req_ctx->last && req_ctx->to_hash_later) {
1698 /* Position any partial block for next update/final/finup */
1699 memcpy(req_ctx->buf, req_ctx->bufnext, req_ctx->to_hash_later);
5e833bc4 1700 req_ctx->nbuf = req_ctx->to_hash_later;
497f2e6b
LN
1701 }
1702 common_nonsnoop_hash_unmap(dev, edesc, areq);
1703
1704 kfree(edesc);
1705
1706 areq->base.complete(&areq->base, err);
1707}
1708
2d02905e
LC
1709/*
1710 * SEC1 doesn't like hashing of a 0-sized message, so we do the padding
1711 * ourselves and submit a padded block
1712 */
1713 static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
1714 struct talitos_edesc *edesc,
1715 struct talitos_ptr *ptr)
1716{
1717 static u8 padded_hash[64] = {
1718 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1719 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1720 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1721 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1722 };
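	/*
	 * The 64-byte block above is the MD5/SHA-1/SHA-256 padding of an
	 * empty message: a single 0x80 byte followed by zeroes, with the
	 * trailing 64-bit length field left at zero. The PAD mode bit is
	 * cleared below so the hardware treats it as already-padded data.
	 */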
1723
1724 pr_err_once("Bug in SEC1, padding ourself\n");
1725 edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1726 map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
1727 (char *)padded_hash, DMA_TO_DEVICE);
1728}
1729
497f2e6b
LN
1730static int common_nonsnoop_hash(struct talitos_edesc *edesc,
1731 struct ahash_request *areq, unsigned int length,
1732 void (*callback) (struct device *dev,
1733 struct talitos_desc *desc,
1734 void *context, int error))
1735{
1736 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1737 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1738 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1739 struct device *dev = ctx->dev;
1740 struct talitos_desc *desc = &edesc->desc;
032d197e 1741 int ret;
6a1e8d14 1742 bool sync_needed = false;
922f9dc8
LC
1743 struct talitos_private *priv = dev_get_drvdata(dev);
1744 bool is_sec1 = has_ftr_sec1(priv);
6a1e8d14 1745 int sg_count;
497f2e6b
LN
1746
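	/*
	 * Descriptor pointer layout for the hashing path, filled in below:
	 * [0] unused, [1] hash context in (or zero), [2] HMAC key (or zero),
	 * [3] data in, [4] unused, [5] digest or hash context out, [6] unused.
	 */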
1747 /* first DWORD empty */
1748 desc->ptr[0] = zero_entry;
1749
60f208d7
KP
1750 /* hash context in */
1751 if (!req_ctx->first || req_ctx->swinit) {
497f2e6b
LN
1752 map_single_talitos_ptr(dev, &desc->ptr[1],
1753 req_ctx->hw_context_size,
a2b35aa8 1754 (char *)req_ctx->hw_context,
497f2e6b 1755 DMA_TO_DEVICE);
60f208d7 1756 req_ctx->swinit = 0;
497f2e6b
LN
1757 } else {
1758 desc->ptr[1] = zero_entry;
497f2e6b 1759 }
354e9aac
LC
1760 /* Indicate next op is not the first. */
1761 req_ctx->first = 0;
497f2e6b
LN
1762
1763 /* HMAC key */
1764 if (ctx->keylen)
1765 map_single_talitos_ptr(dev, &desc->ptr[2], ctx->keylen,
a2b35aa8 1766 (char *)&ctx->key, DMA_TO_DEVICE);
497f2e6b
LN
1767 else
1768 desc->ptr[2] = zero_entry;
1769
6a1e8d14
LC
1770 sg_count = edesc->src_nents ?: 1;
1771 if (is_sec1 && sg_count > 1)
cc5965f7 1772 sg_copy_to_buffer(req_ctx->psrc, sg_count, edesc->buf, length);
6a1e8d14
LC
1773 else
1774 sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
1775 DMA_TO_DEVICE);
497f2e6b
LN
1776 /*
1777 * data in
1778 */
6a1e8d14
LC
1779 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1780 &desc->ptr[3], sg_count, 0, 0);
1781 if (sg_count > 1)
1782 sync_needed = true;
497f2e6b
LN
1783
1784 /* fifth DWORD empty */
1785 desc->ptr[4] = zero_entry;
1786
1787 /* hash/HMAC out -or- hash context out */
1788 if (req_ctx->last)
1789 map_single_talitos_ptr(dev, &desc->ptr[5],
1790 crypto_ahash_digestsize(tfm),
a2b35aa8 1791 areq->result, DMA_FROM_DEVICE);
497f2e6b
LN
1792 else
1793 map_single_talitos_ptr(dev, &desc->ptr[5],
1794 req_ctx->hw_context_size,
a2b35aa8 1795 req_ctx->hw_context, DMA_FROM_DEVICE);
497f2e6b
LN
1796
1797 /* last DWORD empty */
1798 desc->ptr[6] = zero_entry;
1799
2d02905e
LC
1800 if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
1801 talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);
1802
6a1e8d14
LC
1803 if (sync_needed)
1804 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1805 edesc->dma_len, DMA_BIDIRECTIONAL);
1806
5228f0f7 1807 ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
497f2e6b
LN
1808 if (ret != -EINPROGRESS) {
1809 common_nonsnoop_hash_unmap(dev, edesc, areq);
1810 kfree(edesc);
1811 }
1812 return ret;
1813}
1814
1815static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
1816 unsigned int nbytes)
1817{
1818 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1819 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1820 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1821
aeb4c132 1822 return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
62293a37 1823 nbytes, 0, 0, 0, areq->base.flags, false);
497f2e6b
LN
1824}
1825
1826static int ahash_init(struct ahash_request *areq)
1827{
1828 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1829 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1830
1831 /* Initialize the context */
5e833bc4 1832 req_ctx->nbuf = 0;
60f208d7
KP
1833 req_ctx->first = 1; /* first indicates h/w must init its context */
1834 req_ctx->swinit = 0; /* assume h/w init of context */
497f2e6b
LN
1835 req_ctx->hw_context_size =
1836 (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
1837 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
1838 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
1839
1840 return 0;
1841}
1842
60f208d7
KP
1843/*
1844 * on h/w without explicit sha224 support, we initialize h/w context
1845 * manually with sha224 constants, and tell it to run sha256.
1846 */
1847static int ahash_init_sha224_swinit(struct ahash_request *areq)
1848{
1849 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1850
1851 ahash_init(areq);
1852 req_ctx->swinit = 1; /* prevent h/w from initializing context with sha256 values */
1853
a752447a
KP
1854 req_ctx->hw_context[0] = SHA224_H0;
1855 req_ctx->hw_context[1] = SHA224_H1;
1856 req_ctx->hw_context[2] = SHA224_H2;
1857 req_ctx->hw_context[3] = SHA224_H3;
1858 req_ctx->hw_context[4] = SHA224_H4;
1859 req_ctx->hw_context[5] = SHA224_H5;
1860 req_ctx->hw_context[6] = SHA224_H6;
1861 req_ctx->hw_context[7] = SHA224_H7;
60f208d7
KP
1862
1863 /* init 64-bit count */
1864 req_ctx->hw_context[8] = 0;
1865 req_ctx->hw_context[9] = 0;
1866
1867 return 0;
1868}
1869
497f2e6b
LN
1870static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
1871{
1872 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1873 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1874 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1875 struct talitos_edesc *edesc;
1876 unsigned int blocksize =
1877 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
1878 unsigned int nbytes_to_hash;
1879 unsigned int to_hash_later;
5e833bc4 1880 unsigned int nsg;
8e409fe1 1881 int nents;
497f2e6b 1882
5e833bc4
LN
1883 if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
1884 /* Buffer up to one whole block */
8e409fe1
LC
1885 nents = sg_nents_for_len(areq->src, nbytes);
1886 if (nents < 0) {
1887 dev_err(ctx->dev, "Invalid number of src SG.\n");
1888 return nents;
1889 }
1890 sg_copy_to_buffer(areq->src, nents,
5e833bc4
LN
1891 req_ctx->buf + req_ctx->nbuf, nbytes);
1892 req_ctx->nbuf += nbytes;
497f2e6b
LN
1893 return 0;
1894 }
1895
5e833bc4
LN
1896 /* At least (blocksize + 1) bytes are available to hash */
1897 nbytes_to_hash = nbytes + req_ctx->nbuf;
1898 to_hash_later = nbytes_to_hash & (blocksize - 1);
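	/* blocksize is 64 or 128 here (always a power of two), so the mask
	 * above is nbytes_to_hash modulo blocksize: the partial tail. */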
1899
1900 if (req_ctx->last)
1901 to_hash_later = 0;
1902 else if (to_hash_later)
1903 /* There is a partial block. Hash the full block(s) now */
1904 nbytes_to_hash -= to_hash_later;
1905 else {
1906 /* Keep one block buffered */
1907 nbytes_to_hash -= blocksize;
1908 to_hash_later = blocksize;
1909 }
1910
1911 /* Chain in any previously buffered data */
1912 if (req_ctx->nbuf) {
1913 nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
1914 sg_init_table(req_ctx->bufsl, nsg);
1915 sg_set_buf(req_ctx->bufsl, req_ctx->buf, req_ctx->nbuf);
1916 if (nsg > 1)
c56f6d12 1917 sg_chain(req_ctx->bufsl, 2, areq->src);
497f2e6b 1918 req_ctx->psrc = req_ctx->bufsl;
5e833bc4 1919 } else
497f2e6b 1920 req_ctx->psrc = areq->src;
5e833bc4
LN
1921
1922 if (to_hash_later) {
8e409fe1
LC
1923 nents = sg_nents_for_len(areq->src, nbytes);
1924 if (nents < 0) {
1925 dev_err(ctx->dev, "Invalid number of src SG.\n");
1926 return nents;
1927 }
d0525723 1928 sg_pcopy_to_buffer(areq->src, nents,
5e833bc4
LN
1929 req_ctx->bufnext,
1930 to_hash_later,
1931 nbytes - to_hash_later);
497f2e6b 1932 }
5e833bc4 1933 req_ctx->to_hash_later = to_hash_later;
497f2e6b 1934
5e833bc4 1935 /* Allocate extended descriptor */
497f2e6b
LN
1936 edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
1937 if (IS_ERR(edesc))
1938 return PTR_ERR(edesc);
1939
1940 edesc->desc.hdr = ctx->desc_hdr_template;
1941
1942 /* On last one, request SEC to pad; otherwise continue */
1943 if (req_ctx->last)
1944 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
1945 else
1946 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;
1947
60f208d7
KP
1948 /* request SEC to INIT hash. */
1949 if (req_ctx->first && !req_ctx->swinit)
497f2e6b
LN
1950 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;
1951
1952 /* When the tfm context has a keylen, it's an HMAC.
1953 * A first or last (i.e. not middle) descriptor must request HMAC.
1954 */
1955 if (ctx->keylen && (req_ctx->first || req_ctx->last))
1956 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;
1957
1958 return common_nonsnoop_hash(edesc, areq, nbytes_to_hash,
1959 ahash_done);
1960}
1961
1962static int ahash_update(struct ahash_request *areq)
1963{
1964 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1965
1966 req_ctx->last = 0;
1967
1968 return ahash_process_req(areq, areq->nbytes);
1969}
1970
1971static int ahash_final(struct ahash_request *areq)
1972{
1973 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1974
1975 req_ctx->last = 1;
1976
1977 return ahash_process_req(areq, 0);
1978}
1979
1980static int ahash_finup(struct ahash_request *areq)
1981{
1982 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1983
1984 req_ctx->last = 1;
1985
1986 return ahash_process_req(areq, areq->nbytes);
1987}
1988
1989static int ahash_digest(struct ahash_request *areq)
1990{
1991 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
60f208d7 1992 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
497f2e6b 1993
60f208d7 1994 ahash->init(areq);
497f2e6b
LN
1995 req_ctx->last = 1;
1996
1997 return ahash_process_req(areq, areq->nbytes);
1998}
1999
3639ca84
HG
2000static int ahash_export(struct ahash_request *areq, void *out)
2001{
2002 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2003 struct talitos_export_state *export = out;
2004
2005 memcpy(export->hw_context, req_ctx->hw_context,
2006 req_ctx->hw_context_size);
2007 memcpy(export->buf, req_ctx->buf, req_ctx->nbuf);
2008 export->swinit = req_ctx->swinit;
2009 export->first = req_ctx->first;
2010 export->last = req_ctx->last;
2011 export->to_hash_later = req_ctx->to_hash_later;
2012 export->nbuf = req_ctx->nbuf;
2013
2014 return 0;
2015}
2016
2017static int ahash_import(struct ahash_request *areq, const void *in)
2018{
2019 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2020 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2021 const struct talitos_export_state *export = in;
2022
2023 memset(req_ctx, 0, sizeof(*req_ctx));
2024 req_ctx->hw_context_size =
2025 (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
2026 ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2027 : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
2028 memcpy(req_ctx->hw_context, export->hw_context,
2029 req_ctx->hw_context_size);
2030 memcpy(req_ctx->buf, export->buf, export->nbuf);
2031 req_ctx->swinit = export->swinit;
2032 req_ctx->first = export->first;
2033 req_ctx->last = export->last;
2034 req_ctx->to_hash_later = export->to_hash_later;
2035 req_ctx->nbuf = export->nbuf;
2036
2037 return 0;
2038}
2039
79b3a418
LN
2040struct keyhash_result {
2041 struct completion completion;
2042 int err;
2043};
2044
2045static void keyhash_complete(struct crypto_async_request *req, int err)
2046{
2047 struct keyhash_result *res = req->data;
2048
2049 if (err == -EINPROGRESS)
2050 return;
2051
2052 res->err = err;
2053 complete(&res->completion);
2054}
2055
2056static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
2057 u8 *hash)
2058{
2059 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2060
2061 struct scatterlist sg[1];
2062 struct ahash_request *req;
2063 struct keyhash_result hresult;
2064 int ret;
2065
2066 init_completion(&hresult.completion);
2067
2068 req = ahash_request_alloc(tfm, GFP_KERNEL);
2069 if (!req)
2070 return -ENOMEM;
2071
2072 /* Keep tfm keylen == 0 during hash of the long key */
2073 ctx->keylen = 0;
2074 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2075 keyhash_complete, &hresult);
2076
2077 sg_init_one(&sg[0], key, keylen);
2078
2079 ahash_request_set_crypt(req, sg, hash, keylen);
2080 ret = crypto_ahash_digest(req);
2081 switch (ret) {
2082 case 0:
2083 break;
2084 case -EINPROGRESS:
2085 case -EBUSY:
2086 ret = wait_for_completion_interruptible(
2087 &hresult.completion);
2088 if (!ret)
2089 ret = hresult.err;
2090 break;
2091 default:
2092 break;
2093 }
2094 ahash_request_free(req);
2095
2096 return ret;
2097}
2098
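/*
 * Per the usual HMAC convention (RFC 2104), keys longer than the hash block
 * size are replaced by their digest before use; keyhash() above computes
 * that digest through this driver's own ahash and waits for it to complete.
 */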
2099static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2100 unsigned int keylen)
2101{
2102 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2103 unsigned int blocksize =
2104 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2105 unsigned int digestsize = crypto_ahash_digestsize(tfm);
2106 unsigned int keysize = keylen;
2107 u8 hash[SHA512_DIGEST_SIZE];
2108 int ret;
2109
2110 if (keylen <= blocksize)
2111 memcpy(ctx->key, key, keysize);
2112 else {
2113 /* Must get the hash of the long key */
2114 ret = keyhash(tfm, key, keylen, hash);
2115
2116 if (ret) {
2117 crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2118 return -EINVAL;
2119 }
2120
2121 keysize = digestsize;
2122 memcpy(ctx->key, hash, digestsize);
2123 }
2124
2125 ctx->keylen = keysize;
2126
2127 return 0;
2128}
2129
2130
9c4a7965 2131struct talitos_alg_template {
d5e4aaef 2132 u32 type;
b0057763 2133 u32 priority;
d5e4aaef
LN
2134 union {
2135 struct crypto_alg crypto;
acbf7c62 2136 struct ahash_alg hash;
aeb4c132 2137 struct aead_alg aead;
d5e4aaef 2138 } alg;
9c4a7965
KP
2139 __be32 desc_hdr_template;
2140};
2141
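/*
 * Each template's desc_hdr_template encodes the descriptor type and the
 * primary/secondary execution units it needs; at probe time hw_supports()
 * checks those bits against the "fsl,exec-units-mask" and
 * "fsl,descriptor-types-mask" device tree properties, so only algorithms
 * the hardware can actually run get registered.
 */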
2142static struct talitos_alg_template driver_algs[] = {
991155ba 2143 /* AEAD algorithms. These use a single-pass ipsec_esp descriptor */
d5e4aaef 2144 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2145 .alg.aead = {
2146 .base = {
2147 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2148 .cra_driver_name = "authenc-hmac-sha1-"
2149 "cbc-aes-talitos",
2150 .cra_blocksize = AES_BLOCK_SIZE,
2151 .cra_flags = CRYPTO_ALG_ASYNC,
2152 },
2153 .ivsize = AES_BLOCK_SIZE,
2154 .maxauthsize = SHA1_DIGEST_SIZE,
56af8cd4 2155 },
9c4a7965
KP
2156 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2157 DESC_HDR_SEL0_AESU |
2158 DESC_HDR_MODE0_AESU_CBC |
2159 DESC_HDR_SEL1_MDEUA |
2160 DESC_HDR_MODE1_MDEU_INIT |
2161 DESC_HDR_MODE1_MDEU_PAD |
2162 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
70bcaca7 2163 },
7405c8d7
LC
2164 { .type = CRYPTO_ALG_TYPE_AEAD,
2165 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2166 .alg.aead = {
2167 .base = {
2168 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2169 .cra_driver_name = "authenc-hmac-sha1-"
2170 "cbc-aes-talitos",
2171 .cra_blocksize = AES_BLOCK_SIZE,
2172 .cra_flags = CRYPTO_ALG_ASYNC,
2173 },
2174 .ivsize = AES_BLOCK_SIZE,
2175 .maxauthsize = SHA1_DIGEST_SIZE,
2176 },
2177 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2178 DESC_HDR_SEL0_AESU |
2179 DESC_HDR_MODE0_AESU_CBC |
2180 DESC_HDR_SEL1_MDEUA |
2181 DESC_HDR_MODE1_MDEU_INIT |
2182 DESC_HDR_MODE1_MDEU_PAD |
2183 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2184 },
d5e4aaef 2185 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2186 .alg.aead = {
2187 .base = {
2188 .cra_name = "authenc(hmac(sha1),"
2189 "cbc(des3_ede))",
2190 .cra_driver_name = "authenc-hmac-sha1-"
2191 "cbc-3des-talitos",
2192 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2193 .cra_flags = CRYPTO_ALG_ASYNC,
2194 },
2195 .ivsize = DES3_EDE_BLOCK_SIZE,
2196 .maxauthsize = SHA1_DIGEST_SIZE,
56af8cd4 2197 },
70bcaca7
LN
2198 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2199 DESC_HDR_SEL0_DEU |
2200 DESC_HDR_MODE0_DEU_CBC |
2201 DESC_HDR_MODE0_DEU_3DES |
2202 DESC_HDR_SEL1_MDEUA |
2203 DESC_HDR_MODE1_MDEU_INIT |
2204 DESC_HDR_MODE1_MDEU_PAD |
2205 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
3952f17e 2206 },
7405c8d7
LC
2207 { .type = CRYPTO_ALG_TYPE_AEAD,
2208 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2209 .alg.aead = {
2210 .base = {
2211 .cra_name = "authenc(hmac(sha1),"
2212 "cbc(des3_ede))",
2213 .cra_driver_name = "authenc-hmac-sha1-"
2214 "cbc-3des-talitos",
2215 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2216 .cra_flags = CRYPTO_ALG_ASYNC,
2217 },
2218 .ivsize = DES3_EDE_BLOCK_SIZE,
2219 .maxauthsize = SHA1_DIGEST_SIZE,
2220 },
2221 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2222 DESC_HDR_SEL0_DEU |
2223 DESC_HDR_MODE0_DEU_CBC |
2224 DESC_HDR_MODE0_DEU_3DES |
2225 DESC_HDR_SEL1_MDEUA |
2226 DESC_HDR_MODE1_MDEU_INIT |
2227 DESC_HDR_MODE1_MDEU_PAD |
2228 DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2229 },
357fb605 2230 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2231 .alg.aead = {
2232 .base = {
2233 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2234 .cra_driver_name = "authenc-hmac-sha224-"
2235 "cbc-aes-talitos",
2236 .cra_blocksize = AES_BLOCK_SIZE,
2237 .cra_flags = CRYPTO_ALG_ASYNC,
2238 },
2239 .ivsize = AES_BLOCK_SIZE,
2240 .maxauthsize = SHA224_DIGEST_SIZE,
357fb605
HG
2241 },
2242 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2243 DESC_HDR_SEL0_AESU |
2244 DESC_HDR_MODE0_AESU_CBC |
2245 DESC_HDR_SEL1_MDEUA |
2246 DESC_HDR_MODE1_MDEU_INIT |
2247 DESC_HDR_MODE1_MDEU_PAD |
2248 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2249 },
7405c8d7
LC
2250 { .type = CRYPTO_ALG_TYPE_AEAD,
2251 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2252 .alg.aead = {
2253 .base = {
2254 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2255 .cra_driver_name = "authenc-hmac-sha224-"
2256 "cbc-aes-talitos",
2257 .cra_blocksize = AES_BLOCK_SIZE,
2258 .cra_flags = CRYPTO_ALG_ASYNC,
2259 },
2260 .ivsize = AES_BLOCK_SIZE,
2261 .maxauthsize = SHA224_DIGEST_SIZE,
2262 },
2263 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2264 DESC_HDR_SEL0_AESU |
2265 DESC_HDR_MODE0_AESU_CBC |
2266 DESC_HDR_SEL1_MDEUA |
2267 DESC_HDR_MODE1_MDEU_INIT |
2268 DESC_HDR_MODE1_MDEU_PAD |
2269 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2270 },
357fb605 2271 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2272 .alg.aead = {
2273 .base = {
2274 .cra_name = "authenc(hmac(sha224),"
2275 "cbc(des3_ede))",
2276 .cra_driver_name = "authenc-hmac-sha224-"
2277 "cbc-3des-talitos",
2278 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2279 .cra_flags = CRYPTO_ALG_ASYNC,
2280 },
2281 .ivsize = DES3_EDE_BLOCK_SIZE,
2282 .maxauthsize = SHA224_DIGEST_SIZE,
357fb605
HG
2283 },
2284 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2285 DESC_HDR_SEL0_DEU |
2286 DESC_HDR_MODE0_DEU_CBC |
2287 DESC_HDR_MODE0_DEU_3DES |
2288 DESC_HDR_SEL1_MDEUA |
2289 DESC_HDR_MODE1_MDEU_INIT |
2290 DESC_HDR_MODE1_MDEU_PAD |
2291 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2292 },
7405c8d7
LC
2293 { .type = CRYPTO_ALG_TYPE_AEAD,
2294 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2295 .alg.aead = {
2296 .base = {
2297 .cra_name = "authenc(hmac(sha224),"
2298 "cbc(des3_ede))",
2299 .cra_driver_name = "authenc-hmac-sha224-"
2300 "cbc-3des-talitos",
2301 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2302 .cra_flags = CRYPTO_ALG_ASYNC,
2303 },
2304 .ivsize = DES3_EDE_BLOCK_SIZE,
2305 .maxauthsize = SHA224_DIGEST_SIZE,
2306 },
2307 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2308 DESC_HDR_SEL0_DEU |
2309 DESC_HDR_MODE0_DEU_CBC |
2310 DESC_HDR_MODE0_DEU_3DES |
2311 DESC_HDR_SEL1_MDEUA |
2312 DESC_HDR_MODE1_MDEU_INIT |
2313 DESC_HDR_MODE1_MDEU_PAD |
2314 DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2315 },
d5e4aaef 2316 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2317 .alg.aead = {
2318 .base = {
2319 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2320 .cra_driver_name = "authenc-hmac-sha256-"
2321 "cbc-aes-talitos",
2322 .cra_blocksize = AES_BLOCK_SIZE,
2323 .cra_flags = CRYPTO_ALG_ASYNC,
2324 },
2325 .ivsize = AES_BLOCK_SIZE,
2326 .maxauthsize = SHA256_DIGEST_SIZE,
56af8cd4 2327 },
3952f17e
LN
2328 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2329 DESC_HDR_SEL0_AESU |
2330 DESC_HDR_MODE0_AESU_CBC |
2331 DESC_HDR_SEL1_MDEUA |
2332 DESC_HDR_MODE1_MDEU_INIT |
2333 DESC_HDR_MODE1_MDEU_PAD |
2334 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2335 },
7405c8d7
LC
2336 { .type = CRYPTO_ALG_TYPE_AEAD,
2337 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2338 .alg.aead = {
2339 .base = {
2340 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2341 .cra_driver_name = "authenc-hmac-sha256-"
2342 "cbc-aes-talitos",
2343 .cra_blocksize = AES_BLOCK_SIZE,
2344 .cra_flags = CRYPTO_ALG_ASYNC,
2345 },
2346 .ivsize = AES_BLOCK_SIZE,
2347 .maxauthsize = SHA256_DIGEST_SIZE,
2348 },
2349 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2350 DESC_HDR_SEL0_AESU |
2351 DESC_HDR_MODE0_AESU_CBC |
2352 DESC_HDR_SEL1_MDEUA |
2353 DESC_HDR_MODE1_MDEU_INIT |
2354 DESC_HDR_MODE1_MDEU_PAD |
2355 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2356 },
d5e4aaef 2357 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2358 .alg.aead = {
2359 .base = {
2360 .cra_name = "authenc(hmac(sha256),"
2361 "cbc(des3_ede))",
2362 .cra_driver_name = "authenc-hmac-sha256-"
2363 "cbc-3des-talitos",
2364 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2365 .cra_flags = CRYPTO_ALG_ASYNC,
2366 },
2367 .ivsize = DES3_EDE_BLOCK_SIZE,
2368 .maxauthsize = SHA256_DIGEST_SIZE,
56af8cd4 2369 },
3952f17e
LN
2370 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2371 DESC_HDR_SEL0_DEU |
2372 DESC_HDR_MODE0_DEU_CBC |
2373 DESC_HDR_MODE0_DEU_3DES |
2374 DESC_HDR_SEL1_MDEUA |
2375 DESC_HDR_MODE1_MDEU_INIT |
2376 DESC_HDR_MODE1_MDEU_PAD |
2377 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2378 },
7405c8d7
LC
2379 { .type = CRYPTO_ALG_TYPE_AEAD,
2380 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2381 .alg.aead = {
2382 .base = {
2383 .cra_name = "authenc(hmac(sha256),"
2384 "cbc(des3_ede))",
2385 .cra_driver_name = "authenc-hmac-sha256-"
2386 "cbc-3des-talitos",
2387 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2388 .cra_flags = CRYPTO_ALG_ASYNC,
2389 },
2390 .ivsize = DES3_EDE_BLOCK_SIZE,
2391 .maxauthsize = SHA256_DIGEST_SIZE,
2392 },
2393 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2394 DESC_HDR_SEL0_DEU |
2395 DESC_HDR_MODE0_DEU_CBC |
2396 DESC_HDR_MODE0_DEU_3DES |
2397 DESC_HDR_SEL1_MDEUA |
2398 DESC_HDR_MODE1_MDEU_INIT |
2399 DESC_HDR_MODE1_MDEU_PAD |
2400 DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2401 },
d5e4aaef 2402 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2403 .alg.aead = {
2404 .base = {
2405 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2406 .cra_driver_name = "authenc-hmac-sha384-"
2407 "cbc-aes-talitos",
2408 .cra_blocksize = AES_BLOCK_SIZE,
2409 .cra_flags = CRYPTO_ALG_ASYNC,
2410 },
2411 .ivsize = AES_BLOCK_SIZE,
2412 .maxauthsize = SHA384_DIGEST_SIZE,
357fb605
HG
2413 },
2414 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2415 DESC_HDR_SEL0_AESU |
2416 DESC_HDR_MODE0_AESU_CBC |
2417 DESC_HDR_SEL1_MDEUB |
2418 DESC_HDR_MODE1_MDEU_INIT |
2419 DESC_HDR_MODE1_MDEU_PAD |
2420 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2421 },
2422 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2423 .alg.aead = {
2424 .base = {
2425 .cra_name = "authenc(hmac(sha384),"
2426 "cbc(des3_ede))",
2427 .cra_driver_name = "authenc-hmac-sha384-"
2428 "cbc-3des-talitos",
2429 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2430 .cra_flags = CRYPTO_ALG_ASYNC,
2431 },
2432 .ivsize = DES3_EDE_BLOCK_SIZE,
2433 .maxauthsize = SHA384_DIGEST_SIZE,
357fb605
HG
2434 },
2435 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2436 DESC_HDR_SEL0_DEU |
2437 DESC_HDR_MODE0_DEU_CBC |
2438 DESC_HDR_MODE0_DEU_3DES |
2439 DESC_HDR_SEL1_MDEUB |
2440 DESC_HDR_MODE1_MDEU_INIT |
2441 DESC_HDR_MODE1_MDEU_PAD |
2442 DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2443 },
2444 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2445 .alg.aead = {
2446 .base = {
2447 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2448 .cra_driver_name = "authenc-hmac-sha512-"
2449 "cbc-aes-talitos",
2450 .cra_blocksize = AES_BLOCK_SIZE,
2451 .cra_flags = CRYPTO_ALG_ASYNC,
2452 },
2453 .ivsize = AES_BLOCK_SIZE,
2454 .maxauthsize = SHA512_DIGEST_SIZE,
357fb605
HG
2455 },
2456 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2457 DESC_HDR_SEL0_AESU |
2458 DESC_HDR_MODE0_AESU_CBC |
2459 DESC_HDR_SEL1_MDEUB |
2460 DESC_HDR_MODE1_MDEU_INIT |
2461 DESC_HDR_MODE1_MDEU_PAD |
2462 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2463 },
2464 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2465 .alg.aead = {
2466 .base = {
2467 .cra_name = "authenc(hmac(sha512),"
2468 "cbc(des3_ede))",
2469 .cra_driver_name = "authenc-hmac-sha512-"
2470 "cbc-3des-talitos",
2471 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2472 .cra_flags = CRYPTO_ALG_ASYNC,
2473 },
2474 .ivsize = DES3_EDE_BLOCK_SIZE,
2475 .maxauthsize = SHA512_DIGEST_SIZE,
357fb605
HG
2476 },
2477 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2478 DESC_HDR_SEL0_DEU |
2479 DESC_HDR_MODE0_DEU_CBC |
2480 DESC_HDR_MODE0_DEU_3DES |
2481 DESC_HDR_SEL1_MDEUB |
2482 DESC_HDR_MODE1_MDEU_INIT |
2483 DESC_HDR_MODE1_MDEU_PAD |
2484 DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2485 },
2486 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2487 .alg.aead = {
2488 .base = {
2489 .cra_name = "authenc(hmac(md5),cbc(aes))",
2490 .cra_driver_name = "authenc-hmac-md5-"
2491 "cbc-aes-talitos",
2492 .cra_blocksize = AES_BLOCK_SIZE,
2493 .cra_flags = CRYPTO_ALG_ASYNC,
2494 },
2495 .ivsize = AES_BLOCK_SIZE,
2496 .maxauthsize = MD5_DIGEST_SIZE,
56af8cd4 2497 },
3952f17e
LN
2498 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2499 DESC_HDR_SEL0_AESU |
2500 DESC_HDR_MODE0_AESU_CBC |
2501 DESC_HDR_SEL1_MDEUA |
2502 DESC_HDR_MODE1_MDEU_INIT |
2503 DESC_HDR_MODE1_MDEU_PAD |
2504 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2505 },
7405c8d7
LC
2506 { .type = CRYPTO_ALG_TYPE_AEAD,
2507 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2508 .alg.aead = {
2509 .base = {
2510 .cra_name = "authenc(hmac(md5),cbc(aes))",
2511 .cra_driver_name = "authenc-hmac-md5-"
2512 "cbc-aes-talitos",
2513 .cra_blocksize = AES_BLOCK_SIZE,
2514 .cra_flags = CRYPTO_ALG_ASYNC,
2515 },
2516 .ivsize = AES_BLOCK_SIZE,
2517 .maxauthsize = MD5_DIGEST_SIZE,
2518 },
2519 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2520 DESC_HDR_SEL0_AESU |
2521 DESC_HDR_MODE0_AESU_CBC |
2522 DESC_HDR_SEL1_MDEUA |
2523 DESC_HDR_MODE1_MDEU_INIT |
2524 DESC_HDR_MODE1_MDEU_PAD |
2525 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2526 },
d5e4aaef 2527 { .type = CRYPTO_ALG_TYPE_AEAD,
aeb4c132
HX
2528 .alg.aead = {
2529 .base = {
2530 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2531 .cra_driver_name = "authenc-hmac-md5-"
2532 "cbc-3des-talitos",
2533 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2534 .cra_flags = CRYPTO_ALG_ASYNC,
2535 },
2536 .ivsize = DES3_EDE_BLOCK_SIZE,
2537 .maxauthsize = MD5_DIGEST_SIZE,
56af8cd4 2538 },
3952f17e
LN
2539 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2540 DESC_HDR_SEL0_DEU |
2541 DESC_HDR_MODE0_DEU_CBC |
2542 DESC_HDR_MODE0_DEU_3DES |
2543 DESC_HDR_SEL1_MDEUA |
2544 DESC_HDR_MODE1_MDEU_INIT |
2545 DESC_HDR_MODE1_MDEU_PAD |
2546 DESC_HDR_MODE1_MDEU_MD5_HMAC,
4de9d0b5 2547 },
7405c8d7
LC
2548 { .type = CRYPTO_ALG_TYPE_AEAD,
2549 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2550 .alg.aead = {
2551 .base = {
2552 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2553 .cra_driver_name = "authenc-hmac-md5-"
2554 "cbc-3des-talitos",
2555 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2556 .cra_flags = CRYPTO_ALG_ASYNC,
2557 },
2558 .ivsize = DES3_EDE_BLOCK_SIZE,
2559 .maxauthsize = MD5_DIGEST_SIZE,
2560 },
2561 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2562 DESC_HDR_SEL0_DEU |
2563 DESC_HDR_MODE0_DEU_CBC |
2564 DESC_HDR_MODE0_DEU_3DES |
2565 DESC_HDR_SEL1_MDEUA |
2566 DESC_HDR_MODE1_MDEU_INIT |
2567 DESC_HDR_MODE1_MDEU_PAD |
2568 DESC_HDR_MODE1_MDEU_MD5_HMAC,
2569 },
4de9d0b5 2570 /* ABLKCIPHER algorithms. */
5e75ae1b
LC
2571 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2572 .alg.crypto = {
2573 .cra_name = "ecb(aes)",
2574 .cra_driver_name = "ecb-aes-talitos",
2575 .cra_blocksize = AES_BLOCK_SIZE,
2576 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2577 CRYPTO_ALG_ASYNC,
2578 .cra_ablkcipher = {
2579 .min_keysize = AES_MIN_KEY_SIZE,
2580 .max_keysize = AES_MAX_KEY_SIZE,
2581 .ivsize = AES_BLOCK_SIZE,
2582 }
2583 },
2584 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2585 DESC_HDR_SEL0_AESU,
2586 },
d5e4aaef
LN
2587 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2588 .alg.crypto = {
4de9d0b5
LN
2589 .cra_name = "cbc(aes)",
2590 .cra_driver_name = "cbc-aes-talitos",
2591 .cra_blocksize = AES_BLOCK_SIZE,
2592 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2593 CRYPTO_ALG_ASYNC,
4de9d0b5 2594 .cra_ablkcipher = {
4de9d0b5
LN
2595 .min_keysize = AES_MIN_KEY_SIZE,
2596 .max_keysize = AES_MAX_KEY_SIZE,
2597 .ivsize = AES_BLOCK_SIZE,
2598 }
2599 },
2600 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2601 DESC_HDR_SEL0_AESU |
2602 DESC_HDR_MODE0_AESU_CBC,
2603 },
5e75ae1b
LC
2604 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2605 .alg.crypto = {
2606 .cra_name = "ctr(aes)",
2607 .cra_driver_name = "ctr-aes-talitos",
2608 .cra_blocksize = AES_BLOCK_SIZE,
2609 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2610 CRYPTO_ALG_ASYNC,
2611 .cra_ablkcipher = {
2612 .min_keysize = AES_MIN_KEY_SIZE,
2613 .max_keysize = AES_MAX_KEY_SIZE,
2614 .ivsize = AES_BLOCK_SIZE,
2615 }
2616 },
2617 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2618 DESC_HDR_SEL0_AESU |
2619 DESC_HDR_MODE0_AESU_CTR,
2620 },
2621 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2622 .alg.crypto = {
2623 .cra_name = "ecb(des)",
2624 .cra_driver_name = "ecb-des-talitos",
2625 .cra_blocksize = DES_BLOCK_SIZE,
2626 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2627 CRYPTO_ALG_ASYNC,
2628 .cra_ablkcipher = {
2629 .min_keysize = DES_KEY_SIZE,
2630 .max_keysize = DES_KEY_SIZE,
2631 .ivsize = DES_BLOCK_SIZE,
2632 }
2633 },
2634 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2635 DESC_HDR_SEL0_DEU,
2636 },
2637 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2638 .alg.crypto = {
2639 .cra_name = "cbc(des)",
2640 .cra_driver_name = "cbc-des-talitos",
2641 .cra_blocksize = DES_BLOCK_SIZE,
2642 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2643 CRYPTO_ALG_ASYNC,
2644 .cra_ablkcipher = {
2645 .min_keysize = DES_KEY_SIZE,
2646 .max_keysize = DES_KEY_SIZE,
2647 .ivsize = DES_BLOCK_SIZE,
2648 }
2649 },
2650 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2651 DESC_HDR_SEL0_DEU |
2652 DESC_HDR_MODE0_DEU_CBC,
2653 },
2654 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2655 .alg.crypto = {
2656 .cra_name = "ecb(des3_ede)",
2657 .cra_driver_name = "ecb-3des-talitos",
2658 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2659 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2660 CRYPTO_ALG_ASYNC,
2661 .cra_ablkcipher = {
2662 .min_keysize = DES3_EDE_KEY_SIZE,
2663 .max_keysize = DES3_EDE_KEY_SIZE,
2664 .ivsize = DES3_EDE_BLOCK_SIZE,
2665 }
2666 },
2667 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2668 DESC_HDR_SEL0_DEU |
2669 DESC_HDR_MODE0_DEU_3DES,
2670 },
d5e4aaef
LN
2671 { .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2672 .alg.crypto = {
4de9d0b5
LN
2673 .cra_name = "cbc(des3_ede)",
2674 .cra_driver_name = "cbc-3des-talitos",
2675 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2676 .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2677 CRYPTO_ALG_ASYNC,
4de9d0b5 2678 .cra_ablkcipher = {
4de9d0b5
LN
2679 .min_keysize = DES3_EDE_KEY_SIZE,
2680 .max_keysize = DES3_EDE_KEY_SIZE,
2681 .ivsize = DES3_EDE_BLOCK_SIZE,
2682 }
2683 },
2684 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2685 DESC_HDR_SEL0_DEU |
2686 DESC_HDR_MODE0_DEU_CBC |
2687 DESC_HDR_MODE0_DEU_3DES,
497f2e6b
LN
2688 },
2689 /* AHASH algorithms. */
2690 { .type = CRYPTO_ALG_TYPE_AHASH,
2691 .alg.hash = {
497f2e6b 2692 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2693 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2694 .halg.base = {
2695 .cra_name = "md5",
2696 .cra_driver_name = "md5-talitos",
b3988618 2697 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
497f2e6b
LN
2698 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2699 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2700 }
2701 },
2702 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2703 DESC_HDR_SEL0_MDEUA |
2704 DESC_HDR_MODE0_MDEU_MD5,
2705 },
2706 { .type = CRYPTO_ALG_TYPE_AHASH,
2707 .alg.hash = {
497f2e6b 2708 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2709 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2710 .halg.base = {
2711 .cra_name = "sha1",
2712 .cra_driver_name = "sha1-talitos",
2713 .cra_blocksize = SHA1_BLOCK_SIZE,
2714 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2715 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2716 }
2717 },
2718 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2719 DESC_HDR_SEL0_MDEUA |
2720 DESC_HDR_MODE0_MDEU_SHA1,
2721 },
60f208d7
KP
2722 { .type = CRYPTO_ALG_TYPE_AHASH,
2723 .alg.hash = {
60f208d7 2724 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2725 .halg.statesize = sizeof(struct talitos_export_state),
60f208d7
KP
2726 .halg.base = {
2727 .cra_name = "sha224",
2728 .cra_driver_name = "sha224-talitos",
2729 .cra_blocksize = SHA224_BLOCK_SIZE,
2730 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2731 CRYPTO_ALG_ASYNC,
60f208d7
KP
2732 }
2733 },
2734 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2735 DESC_HDR_SEL0_MDEUA |
2736 DESC_HDR_MODE0_MDEU_SHA224,
2737 },
497f2e6b
LN
2738 { .type = CRYPTO_ALG_TYPE_AHASH,
2739 .alg.hash = {
497f2e6b 2740 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 2741 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2742 .halg.base = {
2743 .cra_name = "sha256",
2744 .cra_driver_name = "sha256-talitos",
2745 .cra_blocksize = SHA256_BLOCK_SIZE,
2746 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2747 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2748 }
2749 },
2750 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2751 DESC_HDR_SEL0_MDEUA |
2752 DESC_HDR_MODE0_MDEU_SHA256,
2753 },
2754 { .type = CRYPTO_ALG_TYPE_AHASH,
2755 .alg.hash = {
497f2e6b 2756 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 2757 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2758 .halg.base = {
2759 .cra_name = "sha384",
2760 .cra_driver_name = "sha384-talitos",
2761 .cra_blocksize = SHA384_BLOCK_SIZE,
2762 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2763 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2764 }
2765 },
2766 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2767 DESC_HDR_SEL0_MDEUB |
2768 DESC_HDR_MODE0_MDEUB_SHA384,
2769 },
2770 { .type = CRYPTO_ALG_TYPE_AHASH,
2771 .alg.hash = {
497f2e6b 2772 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 2773 .halg.statesize = sizeof(struct talitos_export_state),
497f2e6b
LN
2774 .halg.base = {
2775 .cra_name = "sha512",
2776 .cra_driver_name = "sha512-talitos",
2777 .cra_blocksize = SHA512_BLOCK_SIZE,
2778 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2779 CRYPTO_ALG_ASYNC,
497f2e6b
LN
2780 }
2781 },
2782 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2783 DESC_HDR_SEL0_MDEUB |
2784 DESC_HDR_MODE0_MDEUB_SHA512,
2785 },
79b3a418
LN
2786 { .type = CRYPTO_ALG_TYPE_AHASH,
2787 .alg.hash = {
79b3a418 2788 .halg.digestsize = MD5_DIGEST_SIZE,
3639ca84 2789 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2790 .halg.base = {
2791 .cra_name = "hmac(md5)",
2792 .cra_driver_name = "hmac-md5-talitos",
b3988618 2793 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
79b3a418
LN
2794 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2795 CRYPTO_ALG_ASYNC,
79b3a418
LN
2796 }
2797 },
2798 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2799 DESC_HDR_SEL0_MDEUA |
2800 DESC_HDR_MODE0_MDEU_MD5,
2801 },
2802 { .type = CRYPTO_ALG_TYPE_AHASH,
2803 .alg.hash = {
79b3a418 2804 .halg.digestsize = SHA1_DIGEST_SIZE,
3639ca84 2805 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2806 .halg.base = {
2807 .cra_name = "hmac(sha1)",
2808 .cra_driver_name = "hmac-sha1-talitos",
2809 .cra_blocksize = SHA1_BLOCK_SIZE,
2810 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2811 CRYPTO_ALG_ASYNC,
79b3a418
LN
2812 }
2813 },
2814 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2815 DESC_HDR_SEL0_MDEUA |
2816 DESC_HDR_MODE0_MDEU_SHA1,
2817 },
2818 { .type = CRYPTO_ALG_TYPE_AHASH,
2819 .alg.hash = {
79b3a418 2820 .halg.digestsize = SHA224_DIGEST_SIZE,
3639ca84 2821 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2822 .halg.base = {
2823 .cra_name = "hmac(sha224)",
2824 .cra_driver_name = "hmac-sha224-talitos",
2825 .cra_blocksize = SHA224_BLOCK_SIZE,
2826 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2827 CRYPTO_ALG_ASYNC,
79b3a418
LN
2828 }
2829 },
2830 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2831 DESC_HDR_SEL0_MDEUA |
2832 DESC_HDR_MODE0_MDEU_SHA224,
2833 },
2834 { .type = CRYPTO_ALG_TYPE_AHASH,
2835 .alg.hash = {
79b3a418 2836 .halg.digestsize = SHA256_DIGEST_SIZE,
3639ca84 2837 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2838 .halg.base = {
2839 .cra_name = "hmac(sha256)",
2840 .cra_driver_name = "hmac-sha256-talitos",
2841 .cra_blocksize = SHA256_BLOCK_SIZE,
2842 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2843 CRYPTO_ALG_ASYNC,
79b3a418
LN
2844 }
2845 },
2846 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2847 DESC_HDR_SEL0_MDEUA |
2848 DESC_HDR_MODE0_MDEU_SHA256,
2849 },
2850 { .type = CRYPTO_ALG_TYPE_AHASH,
2851 .alg.hash = {
79b3a418 2852 .halg.digestsize = SHA384_DIGEST_SIZE,
3639ca84 2853 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2854 .halg.base = {
2855 .cra_name = "hmac(sha384)",
2856 .cra_driver_name = "hmac-sha384-talitos",
2857 .cra_blocksize = SHA384_BLOCK_SIZE,
2858 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2859 CRYPTO_ALG_ASYNC,
79b3a418
LN
2860 }
2861 },
2862 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2863 DESC_HDR_SEL0_MDEUB |
2864 DESC_HDR_MODE0_MDEUB_SHA384,
2865 },
2866 { .type = CRYPTO_ALG_TYPE_AHASH,
2867 .alg.hash = {
79b3a418 2868 .halg.digestsize = SHA512_DIGEST_SIZE,
3639ca84 2869 .halg.statesize = sizeof(struct talitos_export_state),
79b3a418
LN
2870 .halg.base = {
2871 .cra_name = "hmac(sha512)",
2872 .cra_driver_name = "hmac-sha512-talitos",
2873 .cra_blocksize = SHA512_BLOCK_SIZE,
2874 .cra_flags = CRYPTO_ALG_TYPE_AHASH |
2875 CRYPTO_ALG_ASYNC,
79b3a418
LN
2876 }
2877 },
2878 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2879 DESC_HDR_SEL0_MDEUB |
2880 DESC_HDR_MODE0_MDEUB_SHA512,
2881 }
9c4a7965
KP
2882};
2883
2884struct talitos_crypto_alg {
2885 struct list_head entry;
2886 struct device *dev;
acbf7c62 2887 struct talitos_alg_template algt;
9c4a7965
KP
2888};
2889
89d124cb
JE
2890static int talitos_init_common(struct talitos_ctx *ctx,
2891 struct talitos_crypto_alg *talitos_alg)
9c4a7965 2892{
5228f0f7 2893 struct talitos_private *priv;
9c4a7965
KP
2894
2895 /* update context with ptr to dev */
2896 ctx->dev = talitos_alg->dev;
19bbbc63 2897
5228f0f7
KP
2898 /* assign SEC channel to tfm in round-robin fashion */
2899 priv = dev_get_drvdata(ctx->dev);
2900 ctx->ch = atomic_inc_return(&priv->last_chan) &
2901 (priv->num_channels - 1);
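	/* the mask works because talitos_probe() rejects devices whose
	 * num_channels is not a power of two */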
2902
9c4a7965 2903 /* copy descriptor header template value */
acbf7c62 2904 ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
9c4a7965 2905
602dba5a
KP
2906 /* select done notification */
2907 ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
2908
497f2e6b
LN
2909 return 0;
2910}
2911
89d124cb
JE
2912static int talitos_cra_init(struct crypto_tfm *tfm)
2913{
2914 struct crypto_alg *alg = tfm->__crt_alg;
2915 struct talitos_crypto_alg *talitos_alg;
2916 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
2917
2918 if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_AHASH)
2919 talitos_alg = container_of(__crypto_ahash_alg(alg),
2920 struct talitos_crypto_alg,
2921 algt.alg.hash);
2922 else
2923 talitos_alg = container_of(alg, struct talitos_crypto_alg,
2924 algt.alg.crypto);
2925
2926 return talitos_init_common(ctx, talitos_alg);
2927}
2928
aeb4c132 2929static int talitos_cra_init_aead(struct crypto_aead *tfm)
497f2e6b 2930{
89d124cb
JE
2931 struct aead_alg *alg = crypto_aead_alg(tfm);
2932 struct talitos_crypto_alg *talitos_alg;
2933 struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
2934
2935 talitos_alg = container_of(alg, struct talitos_crypto_alg,
2936 algt.alg.aead);
2937
2938 return talitos_init_common(ctx, talitos_alg);
9c4a7965
KP
2939}
2940
497f2e6b
LN
2941static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
2942{
2943 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
2944
2945 talitos_cra_init(tfm);
2946
2947 ctx->keylen = 0;
2948 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
2949 sizeof(struct talitos_ahash_req_ctx));
2950
2951 return 0;
2952}
2953
9c4a7965
KP
2954/*
2955 * given the alg's descriptor header template, determine whether descriptor
2956 * type and primary/secondary execution units required match the hw
2957 * capabilities description provided in the device tree node.
2958 */
2959static int hw_supports(struct device *dev, __be32 desc_hdr_template)
2960{
2961 struct talitos_private *priv = dev_get_drvdata(dev);
2962 int ret;
2963
2964 ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
2965 (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
2966
2967 if (SECONDARY_EU(desc_hdr_template))
2968 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
2969 & priv->exec_units);
2970
2971 return ret;
2972}
2973
2dc11581 2974static int talitos_remove(struct platform_device *ofdev)
9c4a7965
KP
2975{
2976 struct device *dev = &ofdev->dev;
2977 struct talitos_private *priv = dev_get_drvdata(dev);
2978 struct talitos_crypto_alg *t_alg, *n;
2979 int i;
2980
2981 list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
acbf7c62
LN
2982 switch (t_alg->algt.type) {
2983 case CRYPTO_ALG_TYPE_ABLKCIPHER:
acbf7c62 2984 break;
aeb4c132
HX
2985 case CRYPTO_ALG_TYPE_AEAD:
2986 crypto_unregister_aead(&t_alg->algt.alg.aead);
 break; /* don't fall through and unregister a hash that was never registered */
acbf7c62
LN
2987 case CRYPTO_ALG_TYPE_AHASH:
2988 crypto_unregister_ahash(&t_alg->algt.alg.hash);
2989 break;
2990 }
9c4a7965
KP
2991 list_del(&t_alg->entry);
2992 kfree(t_alg);
2993 }
2994
2995 if (hw_supports(dev, DESC_HDR_SEL0_RNG))
2996 talitos_unregister_rng(dev);
2997
35a3bb3d 2998 for (i = 0; priv->chan && i < priv->num_channels; i++)
0b798247 2999 kfree(priv->chan[i].fifo);
9c4a7965 3000
4b992628 3001 kfree(priv->chan);
9c4a7965 3002
c3e337f8 3003 for (i = 0; i < 2; i++)
2cdba3cf 3004 if (priv->irq[i]) {
c3e337f8
KP
3005 free_irq(priv->irq[i], dev);
3006 irq_dispose_mapping(priv->irq[i]);
3007 }
9c4a7965 3008
c3e337f8 3009 tasklet_kill(&priv->done_task[0]);
2cdba3cf 3010 if (priv->irq[1])
c3e337f8 3011 tasklet_kill(&priv->done_task[1]);
9c4a7965
KP
3012
3013 iounmap(priv->reg);
3014
9c4a7965
KP
3015 kfree(priv);
3016
3017 return 0;
3018}
3019
3020static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
3021 struct talitos_alg_template
3022 *template)
3023{
60f208d7 3024 struct talitos_private *priv = dev_get_drvdata(dev);
9c4a7965
KP
3025 struct talitos_crypto_alg *t_alg;
3026 struct crypto_alg *alg;
3027
3028 t_alg = kzalloc(sizeof(struct talitos_crypto_alg), GFP_KERNEL);
3029 if (!t_alg)
3030 return ERR_PTR(-ENOMEM);
3031
acbf7c62
LN
3032 t_alg->algt = *template;
3033
3034 switch (t_alg->algt.type) {
3035 case CRYPTO_ALG_TYPE_ABLKCIPHER:
497f2e6b
LN
3036 alg = &t_alg->algt.alg.crypto;
3037 alg->cra_init = talitos_cra_init;
d4cd3283 3038 alg->cra_type = &crypto_ablkcipher_type;
b286e003
KP
3039 alg->cra_ablkcipher.setkey = ablkcipher_setkey;
3040 alg->cra_ablkcipher.encrypt = ablkcipher_encrypt;
3041 alg->cra_ablkcipher.decrypt = ablkcipher_decrypt;
3042 alg->cra_ablkcipher.geniv = "eseqiv";
497f2e6b 3043 break;
acbf7c62 3044 case CRYPTO_ALG_TYPE_AEAD:
aeb4c132 3045 alg = &t_alg->algt.alg.aead.base;
aeb4c132
HX
3046 t_alg->algt.alg.aead.init = talitos_cra_init_aead;
3047 t_alg->algt.alg.aead.setkey = aead_setkey;
3048 t_alg->algt.alg.aead.encrypt = aead_encrypt;
3049 t_alg->algt.alg.aead.decrypt = aead_decrypt;
acbf7c62
LN
3050 break;
3051 case CRYPTO_ALG_TYPE_AHASH:
3052 alg = &t_alg->algt.alg.hash.halg.base;
497f2e6b 3053 alg->cra_init = talitos_cra_init_ahash;
d4cd3283 3054 alg->cra_type = &crypto_ahash_type;
b286e003
KP
3055 t_alg->algt.alg.hash.init = ahash_init;
3056 t_alg->algt.alg.hash.update = ahash_update;
3057 t_alg->algt.alg.hash.final = ahash_final;
3058 t_alg->algt.alg.hash.finup = ahash_finup;
3059 t_alg->algt.alg.hash.digest = ahash_digest;
8b91ba91
LC
3060 if (!strncmp(alg->cra_name, "hmac", 4))
3061 t_alg->algt.alg.hash.setkey = ahash_setkey;
3639ca84
HG
3062 t_alg->algt.alg.hash.import = ahash_import;
3063 t_alg->algt.alg.hash.export = ahash_export;
b286e003 3064
79b3a418 3065 if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
0b2730d8
KP
3066 !strncmp(alg->cra_name, "hmac", 4)) {
3067 kfree(t_alg);
79b3a418 3068 return ERR_PTR(-ENOTSUPP);
0b2730d8 3069 }
60f208d7 3070 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
79b3a418
LN
3071 (!strcmp(alg->cra_name, "sha224") ||
3072 !strcmp(alg->cra_name, "hmac(sha224)"))) {
60f208d7
KP
3073 t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
3074 t_alg->algt.desc_hdr_template =
3075 DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3076 DESC_HDR_SEL0_MDEUA |
3077 DESC_HDR_MODE0_MDEU_SHA256;
3078 }
497f2e6b 3079 break;
1d11911a
KP
3080 default:
3081 dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
5fa7dadc 3082 kfree(t_alg);
1d11911a 3083 return ERR_PTR(-EINVAL);
acbf7c62 3084 }
9c4a7965 3085
9c4a7965 3086 alg->cra_module = THIS_MODULE;
b0057763
LC
3087 if (t_alg->algt.priority)
3088 alg->cra_priority = t_alg->algt.priority;
3089 else
3090 alg->cra_priority = TALITOS_CRA_PRIORITY;
9c4a7965 3091 alg->cra_alignmask = 0;
9c4a7965 3092 alg->cra_ctxsize = sizeof(struct talitos_ctx);
d912bb76 3093 alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;
9c4a7965 3094
9c4a7965
KP
3095 t_alg->dev = dev;
3096
3097 return t_alg;
3098}
3099
c3e337f8
KP
3100static int talitos_probe_irq(struct platform_device *ofdev)
3101{
3102 struct device *dev = &ofdev->dev;
3103 struct device_node *np = ofdev->dev.of_node;
3104 struct talitos_private *priv = dev_get_drvdata(dev);
3105 int err;
dd3c0987 3106 bool is_sec1 = has_ftr_sec1(priv);
c3e337f8
KP
3107
3108 priv->irq[0] = irq_of_parse_and_map(np, 0);
2cdba3cf 3109 if (!priv->irq[0]) {
c3e337f8
KP
3110 dev_err(dev, "failed to map irq\n");
3111 return -EINVAL;
3112 }
dd3c0987
LC
3113 if (is_sec1) {
3114 err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
3115 dev_driver_string(dev), dev);
3116 goto primary_out;
3117 }
c3e337f8
KP
3118
3119 priv->irq[1] = irq_of_parse_and_map(np, 1);
3120
3121 /* get the primary irq line */
2cdba3cf 3122 if (!priv->irq[1]) {
dd3c0987 3123 err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
c3e337f8
KP
3124 dev_driver_string(dev), dev);
3125 goto primary_out;
3126 }
3127
dd3c0987 3128 err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
c3e337f8
KP
3129 dev_driver_string(dev), dev);
3130 if (err)
3131 goto primary_out;
3132
3133 /* get the secondary irq line */
dd3c0987 3134 err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
c3e337f8
KP
3135 dev_driver_string(dev), dev);
3136 if (err) {
3137 dev_err(dev, "failed to request secondary irq\n");
3138 irq_dispose_mapping(priv->irq[1]);
2cdba3cf 3139 priv->irq[1] = 0;
c3e337f8
KP
3140 }
3141
3142 return err;
3143
3144primary_out:
3145 if (err) {
3146 dev_err(dev, "failed to request primary irq\n");
3147 irq_dispose_mapping(priv->irq[0]);
2cdba3cf 3148 priv->irq[0] = 0;
c3e337f8
KP
3149 }
3150
3151 return err;
3152}
3153
1c48a5c9 3154static int talitos_probe(struct platform_device *ofdev)
9c4a7965
KP
3155{
3156 struct device *dev = &ofdev->dev;
61c7a080 3157 struct device_node *np = ofdev->dev.of_node;
9c4a7965
KP
3158 struct talitos_private *priv;
3159 const unsigned int *prop;
3160 int i, err;
5fa7fa14 3161 int stride;
9c4a7965
KP
3162
3163 priv = kzalloc(sizeof(struct talitos_private), GFP_KERNEL);
3164 if (!priv)
3165 return -ENOMEM;
3166
f3de9cb1
KH
3167 INIT_LIST_HEAD(&priv->alg_list);
3168
9c4a7965
KP
3169 dev_set_drvdata(dev, priv);
3170
3171 priv->ofdev = ofdev;
3172
511d63cb
HG
3173 spin_lock_init(&priv->reg_lock);
3174
9c4a7965
KP
3175 priv->reg = of_iomap(np, 0);
3176 if (!priv->reg) {
3177 dev_err(dev, "failed to of_iomap\n");
3178 err = -ENOMEM;
3179 goto err_out;
3180 }
3181
3182 /* get SEC version capabilities from device tree */
3183 prop = of_get_property(np, "fsl,num-channels", NULL);
3184 if (prop)
3185 priv->num_channels = *prop;
3186
3187 prop = of_get_property(np, "fsl,channel-fifo-len", NULL);
3188 if (prop)
3189 priv->chfifo_len = *prop;
3190
3191 prop = of_get_property(np, "fsl,exec-units-mask", NULL);
3192 if (prop)
3193 priv->exec_units = *prop;
3194
3195 prop = of_get_property(np, "fsl,descriptor-types-mask", NULL);
3196 if (prop)
3197 priv->desc_types = *prop;
3198
3199 if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
3200 !priv->exec_units || !priv->desc_types) {
3201 dev_err(dev, "invalid property data in device tree node\n");
3202 err = -EINVAL;
3203 goto err_out;
3204 }
3205
f3c85bc1
LN
3206 if (of_device_is_compatible(np, "fsl,sec3.0"))
3207 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
3208
fe5720e2 3209 if (of_device_is_compatible(np, "fsl,sec2.1"))
60f208d7 3210 priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
79b3a418
LN
3211 TALITOS_FTR_SHA224_HWINIT |
3212 TALITOS_FTR_HMAC_OK;
fe5720e2 3213
21590888
LC
3214 if (of_device_is_compatible(np, "fsl,sec1.0"))
3215 priv->features |= TALITOS_FTR_SEC1;
3216
5fa7fa14
LC
3217 if (of_device_is_compatible(np, "fsl,sec1.2")) {
3218 priv->reg_deu = priv->reg + TALITOS12_DEU;
3219 priv->reg_aesu = priv->reg + TALITOS12_AESU;
3220 priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
3221 stride = TALITOS1_CH_STRIDE;
3222 } else if (of_device_is_compatible(np, "fsl,sec1.0")) {
3223 priv->reg_deu = priv->reg + TALITOS10_DEU;
3224 priv->reg_aesu = priv->reg + TALITOS10_AESU;
3225 priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
3226 priv->reg_afeu = priv->reg + TALITOS10_AFEU;
3227 priv->reg_rngu = priv->reg + TALITOS10_RNGU;
3228 priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
3229 stride = TALITOS1_CH_STRIDE;
3230 } else {
3231 priv->reg_deu = priv->reg + TALITOS2_DEU;
3232 priv->reg_aesu = priv->reg + TALITOS2_AESU;
3233 priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
3234 priv->reg_afeu = priv->reg + TALITOS2_AFEU;
3235 priv->reg_rngu = priv->reg + TALITOS2_RNGU;
3236 priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
3237 priv->reg_keu = priv->reg + TALITOS2_KEU;
3238 priv->reg_crcu = priv->reg + TALITOS2_CRCU;
3239 stride = TALITOS2_CH_STRIDE;
3240 }
3241
dd3c0987
LC
3242 err = talitos_probe_irq(ofdev);
3243 if (err)
3244 goto err_out;
3245
3246 if (of_device_is_compatible(np, "fsl,sec1.0")) {
3247 tasklet_init(&priv->done_task[0], talitos1_done_4ch,
3248 (unsigned long)dev);
3249 } else {
3250 if (!priv->irq[1]) {
3251 tasklet_init(&priv->done_task[0], talitos2_done_4ch,
3252 (unsigned long)dev);
3253 } else {
3254 tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
3255 (unsigned long)dev);
3256 tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
3257 (unsigned long)dev);
3258 }
3259 }
3260
4b992628
KP
3261 priv->chan = kzalloc(sizeof(struct talitos_channel) *
3262 priv->num_channels, GFP_KERNEL);
3263 if (!priv->chan) {
3264 dev_err(dev, "failed to allocate channel management space\n");
9c4a7965
KP
3265 err = -ENOMEM;
3266 goto err_out;
3267 }
3268
f641dddd
MH
3269 priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);
3270
c3e337f8 3271 for (i = 0; i < priv->num_channels; i++) {
5fa7fa14 3272 priv->chan[i].reg = priv->reg + stride * (i + 1);
2cdba3cf 3273 if (!priv->irq[1] || !(i & 1))
c3e337f8 3274 priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;
ad42d5fc 3275
4b992628
KP
3276 spin_lock_init(&priv->chan[i].head_lock);
3277 spin_lock_init(&priv->chan[i].tail_lock);
9c4a7965 3278
4b992628
KP
3279 priv->chan[i].fifo = kzalloc(sizeof(struct talitos_request) *
3280 priv->fifo_len, GFP_KERNEL);
3281 if (!priv->chan[i].fifo) {
9c4a7965
KP
3282 dev_err(dev, "failed to allocate request fifo %d\n", i);
3283 err = -ENOMEM;
3284 goto err_out;
3285 }
9c4a7965 3286
4b992628
KP
3287 atomic_set(&priv->chan[i].submit_count,
3288 -(priv->chfifo_len - 1));
f641dddd 3289 }
9c4a7965 3290
81eb024c
KP
3291 dma_set_mask(dev, DMA_BIT_MASK(36));
3292
9c4a7965
KP
3293 /* reset and initialize the h/w */
3294 err = init_device(dev);
3295 if (err) {
3296 dev_err(dev, "failed to initialize device\n");
3297 goto err_out;
3298 }
3299
3300 /* register the RNG, if available */
3301 if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
3302 err = talitos_register_rng(dev);
3303 if (err) {
3304 dev_err(dev, "failed to register hwrng: %d\n", err);
3305 goto err_out;
3306 } else
3307 dev_info(dev, "hwrng\n");
3308 }
3309
3310 /* register crypto algorithms the device supports */
9c4a7965
KP
3311 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3312 if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
3313 struct talitos_crypto_alg *t_alg;
aeb4c132 3314 struct crypto_alg *alg = NULL;
9c4a7965
KP
3315
3316 t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
3317 if (IS_ERR(t_alg)) {
3318 err = PTR_ERR(t_alg);
0b2730d8 3319 if (err == -ENOTSUPP)
79b3a418 3320 continue;
9c4a7965
KP
3321 goto err_out;
3322 }
3323
acbf7c62
LN
3324 switch (t_alg->algt.type) {
3325 case CRYPTO_ALG_TYPE_ABLKCIPHER:
acbf7c62
LN
3326 err = crypto_register_alg(
3327 &t_alg->algt.alg.crypto);
aeb4c132 3328 alg = &t_alg->algt.alg.crypto;
acbf7c62 3329 break;
aeb4c132
HX
3330
3331 case CRYPTO_ALG_TYPE_AEAD:
3332 err = crypto_register_aead(
3333 &t_alg->algt.alg.aead);
3334 alg = &t_alg->algt.alg.aead.base;
3335 break;
3336
acbf7c62
LN
3337 case CRYPTO_ALG_TYPE_AHASH:
3338 err = crypto_register_ahash(
3339 &t_alg->algt.alg.hash);
aeb4c132 3340 alg = &t_alg->algt.alg.hash.halg.base;
acbf7c62
LN
3341 break;
3342 }
9c4a7965
KP
3343 if (err) {
3344 dev_err(dev, "%s alg registration failed\n",
aeb4c132 3345 alg->cra_driver_name);
9c4a7965 3346 kfree(t_alg);
991155ba 3347 } else
9c4a7965 3348 list_add_tail(&t_alg->entry, &priv->alg_list);
9c4a7965
KP
3349 }
3350 }
5b859b6e
KP
3351 if (!list_empty(&priv->alg_list))
3352 dev_info(dev, "%s algorithms registered in /proc/crypto\n",
3353 (char *)of_get_property(np, "compatible", NULL));
9c4a7965
KP
3354
3355 return 0;
3356
3357err_out:
3358 talitos_remove(ofdev);
9c4a7965
KP
3359
3360 return err;
3361}
3362
6c3f975a 3363static const struct of_device_id talitos_match[] = {
0635b7db
LC
3364#ifdef CONFIG_CRYPTO_DEV_TALITOS1
3365 {
3366 .compatible = "fsl,sec1.0",
3367 },
3368#endif
3369#ifdef CONFIG_CRYPTO_DEV_TALITOS2
9c4a7965
KP
3370 {
3371 .compatible = "fsl,sec2.0",
3372 },
0635b7db 3373#endif
9c4a7965
KP
3374 {},
3375};
3376MODULE_DEVICE_TABLE(of, talitos_match);
3377
1c48a5c9 3378static struct platform_driver talitos_driver = {
4018294b
GL
3379 .driver = {
3380 .name = "talitos",
4018294b
GL
3381 .of_match_table = talitos_match,
3382 },
9c4a7965 3383 .probe = talitos_probe,
596f1034 3384 .remove = talitos_remove,
9c4a7965
KP
3385};
3386
741e8c2d 3387module_platform_driver(talitos_driver);
9c4a7965
KP
3388
3389MODULE_LICENSE("GPL");
3390MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
3391MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");