/*
 * Support for ATMEL AES HW acceleration.
 *
 * Copyright (c) 2012 Eukréa Electromatique - ATMEL
 * Author: Nicolas Royer <nicolas@eukrea.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 *
 * Some ideas are from omap-aes.c driver.
 */

#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/hw_random.h>
#include <linux/platform_device.h>
#include <linux/device.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/of_device.h>
#include <linux/delay.h>
#include <linux/crypto.h>
#include <crypto/scatterwalk.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <crypto/internal/aead.h>
#include <linux/platform_data/crypto-atmel.h>
#include <dt-bindings/dma/at91.h>
#include "atmel-aes-regs.h"
#include "atmel-authenc.h"
#define ATMEL_AES_PRIORITY	300

#define ATMEL_AES_BUFFER_ORDER	2
#define ATMEL_AES_BUFFER_SIZE	(PAGE_SIZE << ATMEL_AES_BUFFER_ORDER)

#define CFB8_BLOCK_SIZE		1
#define CFB16_BLOCK_SIZE	2
#define CFB32_BLOCK_SIZE	4
#define CFB64_BLOCK_SIZE	8

#define SIZE_IN_WORDS(x)	((x) >> 2)

/* AES flags */
/* Reserve bits [18:16] [14:12] [1:0] for mode (same as for AES_MR) */
#define AES_FLAGS_ENCRYPT	AES_MR_CYPHER_ENC
#define AES_FLAGS_GTAGEN	AES_MR_GTAGEN
#define AES_FLAGS_OPMODE_MASK	(AES_MR_OPMOD_MASK | AES_MR_CFBS_MASK)
#define AES_FLAGS_ECB		AES_MR_OPMOD_ECB
#define AES_FLAGS_CBC		AES_MR_OPMOD_CBC
#define AES_FLAGS_OFB		AES_MR_OPMOD_OFB
#define AES_FLAGS_CFB128	(AES_MR_OPMOD_CFB | AES_MR_CFBS_128b)
#define AES_FLAGS_CFB64		(AES_MR_OPMOD_CFB | AES_MR_CFBS_64b)
#define AES_FLAGS_CFB32		(AES_MR_OPMOD_CFB | AES_MR_CFBS_32b)
#define AES_FLAGS_CFB16		(AES_MR_OPMOD_CFB | AES_MR_CFBS_16b)
#define AES_FLAGS_CFB8		(AES_MR_OPMOD_CFB | AES_MR_CFBS_8b)
#define AES_FLAGS_CTR		AES_MR_OPMOD_CTR
#define AES_FLAGS_GCM		AES_MR_OPMOD_GCM
#define AES_FLAGS_XTS		AES_MR_OPMOD_XTS

#define AES_FLAGS_MODE_MASK	(AES_FLAGS_OPMODE_MASK |	\
				 AES_FLAGS_ENCRYPT |		\
				 AES_FLAGS_GTAGEN)

#define AES_FLAGS_BUSY		BIT(3)
#define AES_FLAGS_DUMP_REG	BIT(4)
#define AES_FLAGS_OWN_SHA	BIT(5)

#define AES_FLAGS_PERSISTENT	AES_FLAGS_BUSY

#define ATMEL_AES_QUEUE_LENGTH	50

#define ATMEL_AES_DMA_THRESHOLD	256
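
/*
 * Note on the flag layout: the opmode, encrypt and GTAGEN bits deliberately
 * occupy the same positions as in the AES_MR register, so a request's mode
 * flags can be OR'ed straight into MR (see atmel_aes_write_ctrl_key()).
 * ATMEL_AES_DMA_THRESHOLD is the request size (in bytes) from which the
 * driver prefers DMA over programmed I/O (see atmel_aes_start()).
 */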
struct atmel_aes_caps {
	bool	has_dualbuff;
	bool	has_cfb64;
	bool	has_ctr32;
	bool	has_gcm;
	bool	has_xts;
	bool	has_authenc;
	u32	max_burst_size;
};

struct atmel_aes_dev;

typedef int (*atmel_aes_fn_t)(struct atmel_aes_dev *);

struct atmel_aes_base_ctx {
	struct atmel_aes_dev	*dd;
	atmel_aes_fn_t		start;
	int			keylen;
	u32			key[AES_KEYSIZE_256 / sizeof(u32)];
	u16			block_size;
	bool			is_aead;
};

struct atmel_aes_ctx {
	struct atmel_aes_base_ctx	base;
};

struct atmel_aes_ctr_ctx {
	struct atmel_aes_base_ctx	base;

	u32			iv[AES_BLOCK_SIZE / sizeof(u32)];
	size_t			offset;
	struct scatterlist	src[2];
	struct scatterlist	dst[2];
};

struct atmel_aes_gcm_ctx {
	struct atmel_aes_base_ctx	base;

	struct scatterlist	src[2];
	struct scatterlist	dst[2];

	u32			j0[AES_BLOCK_SIZE / sizeof(u32)];
	u32			tag[AES_BLOCK_SIZE / sizeof(u32)];
	u32			ghash[AES_BLOCK_SIZE / sizeof(u32)];

	size_t			textlen;

	const u32		*ghash_in;
	u32			*ghash_out;
	atmel_aes_fn_t		ghash_resume;
};

struct atmel_aes_xts_ctx {
	struct atmel_aes_base_ctx	base;

	u32			key2[AES_KEYSIZE_256 / sizeof(u32)];
};

#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
struct atmel_aes_authenc_ctx {
	struct atmel_aes_base_ctx	base;
	struct atmel_sha_authenc_ctx	*auth;
};
#endif

struct atmel_aes_reqctx {
	unsigned long		mode;
	u32			lastc[AES_BLOCK_SIZE / sizeof(u32)];
};

#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
struct atmel_aes_authenc_reqctx {
	struct atmel_aes_reqctx	base;

	struct scatterlist	src[2];
	struct scatterlist	dst[2];
	size_t			textlen;
	u32			digest[SHA512_DIGEST_SIZE / sizeof(u32)];

	/* auth_req MUST be placed last. */
	struct ahash_request	auth_req;
};
#endif

struct atmel_aes_dma {
	struct dma_chan		*chan;
	struct scatterlist	*sg;
	int			nents;
	unsigned int		remainder;
	unsigned int		sg_len;
};

struct atmel_aes_dev {
	struct list_head	list;
	unsigned long		phys_base;
	void __iomem		*io_base;

	struct crypto_async_request	*areq;
	struct atmel_aes_base_ctx	*ctx;

	bool			is_async;
	atmel_aes_fn_t		resume;
	atmel_aes_fn_t		cpu_transfer_complete;

	struct device		*dev;
	struct clk		*iclk;
	int			irq;

	unsigned long		flags;

	spinlock_t		lock;
	struct crypto_queue	queue;

	struct tasklet_struct	done_task;
	struct tasklet_struct	queue_task;

	size_t			total;
	size_t			datalen;
	u32			*data;

	struct atmel_aes_dma	src;
	struct atmel_aes_dma	dst;

	size_t			buflen;
	void			*buf;
	struct scatterlist	aligned_sg;
	struct scatterlist	*real_dst;

	struct atmel_aes_caps	caps;

	u32			hw_version;
};

struct atmel_aes_drv {
	struct list_head	dev_list;
	spinlock_t		lock;
};

static struct atmel_aes_drv atmel_aes = {
	.dev_list = LIST_HEAD_INIT(atmel_aes.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(atmel_aes.lock),
};
#ifdef VERBOSE_DEBUG
static const char *atmel_aes_reg_name(u32 offset, char *tmp, size_t sz)
{
	switch (offset) {
	case AES_KEYWR(0) ... AES_KEYWR(7):
		snprintf(tmp, sz, "KEYWR[%u]", (offset - AES_KEYWR(0)) >> 2);
		break;

	case AES_IDATAR(0) ... AES_IDATAR(3):
		snprintf(tmp, sz, "IDATAR[%u]", (offset - AES_IDATAR(0)) >> 2);
		break;

	case AES_ODATAR(0) ... AES_ODATAR(3):
		snprintf(tmp, sz, "ODATAR[%u]", (offset - AES_ODATAR(0)) >> 2);
		break;

	case AES_IVR(0) ... AES_IVR(3):
		snprintf(tmp, sz, "IVR[%u]", (offset - AES_IVR(0)) >> 2);
		break;

	case AES_GHASHR(0) ... AES_GHASHR(3):
		snprintf(tmp, sz, "GHASHR[%u]", (offset - AES_GHASHR(0)) >> 2);
		break;

	case AES_TAGR(0) ... AES_TAGR(3):
		snprintf(tmp, sz, "TAGR[%u]", (offset - AES_TAGR(0)) >> 2);
		break;

	case AES_GCMHR(0) ... AES_GCMHR(3):
		snprintf(tmp, sz, "GCMHR[%u]", (offset - AES_GCMHR(0)) >> 2);
		break;

	case AES_TWR(0) ... AES_TWR(3):
		snprintf(tmp, sz, "TWR[%u]", (offset - AES_TWR(0)) >> 2);
		break;

	case AES_ALPHAR(0) ... AES_ALPHAR(3):
		snprintf(tmp, sz, "ALPHAR[%u]", (offset - AES_ALPHAR(0)) >> 2);
		break;

	default:
		snprintf(tmp, sz, "0x%02x", offset);
		break;
	}

	return tmp;
}
#endif /* VERBOSE_DEBUG */

/* Shared functions */
static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset)
{
	u32 value = readl_relaxed(dd->io_base + offset);

#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "read 0x%08x from %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	return value;
}

static inline void atmel_aes_write(struct atmel_aes_dev *dd,
				   u32 offset, u32 value)
{
#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "write 0x%08x into %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	writel_relaxed(value, dd->io_base + offset);
}

static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
			     u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		*value = atmel_aes_read(dd, offset);
}

static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
			      const u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		atmel_aes_write(dd, offset, *value);
}

static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset,
					u32 *value)
{
	atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}

static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset,
					 const u32 *value)
{
	atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}
static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd,
						atmel_aes_fn_t resume)
{
	u32 isr = atmel_aes_read(dd, AES_ISR);

	if (unlikely(isr & AES_INT_DATARDY))
		return resume(dd);

	dd->resume = resume;
	atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
	return -EINPROGRESS;
}
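
/*
 * The driver is structured as a chain of non-blocking steps: whenever the
 * hardware is not ready yet, the current step stores a "resume" callback in
 * dd->resume, enables the DATRDY interrupt and returns -EINPROGRESS. The
 * interrupt handler then schedules done_task, which re-enters the state
 * machine through dd->resume.
 */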
static inline size_t atmel_aes_padlen(size_t len, size_t block_size)
{
	len &= block_size - 1;
	return len ? block_size - len : 0;
}

static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_base_ctx *ctx)
{
	struct atmel_aes_dev *aes_dd = NULL;
	struct atmel_aes_dev *tmp;

	spin_lock_bh(&atmel_aes.lock);
	if (!ctx->dd) {
		list_for_each_entry(tmp, &atmel_aes.dev_list, list) {
			aes_dd = tmp;
			break;
		}
		ctx->dd = aes_dd;
	} else {
		aes_dd = ctx->dd;
	}
	spin_unlock_bh(&atmel_aes.lock);

	return aes_dd;
}

static int atmel_aes_hw_init(struct atmel_aes_dev *dd)
{
	int err;

	err = clk_enable(dd->iclk);
	if (err)
		return err;

	atmel_aes_write(dd, AES_CR, AES_CR_SWRST);
	atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET);

	return 0;
}

static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd)
{
	return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff;
}

static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd)
{
	int err;

	err = atmel_aes_hw_init(dd);
	if (err)
		return err;

	dd->hw_version = atmel_aes_get_version(dd);

	dev_info(dd->dev, "version: 0x%x\n", dd->hw_version);

	clk_disable(dd->iclk);
	return 0;
}

static inline void atmel_aes_set_mode(struct atmel_aes_dev *dd,
				      const struct atmel_aes_reqctx *rctx)
{
	/* Clear all but persistent flags and set request flags. */
	dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode;
}

static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd)
{
	return (dd->flags & AES_FLAGS_ENCRYPT);
}

#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err);
#endif
static void atmel_aes_set_iv_as_last_ciphertext_block(struct atmel_aes_dev *dd)
{
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

	if (req->nbytes < ivsize)
		return;

	if (rctx->mode & AES_FLAGS_ENCRYPT) {
		scatterwalk_map_and_copy(req->info, req->dst,
					 req->nbytes - ivsize, ivsize, 0);
	} else {
		if (req->src == req->dst)
			memcpy(req->info, rctx->lastc, ivsize);
		else
			scatterwalk_map_and_copy(req->info, req->src,
						 req->nbytes - ivsize,
						 ivsize, 0);
	}
}
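
/*
 * The crypto API expects req->info to hold the IV for a chained follow-up
 * request once this one completes. For CBC-like modes that is the last
 * ciphertext block; on in-place decryption that block has already been
 * overwritten by plaintext, which is why atmel_aes_crypt() saves it in
 * rctx->lastc beforehand.
 */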
static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
{
#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
	if (dd->ctx->is_aead)
		atmel_aes_authenc_complete(dd, err);
#endif

	clk_disable(dd->iclk);
	dd->flags &= ~AES_FLAGS_BUSY;

	if (!dd->ctx->is_aead)
		atmel_aes_set_iv_as_last_ciphertext_block(dd);

	if (dd->is_async)
		dd->areq->complete(dd->areq, err);

	tasklet_schedule(&dd->queue_task);

	return err;
}

static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma,
				     const u32 *iv, const u32 *key, int keylen)
{
	u32 valmr = 0;

	/* MR register must be set before IV registers */
	if (keylen == AES_KEYSIZE_128)
		valmr |= AES_MR_KEYSIZE_128;
	else if (keylen == AES_KEYSIZE_192)
		valmr |= AES_MR_KEYSIZE_192;
	else
		valmr |= AES_MR_KEYSIZE_256;

	valmr |= dd->flags & AES_FLAGS_MODE_MASK;

	if (use_dma) {
		valmr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			valmr |= AES_MR_DUALBUFF;
	} else {
		valmr |= AES_MR_SMOD_AUTO;
	}

	atmel_aes_write(dd, AES_MR, valmr);

	atmel_aes_write_n(dd, AES_KEYWR(0), key, SIZE_IN_WORDS(keylen));

	if (iv && (valmr & AES_MR_OPMOD_MASK) != AES_MR_OPMOD_ECB)
		atmel_aes_write_block(dd, AES_IVR(0), iv);
}

static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma,
					const u32 *iv)
{
	atmel_aes_write_ctrl_key(dd, use_dma, iv,
				 dd->ctx->key, dd->ctx->keylen);
}
/* CPU transfer */

static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd)
{
	int err = 0;
	u32 isr;

	for (;;) {
		atmel_aes_read_block(dd, AES_ODATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		if (dd->datalen < AES_BLOCK_SIZE)
			break;

		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_cpu_transfer;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				 dd->buf, dd->total))
		err = -EINVAL;

	if (err)
		return atmel_aes_complete(dd, err);

	return dd->cpu_transfer_complete(dd);
}

static int atmel_aes_cpu_start(struct atmel_aes_dev *dd,
			       struct scatterlist *src,
			       struct scatterlist *dst,
			       size_t len,
			       atmel_aes_fn_t resume)
{
	size_t padlen = atmel_aes_padlen(len, AES_BLOCK_SIZE);

	if (unlikely(len == 0))
		return -EINVAL;

	sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);

	dd->total = len;
	dd->real_dst = dst;
	dd->cpu_transfer_complete = resume;
	dd->datalen = len + padlen;
	dd->data = (u32 *)dd->buf;
	atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer);
}

/* DMA transfer */

static void atmel_aes_dma_callback(void *data);

static bool atmel_aes_check_aligned(struct atmel_aes_dev *dd,
				    struct scatterlist *sg,
				    size_t len,
				    struct atmel_aes_dma *dma)
{
	int nents;

	if (!IS_ALIGNED(len, dd->ctx->block_size))
		return false;

	for (nents = 0; sg; sg = sg_next(sg), ++nents) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return false;

		if (len <= sg->length) {
			if (!IS_ALIGNED(len, dd->ctx->block_size))
				return false;

			dma->nents = nents+1;
			dma->remainder = sg->length - len;
			sg->length = len;
			return true;
		}

		if (!IS_ALIGNED(sg->length, dd->ctx->block_size))
			return false;

		len -= sg->length;
	}

	return false;
}

static inline void atmel_aes_restore_sg(const struct atmel_aes_dma *dma)
{
	struct scatterlist *sg = dma->sg;
	int nents = dma->nents;

	if (!dma->remainder)
		return;

	while (--nents > 0 && sg)
		sg = sg_next(sg);

	if (!sg)
		return;

	sg->length += dma->remainder;
}
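
/*
 * atmel_aes_check_aligned() may shrink the last scatterlist entry so that
 * the mapped area becomes an exact multiple of the block size; the trimmed
 * byte count is remembered in dma->remainder and put back by
 * atmel_aes_restore_sg() once the transfer is unmapped.
 */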
static int atmel_aes_map(struct atmel_aes_dev *dd,
			 struct scatterlist *src,
			 struct scatterlist *dst,
			 size_t len)
{
	bool src_aligned, dst_aligned;
	size_t padlen;

	dd->total = len;
	dd->src.sg = src;
	dd->dst.sg = dst;
	dd->real_dst = dst;

	src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src);
	if (src == dst)
		dst_aligned = src_aligned;
	else
		dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst);
	if (!src_aligned || !dst_aligned) {
		padlen = atmel_aes_padlen(len, dd->ctx->block_size);

		if (dd->buflen < len + padlen)
			return -ENOMEM;

		if (!src_aligned) {
			sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);
			dd->src.sg = &dd->aligned_sg;
			dd->src.nents = 1;
			dd->src.remainder = 0;
		}

		if (!dst_aligned) {
			dd->dst.sg = &dd->aligned_sg;
			dd->dst.nents = 1;
			dd->dst.remainder = 0;
		}

		sg_init_table(&dd->aligned_sg, 1);
		sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen);
	}

	if (dd->src.sg == dd->dst.sg) {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_BIDIRECTIONAL);
		dd->dst.sg_len = dd->src.sg_len;
		if (!dd->src.sg_len)
			return -EFAULT;
	} else {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_TO_DEVICE);
		if (!dd->src.sg_len)
			return -EFAULT;

		dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents,
					    DMA_FROM_DEVICE);
		if (!dd->dst.sg_len) {
			dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
				     DMA_TO_DEVICE);
			return -EFAULT;
		}
	}

	return 0;
}

static void atmel_aes_unmap(struct atmel_aes_dev *dd)
{
	if (dd->src.sg == dd->dst.sg) {
		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_BIDIRECTIONAL);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	} else {
		dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents,
			     DMA_FROM_DEVICE);

		if (dd->dst.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->dst);

		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_TO_DEVICE);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	}

	if (dd->dst.sg == &dd->aligned_sg)
		sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				    dd->buf, dd->total);
}
static int atmel_aes_dma_transfer_start(struct atmel_aes_dev *dd,
					enum dma_slave_buswidth addr_width,
					enum dma_transfer_direction dir,
					u32 maxburst)
{
	struct dma_async_tx_descriptor *desc;
	struct dma_slave_config config;
	dma_async_tx_callback callback;
	struct atmel_aes_dma *dma;
	int err;

	memset(&config, 0, sizeof(config));
	config.direction = dir;
	config.src_addr_width = addr_width;
	config.dst_addr_width = addr_width;
	config.src_maxburst = maxburst;
	config.dst_maxburst = maxburst;

	switch (dir) {
	case DMA_MEM_TO_DEV:
		dma = &dd->src;
		callback = NULL;
		config.dst_addr = dd->phys_base + AES_IDATAR(0);
		break;

	case DMA_DEV_TO_MEM:
		dma = &dd->dst;
		callback = atmel_aes_dma_callback;
		config.src_addr = dd->phys_base + AES_ODATAR(0);
		break;

	default:
		return -EINVAL;
	}

	err = dmaengine_slave_config(dma->chan, &config);
	if (err)
		return err;

	desc = dmaengine_prep_slave_sg(dma->chan, dma->sg, dma->sg_len, dir,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -ENOMEM;

	desc->callback = callback;
	desc->callback_param = dd;
	dmaengine_submit(desc);
	dma_async_issue_pending(dma->chan);

	return 0;
}

static void atmel_aes_dma_transfer_stop(struct atmel_aes_dev *dd,
					enum dma_transfer_direction dir)
{
	struct atmel_aes_dma *dma;

	switch (dir) {
	case DMA_MEM_TO_DEV:
		dma = &dd->src;
		break;

	case DMA_DEV_TO_MEM:
		dma = &dd->dst;
		break;

	default:
		return;
	}

	dmaengine_terminate_all(dma->chan);
}
static int atmel_aes_dma_start(struct atmel_aes_dev *dd,
			       struct scatterlist *src,
			       struct scatterlist *dst,
			       size_t len,
			       atmel_aes_fn_t resume)
{
	enum dma_slave_buswidth addr_width;
	u32 maxburst;
	int err;

	switch (dd->ctx->block_size) {
	case CFB8_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
		maxburst = 1;
		break;

	case CFB16_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
		maxburst = 1;
		break;

	case CFB32_BLOCK_SIZE:
	case CFB64_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		maxburst = 1;
		break;

	case AES_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		maxburst = dd->caps.max_burst_size;
		break;

	default:
		err = -EINVAL;
		goto exit;
	}

	err = atmel_aes_map(dd, src, dst, len);
	if (err)
		goto exit;

	dd->resume = resume;

	/* Set output DMA transfer first */
	err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_DEV_TO_MEM,
					   maxburst);
	if (err)
		goto unmap;

	/* Then set input DMA transfer */
	err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_MEM_TO_DEV,
					   maxburst);
	if (err)
		goto output_transfer_stop;

	return -EINPROGRESS;

output_transfer_stop:
	atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM);
unmap:
	atmel_aes_unmap(dd);
exit:
	return atmel_aes_complete(dd, err);
}
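
/*
 * The output (device-to-memory) channel is deliberately started before the
 * input one: as soon as data is fed into IDATAR the hardware begins
 * producing output, so a sink must already be in place. Only the output
 * descriptor carries a completion callback; when it fires, the whole
 * transfer is done.
 */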
static void atmel_aes_dma_stop(struct atmel_aes_dev *dd)
{
	atmel_aes_dma_transfer_stop(dd, DMA_MEM_TO_DEV);
	atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM);
	atmel_aes_unmap(dd);
}

static void atmel_aes_dma_callback(void *data)
{
	struct atmel_aes_dev *dd = data;

	atmel_aes_dma_stop(dd);
	dd->is_async = true;
	(void)dd->resume(dd);
}
static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
				  struct crypto_async_request *new_areq)
{
	struct crypto_async_request *areq, *backlog;
	struct atmel_aes_base_ctx *ctx;
	unsigned long flags;
	bool start_async;
	int err, ret = 0;

	spin_lock_irqsave(&dd->lock, flags);
	if (new_areq)
		ret = crypto_enqueue_request(&dd->queue, new_areq);
	if (dd->flags & AES_FLAGS_BUSY) {
		spin_unlock_irqrestore(&dd->lock, flags);
		return ret;
	}
	backlog = crypto_get_backlog(&dd->queue);
	areq = crypto_dequeue_request(&dd->queue);
	if (areq)
		dd->flags |= AES_FLAGS_BUSY;
	spin_unlock_irqrestore(&dd->lock, flags);

	if (!areq)
		return ret;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	ctx = crypto_tfm_ctx(areq->tfm);

	dd->areq = areq;
	dd->ctx = ctx;
	start_async = (areq != new_areq);
	dd->is_async = start_async;

	/* WARNING: ctx->start() MAY change dd->is_async. */
	err = ctx->start(dd);
	return (start_async) ? ret : err;
}
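
/*
 * Return-value convention: when the request is started synchronously
 * (areq == new_areq) the caller gets the status of ctx->start() directly;
 * when it was merely queued behind a busy engine, the caller gets the
 * crypto_enqueue_request() status (typically -EINPROGRESS or -EBUSY) and
 * completion is reported later through areq->complete().
 */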
/* AES async block ciphers */

static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd)
{
	return atmel_aes_complete(dd, 0);
}

static int atmel_aes_start(struct atmel_aes_dev *dd)
{
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
	bool use_dma = (req->nbytes >= ATMEL_AES_DMA_THRESHOLD ||
			dd->ctx->block_size != AES_BLOCK_SIZE);
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	atmel_aes_write_ctrl(dd, use_dma, req->info);
	if (use_dma)
		return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes,
					   atmel_aes_transfer_complete);

	return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes,
				   atmel_aes_transfer_complete);
}

static inline struct atmel_aes_ctr_ctx *
atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
}
static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	struct scatterlist *src, *dst;
	u32 ctr, blocks;
	size_t datalen;
	bool use_dma, fragmented = false;

	/* Check for transfer completion. */
	ctx->offset += dd->total;
	if (ctx->offset >= req->nbytes)
		return atmel_aes_transfer_complete(dd);

	/* Compute data length. */
	datalen = req->nbytes - ctx->offset;
	blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
	ctr = be32_to_cpu(ctx->iv[3]);
	if (dd->caps.has_ctr32) {
		/* Check 32bit counter overflow. */
		u32 start = ctr;
		u32 end = start + blocks - 1;

		if (end < start) {
			ctr |= 0xffffffff;
			datalen = AES_BLOCK_SIZE * -start;
			fragmented = true;
		}
	} else {
		/* Check 16bit counter overflow. */
		u16 start = ctr & 0xffff;
		u16 end = start + (u16)blocks - 1;

		if (blocks >> 16 || end < start) {
			ctr |= 0xffff;
			datalen = AES_BLOCK_SIZE * (0x10000-start);
			fragmented = true;
		}
	}
	use_dma = (datalen >= ATMEL_AES_DMA_THRESHOLD);

	/* Jump to offset. */
	src = scatterwalk_ffwd(ctx->src, req->src, ctx->offset);
	dst = ((req->src == req->dst) ? src :
	       scatterwalk_ffwd(ctx->dst, req->dst, ctx->offset));

	/* Configure hardware. */
	atmel_aes_write_ctrl(dd, use_dma, ctx->iv);
	if (unlikely(fragmented)) {
		/*
		 * Increment the counter manually to cope with the hardware
		 * counter overflow.
		 */
		ctx->iv[3] = cpu_to_be32(ctr);
		crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);
	}

	if (use_dma)
		return atmel_aes_dma_start(dd, src, dst, datalen,
					   atmel_aes_ctr_transfer);

	return atmel_aes_cpu_start(dd, src, dst, datalen,
				   atmel_aes_ctr_transfer);
}
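
/*
 * Worked example for the 16-bit counter limit: with iv[3] ending in 0xfffe
 * and 5 blocks left, end = 0xfffe + 5 - 1 wraps to 0x0002, so only
 * 0x10000 - 0xfffe = 2 blocks (32 bytes) are processed this round; the IV
 * is then stepped past the wrap with crypto_inc() and the transfer resumes
 * here for the remaining blocks.
 */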
static int atmel_aes_ctr_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	memcpy(ctx->iv, req->info, AES_BLOCK_SIZE);
	ctx->offset = 0;
	dd->total = 0;
	return atmel_aes_ctr_transfer(dd);
}

static int atmel_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
{
	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
	struct atmel_aes_base_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
	struct atmel_aes_reqctx *rctx;
	struct atmel_aes_dev *dd;

	switch (mode & AES_FLAGS_OPMODE_MASK) {
	case AES_FLAGS_CFB8:
		ctx->block_size = CFB8_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB16:
		ctx->block_size = CFB16_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB32:
		ctx->block_size = CFB32_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB64:
		ctx->block_size = CFB64_BLOCK_SIZE;
		break;

	default:
		ctx->block_size = AES_BLOCK_SIZE;
		break;
	}
	ctx->is_aead = false;

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	rctx = ablkcipher_request_ctx(req);
	rctx->mode = mode;

	if (!(mode & AES_FLAGS_ENCRYPT) && (req->src == req->dst)) {
		unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);

		if (req->nbytes >= ivsize)
			scatterwalk_map_and_copy(rctx->lastc, req->src,
						 req->nbytes - ivsize,
						 ivsize, 0);
	}

	return atmel_aes_handle_queue(dd, &req->base);
}

static int atmel_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct atmel_aes_base_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256) {
		crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int atmel_aes_ecb_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ecb_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_ECB);
}

static int atmel_aes_cbc_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cbc_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CBC);
}

static int atmel_aes_ofb_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_OFB | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ofb_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_OFB);
}

static int atmel_aes_cfb_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB128 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB128);
}

static int atmel_aes_cfb64_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB64 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb64_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB64);
}

static int atmel_aes_cfb32_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB32 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb32_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB32);
}

static int atmel_aes_cfb16_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB16 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb16_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB16);
}

static int atmel_aes_cfb8_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB8 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb8_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB8);
}

static int atmel_aes_ctr_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CTR | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ctr_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CTR);
}

static int atmel_aes_cra_init(struct crypto_tfm *tfm)
{
	struct atmel_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
	ctx->base.start = atmel_aes_start;

	return 0;
}

static int atmel_aes_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct atmel_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
	ctx->base.start = atmel_aes_ctr_start;

	return 0;
}
[] = {
1277 .cra_name
= "ecb(aes)",
1278 .cra_driver_name
= "atmel-ecb-aes",
1279 .cra_priority
= ATMEL_AES_PRIORITY
,
1280 .cra_flags
= CRYPTO_ALG_TYPE_ABLKCIPHER
| CRYPTO_ALG_ASYNC
,
1281 .cra_blocksize
= AES_BLOCK_SIZE
,
1282 .cra_ctxsize
= sizeof(struct atmel_aes_ctx
),
1283 .cra_alignmask
= 0xf,
1284 .cra_type
= &crypto_ablkcipher_type
,
1285 .cra_module
= THIS_MODULE
,
1286 .cra_init
= atmel_aes_cra_init
,
1287 .cra_u
.ablkcipher
= {
1288 .min_keysize
= AES_MIN_KEY_SIZE
,
1289 .max_keysize
= AES_MAX_KEY_SIZE
,
1290 .setkey
= atmel_aes_setkey
,
1291 .encrypt
= atmel_aes_ecb_encrypt
,
1292 .decrypt
= atmel_aes_ecb_decrypt
,
1296 .cra_name
= "cbc(aes)",
1297 .cra_driver_name
= "atmel-cbc-aes",
1298 .cra_priority
= ATMEL_AES_PRIORITY
,
1299 .cra_flags
= CRYPTO_ALG_TYPE_ABLKCIPHER
| CRYPTO_ALG_ASYNC
,
1300 .cra_blocksize
= AES_BLOCK_SIZE
,
1301 .cra_ctxsize
= sizeof(struct atmel_aes_ctx
),
1302 .cra_alignmask
= 0xf,
1303 .cra_type
= &crypto_ablkcipher_type
,
1304 .cra_module
= THIS_MODULE
,
1305 .cra_init
= atmel_aes_cra_init
,
1306 .cra_u
.ablkcipher
= {
1307 .min_keysize
= AES_MIN_KEY_SIZE
,
1308 .max_keysize
= AES_MAX_KEY_SIZE
,
1309 .ivsize
= AES_BLOCK_SIZE
,
1310 .setkey
= atmel_aes_setkey
,
1311 .encrypt
= atmel_aes_cbc_encrypt
,
1312 .decrypt
= atmel_aes_cbc_decrypt
,
1316 .cra_name
= "ofb(aes)",
1317 .cra_driver_name
= "atmel-ofb-aes",
1318 .cra_priority
= ATMEL_AES_PRIORITY
,
1319 .cra_flags
= CRYPTO_ALG_TYPE_ABLKCIPHER
| CRYPTO_ALG_ASYNC
,
1320 .cra_blocksize
= AES_BLOCK_SIZE
,
1321 .cra_ctxsize
= sizeof(struct atmel_aes_ctx
),
1322 .cra_alignmask
= 0xf,
1323 .cra_type
= &crypto_ablkcipher_type
,
1324 .cra_module
= THIS_MODULE
,
1325 .cra_init
= atmel_aes_cra_init
,
1326 .cra_u
.ablkcipher
= {
1327 .min_keysize
= AES_MIN_KEY_SIZE
,
1328 .max_keysize
= AES_MAX_KEY_SIZE
,
1329 .ivsize
= AES_BLOCK_SIZE
,
1330 .setkey
= atmel_aes_setkey
,
1331 .encrypt
= atmel_aes_ofb_encrypt
,
1332 .decrypt
= atmel_aes_ofb_decrypt
,
1336 .cra_name
= "cfb(aes)",
1337 .cra_driver_name
= "atmel-cfb-aes",
1338 .cra_priority
= ATMEL_AES_PRIORITY
,
1339 .cra_flags
= CRYPTO_ALG_TYPE_ABLKCIPHER
| CRYPTO_ALG_ASYNC
,
1340 .cra_blocksize
= AES_BLOCK_SIZE
,
1341 .cra_ctxsize
= sizeof(struct atmel_aes_ctx
),
1342 .cra_alignmask
= 0xf,
1343 .cra_type
= &crypto_ablkcipher_type
,
1344 .cra_module
= THIS_MODULE
,
1345 .cra_init
= atmel_aes_cra_init
,
1346 .cra_u
.ablkcipher
= {
1347 .min_keysize
= AES_MIN_KEY_SIZE
,
1348 .max_keysize
= AES_MAX_KEY_SIZE
,
1349 .ivsize
= AES_BLOCK_SIZE
,
1350 .setkey
= atmel_aes_setkey
,
1351 .encrypt
= atmel_aes_cfb_encrypt
,
1352 .decrypt
= atmel_aes_cfb_decrypt
,
1356 .cra_name
= "cfb32(aes)",
1357 .cra_driver_name
= "atmel-cfb32-aes",
1358 .cra_priority
= ATMEL_AES_PRIORITY
,
1359 .cra_flags
= CRYPTO_ALG_TYPE_ABLKCIPHER
| CRYPTO_ALG_ASYNC
,
1360 .cra_blocksize
= CFB32_BLOCK_SIZE
,
1361 .cra_ctxsize
= sizeof(struct atmel_aes_ctx
),
1362 .cra_alignmask
= 0x3,
1363 .cra_type
= &crypto_ablkcipher_type
,
1364 .cra_module
= THIS_MODULE
,
1365 .cra_init
= atmel_aes_cra_init
,
1366 .cra_u
.ablkcipher
= {
1367 .min_keysize
= AES_MIN_KEY_SIZE
,
1368 .max_keysize
= AES_MAX_KEY_SIZE
,
1369 .ivsize
= AES_BLOCK_SIZE
,
1370 .setkey
= atmel_aes_setkey
,
1371 .encrypt
= atmel_aes_cfb32_encrypt
,
1372 .decrypt
= atmel_aes_cfb32_decrypt
,
1376 .cra_name
= "cfb16(aes)",
1377 .cra_driver_name
= "atmel-cfb16-aes",
1378 .cra_priority
= ATMEL_AES_PRIORITY
,
1379 .cra_flags
= CRYPTO_ALG_TYPE_ABLKCIPHER
| CRYPTO_ALG_ASYNC
,
1380 .cra_blocksize
= CFB16_BLOCK_SIZE
,
1381 .cra_ctxsize
= sizeof(struct atmel_aes_ctx
),
1382 .cra_alignmask
= 0x1,
1383 .cra_type
= &crypto_ablkcipher_type
,
1384 .cra_module
= THIS_MODULE
,
1385 .cra_init
= atmel_aes_cra_init
,
1386 .cra_u
.ablkcipher
= {
1387 .min_keysize
= AES_MIN_KEY_SIZE
,
1388 .max_keysize
= AES_MAX_KEY_SIZE
,
1389 .ivsize
= AES_BLOCK_SIZE
,
1390 .setkey
= atmel_aes_setkey
,
1391 .encrypt
= atmel_aes_cfb16_encrypt
,
1392 .decrypt
= atmel_aes_cfb16_decrypt
,
1396 .cra_name
= "cfb8(aes)",
1397 .cra_driver_name
= "atmel-cfb8-aes",
1398 .cra_priority
= ATMEL_AES_PRIORITY
,
1399 .cra_flags
= CRYPTO_ALG_TYPE_ABLKCIPHER
| CRYPTO_ALG_ASYNC
,
1400 .cra_blocksize
= CFB8_BLOCK_SIZE
,
1401 .cra_ctxsize
= sizeof(struct atmel_aes_ctx
),
1402 .cra_alignmask
= 0x0,
1403 .cra_type
= &crypto_ablkcipher_type
,
1404 .cra_module
= THIS_MODULE
,
1405 .cra_init
= atmel_aes_cra_init
,
1406 .cra_u
.ablkcipher
= {
1407 .min_keysize
= AES_MIN_KEY_SIZE
,
1408 .max_keysize
= AES_MAX_KEY_SIZE
,
1409 .ivsize
= AES_BLOCK_SIZE
,
1410 .setkey
= atmel_aes_setkey
,
1411 .encrypt
= atmel_aes_cfb8_encrypt
,
1412 .decrypt
= atmel_aes_cfb8_decrypt
,
1416 .cra_name
= "ctr(aes)",
1417 .cra_driver_name
= "atmel-ctr-aes",
1418 .cra_priority
= ATMEL_AES_PRIORITY
,
1419 .cra_flags
= CRYPTO_ALG_TYPE_ABLKCIPHER
| CRYPTO_ALG_ASYNC
,
1421 .cra_ctxsize
= sizeof(struct atmel_aes_ctr_ctx
),
1422 .cra_alignmask
= 0xf,
1423 .cra_type
= &crypto_ablkcipher_type
,
1424 .cra_module
= THIS_MODULE
,
1425 .cra_init
= atmel_aes_ctr_cra_init
,
1426 .cra_u
.ablkcipher
= {
1427 .min_keysize
= AES_MIN_KEY_SIZE
,
1428 .max_keysize
= AES_MAX_KEY_SIZE
,
1429 .ivsize
= AES_BLOCK_SIZE
,
1430 .setkey
= atmel_aes_setkey
,
1431 .encrypt
= atmel_aes_ctr_encrypt
,
1432 .decrypt
= atmel_aes_ctr_decrypt
,
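
/*
 * A minimal sketch of how these algorithms are reached (not part of this
 * driver): a kernel user allocates a transform by algorithm name and the
 * crypto core picks this implementation when its priority (300) wins over
 * the software fallback:
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *		...
 *		crypto_free_skcipher(tfm);
 *	}
 */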
static struct crypto_alg aes_cfb64_alg = {
	.cra_name		= "cfb64(aes)",
	.cra_driver_name	= "atmel-cfb64-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CFB64_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb64_encrypt,
		.decrypt	= atmel_aes_cfb64_decrypt,
	}
};

/* gcm aead functions */

static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
			       const u32 *data, size_t datalen,
			       const u32 *ghash_in, u32 *ghash_out,
			       atmel_aes_fn_t resume);
static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);

static int atmel_aes_gcm_start(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_process(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_length(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_data(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd);

static inline struct atmel_aes_gcm_ctx *
atmel_aes_gcm_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_gcm_ctx, base);
}
static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
			       const u32 *data, size_t datalen,
			       const u32 *ghash_in, u32 *ghash_out,
			       atmel_aes_fn_t resume)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);

	dd->data = (u32 *)data;
	dd->datalen = datalen;
	ctx->ghash_in = ghash_in;
	ctx->ghash_out = ghash_out;
	ctx->ghash_resume = resume;

	atmel_aes_write_ctrl(dd, false, NULL);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_ghash_init);
}

static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);

	/* Set the data length. */
	atmel_aes_write(dd, AES_AADLENR, dd->total);
	atmel_aes_write(dd, AES_CLENR, 0);

	/* If needed, overwrite the GCM Intermediate Hash Word Registers */
	if (ctx->ghash_in)
		atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in);

	return atmel_aes_gcm_ghash_finalize(dd);
}

static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	u32 isr;

	/* Write data into the Input Data Registers. */
	while (dd->datalen > 0) {
		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_gcm_ghash_finalize;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	/* Read the computed hash from GHASHRx. */
	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out);

	return ctx->ghash_resume(dd);
}
static int atmel_aes_gcm_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct atmel_aes_reqctx *rctx = aead_request_ctx(req);
	size_t ivsize = crypto_aead_ivsize(tfm);
	size_t datalen, padlen;
	const void *iv = req->iv;
	u8 *data = dd->buf;
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	if (likely(ivsize == GCM_AES_IV_SIZE)) {
		memcpy(ctx->j0, iv, ivsize);
		ctx->j0[3] = cpu_to_be32(1);
		return atmel_aes_gcm_process(dd);
	}

	padlen = atmel_aes_padlen(ivsize, AES_BLOCK_SIZE);
	datalen = ivsize + padlen + AES_BLOCK_SIZE;
	if (datalen > dd->buflen)
		return atmel_aes_complete(dd, -EINVAL);

	memcpy(data, iv, ivsize);
	memset(data + ivsize, 0, padlen + sizeof(u64));
	((u64 *)(data + datalen))[-1] = cpu_to_be64(ivsize * 8);

	return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen,
				   NULL, ctx->j0, atmel_aes_gcm_process);
}
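
/*
 * This follows NIST SP 800-38D: with the usual 96-bit IV, J0 is simply
 * IV || 0^31 || 1 (built above by appending a big-endian 1); for any other
 * IV length, J0 = GHASH(IV || padding || [len(IV)]64), which is what the
 * atmel_aes_gcm_ghash() call computes.
 */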
static int atmel_aes_gcm_process(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 authsize;

	/* Compute text length. */
	authsize = crypto_aead_authsize(tfm);
	ctx->textlen = req->cryptlen - (enc ? 0 : authsize);

	/*
	 * According to the tcrypt test suite, the GCM Automatic Tag Generation
	 * fails when both the message and its associated data are empty.
	 */
	if (likely(req->assoclen != 0 || ctx->textlen != 0))
		dd->flags |= AES_FLAGS_GTAGEN;

	atmel_aes_write_ctrl(dd, false, NULL);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_length);
}

static int atmel_aes_gcm_length(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	u32 j0_lsw, *j0 = ctx->j0;
	size_t padlen;

	/* Write incr32(J0) into IV. */
	j0_lsw = j0[3];
	j0[3] = cpu_to_be32(be32_to_cpu(j0[3]) + 1);
	atmel_aes_write_block(dd, AES_IVR(0), j0);
	j0[3] = j0_lsw;

	/* Set aad and text lengths. */
	atmel_aes_write(dd, AES_AADLENR, req->assoclen);
	atmel_aes_write(dd, AES_CLENR, ctx->textlen);

	/* Check whether AAD is present. */
	if (unlikely(req->assoclen == 0)) {
		dd->datalen = 0;
		return atmel_aes_gcm_data(dd);
	}

	/* Copy assoc data and add padding. */
	padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE);
	if (unlikely(req->assoclen + padlen > dd->buflen))
		return atmel_aes_complete(dd, -EINVAL);
	sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen);

	/* Write assoc data into the Input Data register. */
	dd->data = (u32 *)dd->buf;
	dd->datalen = req->assoclen + padlen;
	return atmel_aes_gcm_data(dd);
}
static int atmel_aes_gcm_data(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	bool use_dma = (ctx->textlen >= ATMEL_AES_DMA_THRESHOLD);
	struct scatterlist *src, *dst;
	u32 isr, mr;

	/* Write AAD first. */
	while (dd->datalen > 0) {
		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_gcm_data;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	/* GMAC only. */
	if (unlikely(ctx->textlen == 0))
		return atmel_aes_gcm_tag_init(dd);

	/* Prepare src and dst scatter lists to transfer cipher/plain texts */
	src = scatterwalk_ffwd(ctx->src, req->src, req->assoclen);
	dst = ((req->src == req->dst) ? src :
	       scatterwalk_ffwd(ctx->dst, req->dst, req->assoclen));

	if (use_dma) {
		/* Update the Mode Register for DMA transfers. */
		mr = atmel_aes_read(dd, AES_MR);
		mr &= ~(AES_MR_SMOD_MASK | AES_MR_DUALBUFF);
		mr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			mr |= AES_MR_DUALBUFF;
		atmel_aes_write(dd, AES_MR, mr);

		return atmel_aes_dma_start(dd, src, dst, ctx->textlen,
					   atmel_aes_gcm_tag_init);
	}

	return atmel_aes_cpu_start(dd, src, dst, ctx->textlen,
				   atmel_aes_gcm_tag_init);
}
static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	u64 *data = dd->buf;

	if (likely(dd->flags & AES_FLAGS_GTAGEN)) {
		if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) {
			dd->resume = atmel_aes_gcm_tag_init;
			atmel_aes_write(dd, AES_IER, AES_INT_TAGRDY);
			return -EINPROGRESS;
		}

		return atmel_aes_gcm_finalize(dd);
	}

	/* Read the GCM Intermediate Hash Word Registers. */
	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash);

	data[0] = cpu_to_be64(req->assoclen * 8);
	data[1] = cpu_to_be64(ctx->textlen * 8);

	return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE,
				   ctx->ghash, ctx->ghash, atmel_aes_gcm_tag);
}
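
/*
 * Software fallback for the tag: when automatic tag generation was
 * disabled (empty message and AAD), GHASH is finished by hand over the
 * lengths block [len(AAD)]64 || [len(C)]64 and the result is then
 * encrypted in CTR mode with J0 (see atmel_aes_gcm_tag() below).
 */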
static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	unsigned long flags;

	/*
	 * Change mode to CTR to complete the tag generation.
	 * Use J0 as Initialization Vector.
	 */
	flags = dd->flags;
	dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN);
	dd->flags |= AES_FLAGS_CTR;
	atmel_aes_write_ctrl(dd, false, ctx->j0);
	dd->flags = flags;

	atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_finalize);
}

static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 offset, authsize, itag[4], *otag = ctx->tag;
	int err;

	/* Read the computed tag. */
	if (likely(dd->flags & AES_FLAGS_GTAGEN))
		atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag);
	else
		atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag);

	offset = req->assoclen + ctx->textlen;
	authsize = crypto_aead_authsize(tfm);
	if (enc) {
		scatterwalk_map_and_copy(otag, req->dst, offset, authsize, 1);
		err = 0;
	} else {
		scatterwalk_map_and_copy(itag, req->src, offset, authsize, 0);
		err = crypto_memneq(itag, otag, authsize) ? -EBADMSG : 0;
	}

	return atmel_aes_complete(dd, err);
}
static int atmel_aes_gcm_crypt(struct aead_request *req,
			       unsigned long mode)
{
	struct atmel_aes_base_ctx *ctx;
	struct atmel_aes_reqctx *rctx;
	struct atmel_aes_dev *dd;

	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	ctx->block_size = AES_BLOCK_SIZE;
	ctx->is_aead = true;

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	rctx = aead_request_ctx(req);
	rctx->mode = AES_FLAGS_GCM | mode;

	return atmel_aes_handle_queue(dd, &req->base);
}

static int atmel_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
				unsigned int keylen)
{
	struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_256 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_128) {
		crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int atmel_aes_gcm_setauthsize(struct crypto_aead *tfm,
				     unsigned int authsize)
{
	/* Same as crypto_gcm_authsize() from crypto/gcm.c */
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int atmel_aes_gcm_encrypt(struct aead_request *req)
{
	return atmel_aes_gcm_crypt(req, AES_FLAGS_ENCRYPT);
}

static int atmel_aes_gcm_decrypt(struct aead_request *req)
{
	return atmel_aes_gcm_crypt(req, 0);
}

static int atmel_aes_gcm_init(struct crypto_aead *tfm)
{
	struct atmel_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_aead_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
	ctx->base.start = atmel_aes_gcm_start;

	return 0;
}

static struct aead_alg aes_gcm_alg = {
	.setkey		= atmel_aes_gcm_setkey,
	.setauthsize	= atmel_aes_gcm_setauthsize,
	.encrypt	= atmel_aes_gcm_encrypt,
	.decrypt	= atmel_aes_gcm_decrypt,
	.init		= atmel_aes_gcm_init,
	.ivsize		= GCM_AES_IV_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "atmel-gcm-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct atmel_aes_gcm_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
};
/* xts functions */

static inline struct atmel_aes_xts_ctx *
atmel_aes_xts_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_xts_ctx, base);
}

static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd);

static int atmel_aes_xts_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx);
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
	unsigned long flags;
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	/* Compute the tweak value from req->info with ecb(aes). */
	flags = dd->flags;
	dd->flags &= ~AES_FLAGS_MODE_MASK;
	dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
	atmel_aes_write_ctrl_key(dd, false, NULL,
				 ctx->key2, ctx->base.keylen);
	dd->flags = flags;

	atmel_aes_write_block(dd, AES_IDATAR(0), req->info);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data);
}
static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd)
{
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	bool use_dma = (req->nbytes >= ATMEL_AES_DMA_THRESHOLD);
	u32 tweak[AES_BLOCK_SIZE / sizeof(u32)];
	static const u32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), };
	u8 *tweak_bytes = (u8 *)tweak;
	int i;

	/* Read the computed ciphered tweak value. */
	atmel_aes_read_block(dd, AES_ODATAR(0), tweak);

	/*
	 * Hardware quirk:
	 * the order of the ciphered tweak bytes needs to be reversed before
	 * writing them into the TWR registers.
	 */
	for (i = 0; i < AES_BLOCK_SIZE/2; ++i) {
		u8 tmp = tweak_bytes[AES_BLOCK_SIZE - 1 - i];

		tweak_bytes[AES_BLOCK_SIZE - 1 - i] = tweak_bytes[i];
		tweak_bytes[i] = tmp;
	}

	/* Process the data. */
	atmel_aes_write_ctrl(dd, use_dma, NULL);
	atmel_aes_write_block(dd, AES_TWR(0), tweak);
	atmel_aes_write_block(dd, AES_ALPHAR(0), one);
	if (use_dma)
		return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes,
					   atmel_aes_transfer_complete);

	return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes,
				   atmel_aes_transfer_complete);
}
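
/*
 * The tweak handling mirrors the IEEE P1619 definition: the tweak is first
 * encrypted with the second half of the XTS key (done in
 * atmel_aes_xts_start() above), byte-reversed to match the order the
 * hardware expects, and written to TWR; ALPHAR is seeded with 1 so the
 * hardware can derive the per-block tweak multiplications itself.
 */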
static int atmel_aes_xts_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
				unsigned int keylen)
{
	struct atmel_aes_xts_ctx *ctx = crypto_ablkcipher_ctx(tfm);
	int err;

	err = xts_check_key(crypto_ablkcipher_tfm(tfm), key, keylen);
	if (err)
		return err;

	memcpy(ctx->base.key, key, keylen/2);
	memcpy(ctx->key2, key + keylen/2, keylen/2);
	ctx->base.keylen = keylen/2;

	return 0;
}

static int atmel_aes_xts_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_XTS | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_xts_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_XTS);
}

static int atmel_aes_xts_cra_init(struct crypto_tfm *tfm)
{
	struct atmel_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);

	tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
	ctx->base.start = atmel_aes_xts_start;

	return 0;
}

static struct crypto_alg aes_xts_alg = {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "atmel-xts-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_xts_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_xts_cra_init,
	.cra_u.ablkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_xts_setkey,
		.encrypt	= atmel_aes_xts_encrypt,
		.decrypt	= atmel_aes_xts_decrypt,
	}
};
#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
/* authenc aead functions */

static int atmel_aes_authenc_start(struct atmel_aes_dev *dd);
static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
				  bool is_async);
static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
				      bool is_async);
static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd);
static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
				   bool is_async);

static void atmel_aes_authenc_complete(struct atmel_aes_dev *dd, int err)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);

	if (err && (dd->flags & AES_FLAGS_OWN_SHA))
		atmel_sha_authenc_abort(&rctx->auth_req);
	dd->flags &= ~AES_FLAGS_OWN_SHA;
}

static int atmel_aes_authenc_start(struct atmel_aes_dev *dd)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
	int err;

	atmel_aes_set_mode(dd, &rctx->base);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	return atmel_sha_authenc_schedule(&rctx->auth_req, ctx->auth,
					  atmel_aes_authenc_init, dd);
}

static int atmel_aes_authenc_init(struct atmel_aes_dev *dd, int err,
				  bool is_async)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);

	if (is_async)
		dd->is_async = true;
	if (err)
		return atmel_aes_complete(dd, err);

	/* If here, we've got the ownership of the SHA device. */
	dd->flags |= AES_FLAGS_OWN_SHA;

	/* Configure the SHA device. */
	return atmel_sha_authenc_init(&rctx->auth_req,
				      req->src, req->assoclen,
				      rctx->textlen,
				      atmel_aes_authenc_transfer, dd);
}
static int atmel_aes_authenc_transfer(struct atmel_aes_dev *dd, int err,
				      bool is_async)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	bool enc = atmel_aes_is_encrypt(dd);
	struct scatterlist *src, *dst;
	u32 iv[AES_BLOCK_SIZE / sizeof(u32)];
	u32 emr;

	if (is_async)
		dd->is_async = true;
	if (err)
		return atmel_aes_complete(dd, err);

	/* Prepare src and dst scatter-lists to transfer cipher/plain texts. */
	src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen);
	dst = src;

	if (req->src != req->dst)
		dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen);

	/* Configure the AES device. */
	memcpy(iv, req->iv, sizeof(iv));

	/*
	 * Here we always set the 2nd parameter of atmel_aes_write_ctrl() to
	 * 'true' even if the data transfer is actually performed by the CPU (so
	 * not by the DMA) because we must force the AES_MR_SMOD bitfield to the
	 * value AES_MR_SMOD_IDATAR0. Indeed, both AES_MR_SMOD and SHA_MR_SMOD
	 * must be set to *_MR_SMOD_IDATAR0.
	 */
	atmel_aes_write_ctrl(dd, true, iv);
	emr = AES_EMR_PLIPEN;
	if (!enc)
		emr |= AES_EMR_PLIPD;
	atmel_aes_write(dd, AES_EMR, emr);

	/* Transfer data. */
	return atmel_aes_dma_start(dd, src, dst, rctx->textlen,
				   atmel_aes_authenc_digest);
}
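
/*
 * AES_EMR_PLIPEN enables the hardware pipe (PLIP) between the AES and SHA
 * engines, so the ciphertext is hashed on the fly instead of crossing the
 * bus twice; AES_EMR_PLIPD selects the decrypt direction. This is also why
 * both engines must fetch from IDATAR0 (the SMOD forcing noted above).
 */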
static int atmel_aes_authenc_digest(struct atmel_aes_dev *dd)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);

	/* atmel_sha_authenc_final() releases the SHA device. */
	dd->flags &= ~AES_FLAGS_OWN_SHA;
	return atmel_sha_authenc_final(&rctx->auth_req,
				       rctx->digest, sizeof(rctx->digest),
				       atmel_aes_authenc_final, dd);
}

static int atmel_aes_authenc_final(struct atmel_aes_dev *dd, int err,
				   bool is_async)
{
	struct aead_request *req = aead_request_cast(dd->areq);
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 idigest[SHA512_DIGEST_SIZE / sizeof(u32)], *odigest = rctx->digest;
	u32 offs, authsize;

	if (is_async)
		dd->is_async = true;
	if (err)
		goto complete;

	offs = req->assoclen + rctx->textlen;
	authsize = crypto_aead_authsize(tfm);
	if (enc) {
		scatterwalk_map_and_copy(odigest, req->dst, offs, authsize, 1);
	} else {
		scatterwalk_map_and_copy(idigest, req->src, offs, authsize, 0);
		if (crypto_memneq(idigest, odigest, authsize))
			err = -EBADMSG;
	}

complete:
	return atmel_aes_complete(dd, err);
}

static int atmel_aes_authenc_setkey(struct crypto_aead *tfm, const u8 *key,
				    unsigned int keylen)
{
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
	struct crypto_authenc_keys keys;
	u32 flags;
	int err;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.enckeylen > sizeof(ctx->base.key))
		goto badkey;

	/* Save auth key. */
	flags = crypto_aead_get_flags(tfm);
	err = atmel_sha_authenc_setkey(ctx->auth,
				       keys.authkey, keys.authkeylen,
				       &flags);
	crypto_aead_set_flags(tfm, flags & CRYPTO_TFM_RES_MASK);
	if (err) {
		memzero_explicit(&keys, sizeof(keys));
		return err;
	}

	/* Save enc key. */
	ctx->base.keylen = keys.enckeylen;
	memcpy(ctx->base.key, keys.enckey, keys.enckeylen);

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
static int atmel_aes_authenc_init_tfm(struct crypto_aead *tfm,
				      unsigned long auth_mode)
{
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);
	unsigned int auth_reqsize = atmel_sha_authenc_get_reqsize();

	ctx->auth = atmel_sha_authenc_spawn(auth_mode);
	if (IS_ERR(ctx->auth))
		return PTR_ERR(ctx->auth);

	crypto_aead_set_reqsize(tfm, (sizeof(struct atmel_aes_authenc_reqctx) +
				      auth_reqsize));
	ctx->base.start = atmel_aes_authenc_start;

	return 0;
}

static int atmel_aes_authenc_hmac_sha1_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA1);
}

static int atmel_aes_authenc_hmac_sha224_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA224);
}

static int atmel_aes_authenc_hmac_sha256_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA256);
}

static int atmel_aes_authenc_hmac_sha384_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA384);
}

static int atmel_aes_authenc_hmac_sha512_init_tfm(struct crypto_aead *tfm)
{
	return atmel_aes_authenc_init_tfm(tfm, SHA_FLAGS_HMAC_SHA512);
}

static void atmel_aes_authenc_exit_tfm(struct crypto_aead *tfm)
{
	struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm);

	atmel_sha_authenc_free(ctx->auth);
}

static int atmel_aes_authenc_crypt(struct aead_request *req,
				   unsigned long mode)
{
	struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);
	u32 authsize = crypto_aead_authsize(tfm);
	bool enc = (mode & AES_FLAGS_ENCRYPT);
	struct atmel_aes_dev *dd;

	/* Compute text length. */
	if (!enc && req->cryptlen < authsize)
		return -EINVAL;
	rctx->textlen = req->cryptlen - (enc ? 0 : authsize);

	/*
	 * Currently, empty messages are not supported yet:
	 * the SHA auto-padding can be used only on non-empty messages.
	 * Hence a special case needs to be implemented for empty messages.
	 */
	if (!rctx->textlen && !req->assoclen)
		return -EINVAL;

	rctx->base.mode = mode;
	ctx->block_size = AES_BLOCK_SIZE;
	ctx->is_aead = true;

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	return atmel_aes_handle_queue(dd, &req->base);
}

static int atmel_aes_authenc_cbc_aes_encrypt(struct aead_request *req)
{
	return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_authenc_cbc_aes_decrypt(struct aead_request *req)
{
	return atmel_aes_authenc_crypt(req, AES_FLAGS_CBC);
}
static struct aead_alg aes_authenc_algs[] = {
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha1_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA1_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha1),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha1-cbc-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha224_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA224_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha224),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha224-cbc-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha256_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA256_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha256),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha256-cbc-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha384_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA384_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha384),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha384-cbc-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= atmel_aes_authenc_setkey,
	.encrypt	= atmel_aes_authenc_cbc_aes_encrypt,
	.decrypt	= atmel_aes_authenc_cbc_aes_decrypt,
	.init		= atmel_aes_authenc_hmac_sha512_init_tfm,
	.exit		= atmel_aes_authenc_exit_tfm,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= SHA512_DIGEST_SIZE,

	.base = {
		.cra_name		= "authenc(hmac(sha512),cbc(aes))",
		.cra_driver_name	= "atmel-authenc-hmac-sha512-cbc-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct atmel_aes_authenc_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
};
#endif /* CONFIG_CRYPTO_DEV_ATMEL_AUTHENC */
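/*
 * Usage note: these transforms are reached through the generic kernel crypto
 * AEAD API rather than called directly. A minimal sketch (illustration only,
 * not code from this driver; error handling omitted; for "authenc" algorithms
 * the key blob passed to setkey must follow the authenc key layout):
 *
 *	struct crypto_aead *tfm;
 *
 *	tfm = crypto_alloc_aead("authenc(hmac(sha1),cbc(aes))", 0, 0);
 *	crypto_aead_setkey(tfm, key, keylen);
 *	crypto_aead_setauthsize(tfm, SHA1_DIGEST_SIZE);
 *	...
 *	crypto_free_aead(tfm);
 */
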
/* Probe functions */

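/*
 * A block-aligned bounce buffer of ATMEL_AES_BUFFER_SIZE bytes is kept per
 * device, presumably to linearize scatterlists that cannot be used for DMA
 * as-is.
 */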
static int atmel_aes_buff_init(struct atmel_aes_dev *dd)
{
	dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER);
	dd->buflen = ATMEL_AES_BUFFER_SIZE;
	dd->buflen &= ~(AES_BLOCK_SIZE - 1);

	if (!dd->buf) {
		dev_err(dd->dev, "unable to alloc pages.\n");
		return -ENOMEM;
	}

	return 0;
}

static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd)
{
	/* The order must match the __get_free_pages() call in buff_init. */
	free_pages((unsigned long)dd->buf, ATMEL_AES_BUFFER_ORDER);
}
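/*
 * DMA channel filter for the legacy (non-DT) path: a channel matches when it
 * belongs to the DMA controller named in the at_hdmac slave data.
 */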
static bool atmel_aes_filter(struct dma_chan *chan, void *slave)
{
	struct at_dma_slave *sl = slave;

	if (sl && sl->dma_dev == chan->device->dev) {
		chan->private = sl;
		return true;
	}

	return false;
}

static int atmel_aes_dma_init(struct atmel_aes_dev *dd,
			      struct crypto_platform_data *pdata)
{
	struct at_dma_slave *slave;
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/* Try to grab 2 DMA channels */
	slave = &pdata->dma_slave->rxdata;
	dd->src.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter,
							slave, dd->dev, "tx");
	if (!dd->src.chan)
		goto err_dma_in;

	slave = &pdata->dma_slave->txdata;
	dd->dst.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter,
							slave, dd->dev, "rx");
	if (!dd->dst.chan)
		goto err_dma_out;

	return 0;

err_dma_out:
	dma_release_channel(dd->src.chan);
err_dma_in:
	dev_warn(dd->dev, "no DMA channel available\n");
	return -ENODEV;
}

static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd)
{
	dma_release_channel(dd->dst.chan);
	dma_release_channel(dd->src.chan);
}
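/*
 * Bottom halves: queue_task restarts processing of the request queue, while
 * done_task resumes the state machine of the request that just completed.
 */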
static void atmel_aes_queue_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

	atmel_aes_handle_queue(dd, NULL);
}

static void atmel_aes_done_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

	dd->is_async = true;
	(void)dd->resume(dd);
}
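/*
 * Interrupt handler: mask the reported interrupt sources and defer the real
 * work to done_task; an interrupt with no active request only triggers a
 * warning.
 */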
static irqreturn_t atmel_aes_irq(int irq, void *dev_id)
{
	struct atmel_aes_dev *aes_dd = dev_id;
	u32 reg;

	reg = atmel_aes_read(aes_dd, AES_ISR);
	if (reg & atmel_aes_read(aes_dd, AES_IMR)) {
		atmel_aes_write(aes_dd, AES_IDR, reg);
		if (AES_FLAGS_BUSY & aes_dd->flags)
			tasklet_schedule(&aes_dd->done_task);
		else
			dev_warn(aes_dd->dev, "AES interrupt when no active requests.\n");
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}

static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd)
{
	int i;

#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
	if (dd->caps.has_authenc)
		for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++)
			crypto_unregister_aead(&aes_authenc_algs[i]);
#endif

	if (dd->caps.has_xts)
		crypto_unregister_alg(&aes_xts_alg);

	if (dd->caps.has_gcm)
		crypto_unregister_aead(&aes_gcm_alg);

	if (dd->caps.has_cfb64)
		crypto_unregister_alg(&aes_cfb64_alg);

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
		crypto_unregister_alg(&aes_algs[i]);
}
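/*
 * Register every algorithm supported by this hardware instance; on failure,
 * the error path below unwinds the registrations in reverse order.
 */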
static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
{
	int err, i, j;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		err = crypto_register_alg(&aes_algs[i]);
		if (err)
			goto err_aes_algs;
	}

	if (dd->caps.has_cfb64) {
		err = crypto_register_alg(&aes_cfb64_alg);
		if (err)
			goto err_aes_cfb64_alg;
	}

	if (dd->caps.has_gcm) {
		err = crypto_register_aead(&aes_gcm_alg);
		if (err)
			goto err_aes_gcm_alg;
	}

	if (dd->caps.has_xts) {
		err = crypto_register_alg(&aes_xts_alg);
		if (err)
			goto err_aes_xts_alg;
	}

#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
	if (dd->caps.has_authenc) {
		for (i = 0; i < ARRAY_SIZE(aes_authenc_algs); i++) {
			err = crypto_register_aead(&aes_authenc_algs[i]);
			if (err)
				goto err_aes_authenc_alg;
		}
	}
#endif

	return 0;

#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
	/* i = ARRAY_SIZE(aes_authenc_algs); */
err_aes_authenc_alg:
	for (j = 0; j < i; j++)
		crypto_unregister_aead(&aes_authenc_algs[j]);
	crypto_unregister_alg(&aes_xts_alg);
#endif
err_aes_xts_alg:
	crypto_unregister_aead(&aes_gcm_alg);
err_aes_gcm_alg:
	crypto_unregister_alg(&aes_cfb64_alg);
err_aes_cfb64_alg:
	i = ARRAY_SIZE(aes_algs);
err_aes_algs:
	for (j = 0; j < i; j++)
		crypto_unregister_alg(&aes_algs[j]);

	return err;
}
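/*
 * Derive the capability set from the major number of the IP version read
 * back from the hardware.
 */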
static void atmel_aes_get_cap(struct atmel_aes_dev *dd)
{
	dd->caps.has_dualbuff = 0;
	dd->caps.has_cfb64 = 0;
	dd->caps.has_ctr32 = 0;
	dd->caps.has_gcm = 0;
	dd->caps.has_xts = 0;
	dd->caps.has_authenc = 0;
	dd->caps.max_burst_size = 1;

	/* keep only major version number */
	switch (dd->hw_version & 0xff0) {
	case 0x500:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.has_ctr32 = 1;
		dd->caps.has_gcm = 1;
		dd->caps.has_xts = 1;
		dd->caps.has_authenc = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x200:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.has_ctr32 = 1;
		dd->caps.has_gcm = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x130:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x120:
		break;
	default:
		dev_warn(dd->dev,
			 "Unmanaged aes version, set minimum capabilities\n");
		break;
	}
}
#if defined(CONFIG_OF)
static const struct of_device_id atmel_aes_dt_ids[] = {
	{ .compatible = "atmel,at91sam9g46-aes" },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, atmel_aes_dt_ids);

static struct crypto_platform_data *atmel_aes_of_init(struct platform_device *pdev)
{
	struct device_node *np = pdev->dev.of_node;
	struct crypto_platform_data *pdata;

	if (!np) {
		dev_err(&pdev->dev, "device node not found\n");
		return ERR_PTR(-EINVAL);
	}

	pdata = devm_kzalloc(&pdev->dev, sizeof(*pdata), GFP_KERNEL);
	if (!pdata) {
		dev_err(&pdev->dev, "could not allocate memory for pdata\n");
		return ERR_PTR(-ENOMEM);
	}

	pdata->dma_slave = devm_kzalloc(&pdev->dev,
					sizeof(*(pdata->dma_slave)),
					GFP_KERNEL);
	if (!pdata->dma_slave) {
		dev_err(&pdev->dev, "could not allocate memory for dma_slave\n");
		devm_kfree(&pdev->dev, pdata);
		return ERR_PTR(-ENOMEM);
	}

	return pdata;
}
#else
static inline struct crypto_platform_data *atmel_aes_of_init(struct platform_device *pdev)
{
	return ERR_PTR(-EINVAL);
}
#endif
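/*
 * Probe: gather resources (platform data or DT, MMIO, IRQ, clock), read the
 * hardware version, then allocate the bounce buffer and DMA channels before
 * registering the algorithms.
 */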
static int atmel_aes_probe(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;
	struct crypto_platform_data *pdata;
	struct device *dev = &pdev->dev;
	struct resource *aes_res;
	int err;

	pdata = pdev->dev.platform_data;
	if (!pdata) {
		pdata = atmel_aes_of_init(pdev);
		if (IS_ERR(pdata)) {
			err = PTR_ERR(pdata);
			goto aes_dd_err;
		}
	}

	if (!pdata->dma_slave) {
		err = -ENXIO;
		goto aes_dd_err;
	}

	aes_dd = devm_kzalloc(&pdev->dev, sizeof(*aes_dd), GFP_KERNEL);
	if (aes_dd == NULL) {
		dev_err(dev, "unable to alloc data struct.\n");
		err = -ENOMEM;
		goto aes_dd_err;
	}

	aes_dd->dev = dev;

	platform_set_drvdata(pdev, aes_dd);

	INIT_LIST_HEAD(&aes_dd->list);
	spin_lock_init(&aes_dd->lock);

	tasklet_init(&aes_dd->done_task, atmel_aes_done_task,
		     (unsigned long)aes_dd);
	tasklet_init(&aes_dd->queue_task, atmel_aes_queue_task,
		     (unsigned long)aes_dd);

	crypto_init_queue(&aes_dd->queue, ATMEL_AES_QUEUE_LENGTH);

	/* Get the base address */
	aes_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!aes_res) {
		dev_err(dev, "no MEM resource info\n");
		err = -ENODEV;
		goto res_err;
	}
	aes_dd->phys_base = aes_res->start;

	/* Get the IRQ */
	aes_dd->irq = platform_get_irq(pdev, 0);
	if (aes_dd->irq < 0) {
		dev_err(dev, "no IRQ resource info\n");
		err = aes_dd->irq;
		goto res_err;
	}

	err = devm_request_irq(&pdev->dev, aes_dd->irq, atmel_aes_irq,
			       IRQF_SHARED, "atmel-aes", aes_dd);
	if (err) {
		dev_err(dev, "unable to request aes irq.\n");
		goto res_err;
	}

	/* Initializing the clock */
	aes_dd->iclk = devm_clk_get(&pdev->dev, "aes_clk");
	if (IS_ERR(aes_dd->iclk)) {
		dev_err(dev, "clock initialization failed.\n");
		err = PTR_ERR(aes_dd->iclk);
		goto res_err;
	}

	aes_dd->io_base = devm_ioremap_resource(&pdev->dev, aes_res);
	if (IS_ERR(aes_dd->io_base)) {
		dev_err(dev, "can't ioremap\n");
		err = PTR_ERR(aes_dd->io_base);
		goto res_err;
	}

	err = clk_prepare(aes_dd->iclk);
	if (err)
		goto res_err;

	err = atmel_aes_hw_version_init(aes_dd);
	if (err)
		goto iclk_unprepare;

	atmel_aes_get_cap(aes_dd);

#ifdef CONFIG_CRYPTO_DEV_ATMEL_AUTHENC
	if (aes_dd->caps.has_authenc && !atmel_sha_authenc_is_ready()) {
		err = -EPROBE_DEFER;
		goto iclk_unprepare;
	}
#endif

	err = atmel_aes_buff_init(aes_dd);
	if (err)
		goto err_aes_buff;

	err = atmel_aes_dma_init(aes_dd, pdata);
	if (err)
		goto err_aes_dma;

	spin_lock(&atmel_aes.lock);
	list_add_tail(&aes_dd->list, &atmel_aes.dev_list);
	spin_unlock(&atmel_aes.lock);

	err = atmel_aes_register_algs(aes_dd);
	if (err)
		goto err_algs;

	dev_info(dev, "Atmel AES - Using %s, %s for DMA transfers\n",
		 dma_chan_name(aes_dd->src.chan),
		 dma_chan_name(aes_dd->dst.chan));

	return 0;

err_algs:
	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);
	atmel_aes_dma_cleanup(aes_dd);
err_aes_dma:
	atmel_aes_buff_cleanup(aes_dd);
err_aes_buff:
iclk_unprepare:
	clk_unprepare(aes_dd->iclk);
res_err:
	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);
aes_dd_err:
	if (err != -EPROBE_DEFER)
		dev_err(dev, "initialization failed.\n");

	return err;
}
static int atmel_aes_remove(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;

	aes_dd = platform_get_drvdata(pdev);
	if (!aes_dd)
		return -ENODEV;

	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);

	atmel_aes_unregister_algs(aes_dd);

	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);

	atmel_aes_dma_cleanup(aes_dd);
	atmel_aes_buff_cleanup(aes_dd);

	clk_unprepare(aes_dd->iclk);

	return 0;
}

static struct platform_driver atmel_aes_driver = {
	.probe		= atmel_aes_probe,
	.remove		= atmel_aes_remove,
	.driver		= {
		.name		= "atmel_aes",
		.of_match_table	= of_match_ptr(atmel_aes_dt_ids),
	},
};

module_platform_driver(atmel_aes_driver);

MODULE_DESCRIPTION("Atmel AES hw acceleration support.");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Nicolas Royer - Eukréa Electromatique");