/*
 * Support for ATMEL AES HW acceleration.
 *
 * Copyright (c) 2012 Eukréa Electromatique - ATMEL
 * Author: Nicolas Royer <nicolas@eukrea.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 *
 * Some ideas are from omap-aes.c driver.
 */
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/err.h>
#include <linux/clk.h>
#include <linux/io.h>
#include <linux/hw_random.h>
#include <linux/platform_device.h>

#include <linux/device.h>
#include <linux/init.h>
#include <linux/errno.h>
#include <linux/interrupt.h>
#include <linux/irq.h>
#include <linux/scatterlist.h>
#include <linux/dma-mapping.h>
#include <linux/of_device.h>
#include <linux/delay.h>
#include <linux/crypto.h>
#include <crypto/scatterwalk.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/internal/aead.h>
#include <linux/platform_data/crypto-atmel.h>
#include <dt-bindings/dma/at91.h>
#include "atmel-aes-regs.h"
#define ATMEL_AES_PRIORITY	300

#define ATMEL_AES_BUFFER_ORDER	2
#define ATMEL_AES_BUFFER_SIZE	(PAGE_SIZE << ATMEL_AES_BUFFER_ORDER)

#define CFB8_BLOCK_SIZE		1
#define CFB16_BLOCK_SIZE	2
#define CFB32_BLOCK_SIZE	4
#define CFB64_BLOCK_SIZE	8

#define SIZE_IN_WORDS(x)	((x) >> 2)
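/* e.g. SIZE_IN_WORDS(AES_BLOCK_SIZE) == 4: converts a byte count into a count of 32-bit words. */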
/* Reserve bits [18:16] [14:12] [1:0] for mode (same as for AES_MR) */
#define AES_FLAGS_ENCRYPT	AES_MR_CYPHER_ENC
#define AES_FLAGS_GTAGEN	AES_MR_GTAGEN
#define AES_FLAGS_OPMODE_MASK	(AES_MR_OPMOD_MASK | AES_MR_CFBS_MASK)
#define AES_FLAGS_ECB		AES_MR_OPMOD_ECB
#define AES_FLAGS_CBC		AES_MR_OPMOD_CBC
#define AES_FLAGS_OFB		AES_MR_OPMOD_OFB
#define AES_FLAGS_CFB128	(AES_MR_OPMOD_CFB | AES_MR_CFBS_128b)
#define AES_FLAGS_CFB64		(AES_MR_OPMOD_CFB | AES_MR_CFBS_64b)
#define AES_FLAGS_CFB32		(AES_MR_OPMOD_CFB | AES_MR_CFBS_32b)
#define AES_FLAGS_CFB16		(AES_MR_OPMOD_CFB | AES_MR_CFBS_16b)
#define AES_FLAGS_CFB8		(AES_MR_OPMOD_CFB | AES_MR_CFBS_8b)
#define AES_FLAGS_CTR		AES_MR_OPMOD_CTR
#define AES_FLAGS_GCM		AES_MR_OPMOD_GCM

#define AES_FLAGS_MODE_MASK	(AES_FLAGS_OPMODE_MASK |	\
				 AES_FLAGS_ENCRYPT |		\
				 AES_FLAGS_GTAGEN)

#define AES_FLAGS_INIT		BIT(2)
#define AES_FLAGS_BUSY		BIT(3)
#define AES_FLAGS_DUMP_REG	BIT(4)

#define AES_FLAGS_PERSISTENT	(AES_FLAGS_INIT | AES_FLAGS_BUSY)
#define ATMEL_AES_QUEUE_LENGTH	50

#define ATMEL_AES_DMA_THRESHOLD		256
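/*
 * Requests of at least ATMEL_AES_DMA_THRESHOLD bytes go through the two DMA
 * channels; shorter ones are fed to IDATAR by the CPU, except for the
 * CFB8/16/32/64 variants, which always use DMA (see atmel_aes_start()).
 */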
struct atmel_aes_caps {
	bool	has_dualbuff;
	bool	has_cfb64;
	bool	has_ctr32;
	bool	has_gcm;
	u32	max_burst_size;
};

struct atmel_aes_dev;

typedef int (*atmel_aes_fn_t)(struct atmel_aes_dev *);

struct atmel_aes_base_ctx {
	struct atmel_aes_dev	*dd;
	atmel_aes_fn_t		start;
	int			keylen;
	u32			key[AES_KEYSIZE_256 / sizeof(u32)];
	u16			block_size;
};

struct atmel_aes_ctx {
	struct atmel_aes_base_ctx	base;
};

struct atmel_aes_ctr_ctx {
	struct atmel_aes_base_ctx	base;

	u32			iv[AES_BLOCK_SIZE / sizeof(u32)];
	size_t			offset;
	struct scatterlist	src[2];
	struct scatterlist	dst[2];
};

struct atmel_aes_gcm_ctx {
	struct atmel_aes_base_ctx	base;

	struct scatterlist	src[2];
	struct scatterlist	dst[2];

	u32			j0[AES_BLOCK_SIZE / sizeof(u32)];
	u32			tag[AES_BLOCK_SIZE / sizeof(u32)];
	u32			ghash[AES_BLOCK_SIZE / sizeof(u32)];

	size_t			textlen;

	const u32		*ghash_in;
	u32			*ghash_out;
	atmel_aes_fn_t		ghash_resume;
};

struct atmel_aes_reqctx {
	unsigned long		mode;
};

struct atmel_aes_dma {
	struct dma_chan		*chan;
	struct scatterlist	*sg;
	int			nents;
	unsigned int		remainder;
	unsigned int		sg_len;
};

struct atmel_aes_dev {
	struct list_head	list;
	unsigned long		phys_base;
	void __iomem		*io_base;

	struct crypto_async_request	*areq;
	struct atmel_aes_base_ctx	*ctx;

	bool			is_async;
	atmel_aes_fn_t		resume;
	atmel_aes_fn_t		cpu_transfer_complete;

	struct device		*dev;
	struct clk		*iclk;
	int			irq;

	unsigned long		flags;

	spinlock_t		lock;
	struct crypto_queue	queue;

	struct tasklet_struct	done_task;
	struct tasklet_struct	queue_task;

	size_t			total;
	size_t			datalen;
	u32			*data;

	struct atmel_aes_dma	src;
	struct atmel_aes_dma	dst;

	size_t			buflen;
	void			*buf;
	struct scatterlist	aligned_sg;
	struct scatterlist	*real_dst;

	struct atmel_aes_caps	caps;

	u32			hw_version;
};

struct atmel_aes_drv {
	struct list_head	dev_list;
	spinlock_t		lock;
};

static struct atmel_aes_drv atmel_aes = {
	.dev_list = LIST_HEAD_INIT(atmel_aes.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(atmel_aes.lock),
};
#ifdef VERBOSE_DEBUG
static const char *atmel_aes_reg_name(u32 offset, char *tmp, size_t sz)
{
	switch (offset) {
	case AES_KEYWR(0) ... AES_KEYWR(7):
		snprintf(tmp, sz, "KEYWR[%u]", (offset - AES_KEYWR(0)) >> 2);
		break;

	case AES_IDATAR(0) ... AES_IDATAR(3):
		snprintf(tmp, sz, "IDATAR[%u]", (offset - AES_IDATAR(0)) >> 2);
		break;

	case AES_ODATAR(0) ... AES_ODATAR(3):
		snprintf(tmp, sz, "ODATAR[%u]", (offset - AES_ODATAR(0)) >> 2);
		break;

	case AES_IVR(0) ... AES_IVR(3):
		snprintf(tmp, sz, "IVR[%u]", (offset - AES_IVR(0)) >> 2);
		break;

	case AES_GHASHR(0) ... AES_GHASHR(3):
		snprintf(tmp, sz, "GHASHR[%u]", (offset - AES_GHASHR(0)) >> 2);
		break;

	case AES_TAGR(0) ... AES_TAGR(3):
		snprintf(tmp, sz, "TAGR[%u]", (offset - AES_TAGR(0)) >> 2);
		break;

	case AES_GCMHR(0) ... AES_GCMHR(3):
		snprintf(tmp, sz, "GCMHR[%u]", (offset - AES_GCMHR(0)) >> 2);
		break;

	default:
		snprintf(tmp, sz, "0x%02x", offset);
		break;
	}

	return tmp;
}
#endif /* VERBOSE_DEBUG */

/* Shared functions */
static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset)
{
	u32 value = readl_relaxed(dd->io_base + offset);

#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "read 0x%08x from %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	return value;
}

static inline void atmel_aes_write(struct atmel_aes_dev *dd,
				   u32 offset, u32 value)
{
#ifdef VERBOSE_DEBUG
	if (dd->flags & AES_FLAGS_DUMP_REG) {
		char tmp[16];

		dev_vdbg(dd->dev, "write 0x%08x into %s\n", value,
			 atmel_aes_reg_name(offset, tmp, sizeof(tmp)));
	}
#endif /* VERBOSE_DEBUG */

	writel_relaxed(value, dd->io_base + offset);
}
static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
			     u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		*value = atmel_aes_read(dd, offset);
}

static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
			      const u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		atmel_aes_write(dd, offset, *value);
}

static inline void atmel_aes_read_block(struct atmel_aes_dev *dd, u32 offset,
					u32 *value)
{
	atmel_aes_read_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}

static inline void atmel_aes_write_block(struct atmel_aes_dev *dd, u32 offset,
					 const u32 *value)
{
	atmel_aes_write_n(dd, offset, value, SIZE_IN_WORDS(AES_BLOCK_SIZE));
}

static inline int atmel_aes_wait_for_data_ready(struct atmel_aes_dev *dd,
						atmel_aes_fn_t resume)
{
	u32 isr = atmel_aes_read(dd, AES_ISR);

	if (unlikely(isr & AES_INT_DATARDY))
		return resume(dd);

	dd->resume = resume;
	atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
	return -EINPROGRESS;
}
static inline size_t atmel_aes_padlen(size_t len, size_t block_size)
{
	len &= block_size - 1;
	return len ? block_size - len : 0;
}
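/* e.g. atmel_aes_padlen(20, AES_BLOCK_SIZE) == 12: bytes needed to reach the
 * next block boundary, or 0 when the length is already block aligned. */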
static inline struct aead_request *
aead_request_cast(struct crypto_async_request *req)
{
	return container_of(req, struct aead_request, base);
}

static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_base_ctx *ctx)
{
	struct atmel_aes_dev *aes_dd = NULL;
	struct atmel_aes_dev *tmp;

	spin_lock_bh(&atmel_aes.lock);
	if (!ctx->dd) {
		list_for_each_entry(tmp, &atmel_aes.dev_list, list) {
			aes_dd = tmp;
			break;
		}
		ctx->dd = aes_dd;
	} else {
		aes_dd = ctx->dd;
	}
	spin_unlock_bh(&atmel_aes.lock);

	return aes_dd;
}
static int atmel_aes_hw_init(struct atmel_aes_dev *dd)
{
	int err;

	err = clk_enable(dd->iclk);
	if (err)
		return err;

	if (!(dd->flags & AES_FLAGS_INIT)) {
		atmel_aes_write(dd, AES_CR, AES_CR_SWRST);
		atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET);
		dd->flags |= AES_FLAGS_INIT;
	}

	return 0;
}

static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd)
{
	return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff;
}

static int atmel_aes_hw_version_init(struct atmel_aes_dev *dd)
{
	int err;

	err = atmel_aes_hw_init(dd);
	if (err)
		return err;

	dd->hw_version = atmel_aes_get_version(dd);

	dev_info(dd->dev, "version: 0x%x\n", dd->hw_version);

	clk_disable(dd->iclk);
	return 0;
}

static inline void atmel_aes_set_mode(struct atmel_aes_dev *dd,
				      const struct atmel_aes_reqctx *rctx)
{
	/* Clear all but persistent flags and set request flags. */
	dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode;
}

static inline bool atmel_aes_is_encrypt(const struct atmel_aes_dev *dd)
{
	return (dd->flags & AES_FLAGS_ENCRYPT);
}

static inline int atmel_aes_complete(struct atmel_aes_dev *dd, int err)
{
	clk_disable(dd->iclk);
	dd->flags &= ~AES_FLAGS_BUSY;

	if (dd->is_async)
		dd->areq->complete(dd->areq, err);

	tasklet_schedule(&dd->queue_task);

	return err;
}
static void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma,
				 const u32 *iv)
{
	u32 valmr = 0;

	/* MR register must be set before IV registers */
	if (dd->ctx->keylen == AES_KEYSIZE_128)
		valmr |= AES_MR_KEYSIZE_128;
	else if (dd->ctx->keylen == AES_KEYSIZE_192)
		valmr |= AES_MR_KEYSIZE_192;
	else
		valmr |= AES_MR_KEYSIZE_256;

	valmr |= dd->flags & AES_FLAGS_MODE_MASK;

	if (use_dma) {
		valmr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			valmr |= AES_MR_DUALBUFF;
	} else {
		valmr |= AES_MR_SMOD_AUTO;
	}

	atmel_aes_write(dd, AES_MR, valmr);

	atmel_aes_write_n(dd, AES_KEYWR(0), dd->ctx->key,
			  SIZE_IN_WORDS(dd->ctx->keylen));

	if (iv && (valmr & AES_MR_OPMOD_MASK) != AES_MR_OPMOD_ECB)
		atmel_aes_write_block(dd, AES_IVR(0), iv);
}
static int atmel_aes_cpu_transfer(struct atmel_aes_dev *dd)
{
	int err = 0;
	u32 isr;

	for (;;) {
		atmel_aes_read_block(dd, AES_ODATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		if (dd->datalen < AES_BLOCK_SIZE)
			break;

		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_cpu_transfer;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	if (!sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				 dd->buf, dd->total))
		err = -EINVAL;

	if (err)
		return atmel_aes_complete(dd, err);

	return dd->cpu_transfer_complete(dd);
}
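/*
 * CPU (PIO) path: the whole request is bounced through dd->buf;
 * atmel_aes_cpu_start() writes the first block, then atmel_aes_cpu_transfer()
 * feeds the remaining ones each time DATRDY rises.
 */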
static int atmel_aes_cpu_start(struct atmel_aes_dev *dd,
			       struct scatterlist *src,
			       struct scatterlist *dst,
			       size_t len,
			       atmel_aes_fn_t resume)
{
	size_t padlen = atmel_aes_padlen(len, AES_BLOCK_SIZE);

	if (unlikely(len == 0))
		return -EINVAL;

	sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);

	dd->total = len;
	dd->real_dst = dst;
	dd->cpu_transfer_complete = resume;
	dd->datalen = len + padlen;
	dd->data = (u32 *)dd->buf;
	atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_cpu_transfer);
}
static void atmel_aes_dma_callback(void *data);
static bool atmel_aes_check_aligned(struct atmel_aes_dev *dd,
				    struct scatterlist *sg,
				    size_t len,
				    struct atmel_aes_dma *dma)
{
	int nents;

	if (!IS_ALIGNED(len, dd->ctx->block_size))
		return false;

	for (nents = 0; sg; sg = sg_next(sg), ++nents) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return false;

		if (len <= sg->length) {
			if (!IS_ALIGNED(len, dd->ctx->block_size))
				return false;

			dma->nents = nents+1;
			dma->remainder = sg->length - len;
			sg->length = len;
			return true;
		}

		if (!IS_ALIGNED(sg->length, dd->ctx->block_size))
			return false;

		len -= sg->length;
	}

	return false;
}
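/*
 * atmel_aes_check_aligned() may shorten the last scatterlist entry so that
 * the mapped length is block aligned; atmel_aes_restore_sg() puts the
 * truncated remainder back once the transfer is done.
 */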
static inline void atmel_aes_restore_sg(const struct atmel_aes_dma *dma)
{
	struct scatterlist *sg = dma->sg;
	int nents = dma->nents;

	if (!dma->remainder)
		return;

	while (--nents > 0 && sg)
		sg = sg_next(sg);

	if (!sg)
		return;

	sg->length += dma->remainder;
}
static int atmel_aes_map(struct atmel_aes_dev *dd,
			 struct scatterlist *src,
			 struct scatterlist *dst,
			 size_t len)
{
	bool src_aligned, dst_aligned;
	size_t padlen;

	dd->total = len;
	dd->src.sg = src;
	dd->dst.sg = dst;
	dd->real_dst = dst;

	src_aligned = atmel_aes_check_aligned(dd, src, len, &dd->src);
	if (src == dst)
		dst_aligned = src_aligned;
	else
		dst_aligned = atmel_aes_check_aligned(dd, dst, len, &dd->dst);
	if (!src_aligned || !dst_aligned) {
		padlen = atmel_aes_padlen(len, dd->ctx->block_size);

		if (dd->buflen < len + padlen)
			return -ENOMEM;

		if (!src_aligned) {
			sg_copy_to_buffer(src, sg_nents(src), dd->buf, len);
			dd->src.sg = &dd->aligned_sg;
			dd->src.nents = 1;
			dd->src.remainder = 0;
		}

		if (!dst_aligned) {
			dd->dst.sg = &dd->aligned_sg;
			dd->dst.nents = 1;
			dd->dst.remainder = 0;
		}

		sg_init_table(&dd->aligned_sg, 1);
		sg_set_buf(&dd->aligned_sg, dd->buf, len + padlen);
	}

	if (dd->src.sg == dd->dst.sg) {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_BIDIRECTIONAL);
		dd->dst.sg_len = dd->src.sg_len;
		if (!dd->src.sg_len)
			return -EFAULT;
	} else {
		dd->src.sg_len = dma_map_sg(dd->dev, dd->src.sg, dd->src.nents,
					    DMA_TO_DEVICE);
		if (!dd->src.sg_len)
			return -EFAULT;

		dd->dst.sg_len = dma_map_sg(dd->dev, dd->dst.sg, dd->dst.nents,
					    DMA_FROM_DEVICE);
		if (!dd->dst.sg_len) {
			dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
				     DMA_TO_DEVICE);
			return -EFAULT;
		}
	}

	return 0;
}
static void atmel_aes_unmap(struct atmel_aes_dev *dd)
{
	if (dd->src.sg == dd->dst.sg) {
		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_BIDIRECTIONAL);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	} else {
		dma_unmap_sg(dd->dev, dd->dst.sg, dd->dst.nents,
			     DMA_FROM_DEVICE);

		if (dd->dst.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->dst);

		dma_unmap_sg(dd->dev, dd->src.sg, dd->src.nents,
			     DMA_TO_DEVICE);

		if (dd->src.sg != &dd->aligned_sg)
			atmel_aes_restore_sg(&dd->src);
	}

	if (dd->dst.sg == &dd->aligned_sg)
		sg_copy_from_buffer(dd->real_dst, sg_nents(dd->real_dst),
				    dd->buf, dd->total);
}
static int atmel_aes_dma_transfer_start(struct atmel_aes_dev *dd,
					enum dma_slave_buswidth addr_width,
					enum dma_transfer_direction dir,
					u32 maxburst)
{
	struct dma_async_tx_descriptor *desc;
	struct dma_slave_config config;
	dma_async_tx_callback callback;
	struct atmel_aes_dma *dma;
	int err;

	memset(&config, 0, sizeof(config));
	config.direction = dir;
	config.src_addr_width = addr_width;
	config.dst_addr_width = addr_width;
	config.src_maxburst = maxburst;
	config.dst_maxburst = maxburst;

	switch (dir) {
	case DMA_MEM_TO_DEV:
		dma = &dd->src;
		callback = NULL;
		config.dst_addr = dd->phys_base + AES_IDATAR(0);
		break;

	case DMA_DEV_TO_MEM:
		dma = &dd->dst;
		callback = atmel_aes_dma_callback;
		config.src_addr = dd->phys_base + AES_ODATAR(0);
		break;

	default:
		return -EINVAL;
	}

	err = dmaengine_slave_config(dma->chan, &config);
	if (err)
		return err;

	desc = dmaengine_prep_slave_sg(dma->chan, dma->sg, dma->sg_len, dir,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -ENOMEM;

	desc->callback = callback;
	desc->callback_param = dd;
	dmaengine_submit(desc);
	dma_async_issue_pending(dma->chan);

	return 0;
}
static void atmel_aes_dma_transfer_stop(struct atmel_aes_dev *dd,
					enum dma_transfer_direction dir)
{
	struct atmel_aes_dma *dma;

	switch (dir) {
	case DMA_MEM_TO_DEV:
		dma = &dd->src;
		break;

	case DMA_DEV_TO_MEM:
		dma = &dd->dst;
		break;

	default:
		return;
	}

	dmaengine_terminate_all(dma->chan);
}
static int atmel_aes_dma_start(struct atmel_aes_dev *dd,
			       struct scatterlist *src,
			       struct scatterlist *dst,
			       size_t len,
			       atmel_aes_fn_t resume)
{
	enum dma_slave_buswidth addr_width;
	u32 maxburst;
	int err;

	switch (dd->ctx->block_size) {
	case CFB8_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_1_BYTE;
		maxburst = 1;
		break;

	case CFB16_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_2_BYTES;
		maxburst = 1;
		break;

	case CFB32_BLOCK_SIZE:
	case CFB64_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		maxburst = 1;
		break;

	case AES_BLOCK_SIZE:
		addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		maxburst = dd->caps.max_burst_size;
		break;

	default:
		err = -EINVAL;
		goto exit;
	}

	err = atmel_aes_map(dd, src, dst, len);
	if (err)
		goto exit;

	dd->resume = resume;

	/* Set output DMA transfer first */
	err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_DEV_TO_MEM,
					   maxburst);
	if (err)
		goto unmap;

	/* Then set input DMA transfer */
	err = atmel_aes_dma_transfer_start(dd, addr_width, DMA_MEM_TO_DEV,
					   maxburst);
	if (err)
		goto output_transfer_stop;

	return -EINPROGRESS;

output_transfer_stop:
	atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM);
unmap:
	atmel_aes_unmap(dd);
exit:
	return atmel_aes_complete(dd, err);
}

static void atmel_aes_dma_stop(struct atmel_aes_dev *dd)
{
	atmel_aes_dma_transfer_stop(dd, DMA_MEM_TO_DEV);
	atmel_aes_dma_transfer_stop(dd, DMA_DEV_TO_MEM);
	atmel_aes_unmap(dd);
}

static void atmel_aes_dma_callback(void *data)
{
	struct atmel_aes_dev *dd = data;

	atmel_aes_dma_stop(dd);
	dd->is_async = true;
	(void)dd->resume(dd);
}
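/*
 * One request is processed at a time (AES_FLAGS_BUSY). Every asynchronous
 * step stores its continuation in dd->resume; the done tasklet or the DMA
 * callback invokes it when the hardware signals completion.
 */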
static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
				  struct crypto_async_request *new_areq)
{
	struct crypto_async_request *areq, *backlog;
	struct atmel_aes_base_ctx *ctx;
	unsigned long flags;
	int err, ret = 0;

	spin_lock_irqsave(&dd->lock, flags);
	if (new_areq)
		ret = crypto_enqueue_request(&dd->queue, new_areq);
	if (dd->flags & AES_FLAGS_BUSY) {
		spin_unlock_irqrestore(&dd->lock, flags);
		return ret;
	}
	backlog = crypto_get_backlog(&dd->queue);
	areq = crypto_dequeue_request(&dd->queue);
	if (areq)
		dd->flags |= AES_FLAGS_BUSY;
	spin_unlock_irqrestore(&dd->lock, flags);

	if (!areq)
		return ret;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	ctx = crypto_tfm_ctx(areq->tfm);

	dd->areq = areq;
	dd->ctx = ctx;
	dd->is_async = (areq != new_areq);

	err = ctx->start(dd);
	return (dd->is_async) ? ret : err;
}
/* AES async block ciphers */

static int atmel_aes_transfer_complete(struct atmel_aes_dev *dd)
{
	return atmel_aes_complete(dd, 0);
}

static int atmel_aes_start(struct atmel_aes_dev *dd)
{
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
	bool use_dma = (req->nbytes >= ATMEL_AES_DMA_THRESHOLD ||
			dd->ctx->block_size != AES_BLOCK_SIZE);
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	atmel_aes_write_ctrl(dd, use_dma, req->info);
	if (use_dma)
		return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes,
					   atmel_aes_transfer_complete);

	return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes,
				   atmel_aes_transfer_complete);
}
static inline struct atmel_aes_ctr_ctx *
atmel_aes_ctr_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_ctr_ctx, base);
}
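/*
 * The IP increments only the least-significant word of the counter block:
 * 32 bits with the ctr32 capability, 16 bits without it. When a request
 * would wrap that word, it is split ("fragmented") at the overflow point and
 * the full 128-bit counter is carried over by software.
 */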
static int atmel_aes_ctr_transfer(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	struct scatterlist *src, *dst;
	u32 ctr, blocks;
	size_t datalen;
	bool use_dma, fragmented = false;

	/* Check for transfer completion. */
	ctx->offset += dd->total;
	if (ctx->offset >= req->nbytes)
		return atmel_aes_transfer_complete(dd);

	/* Compute data length. */
	datalen = req->nbytes - ctx->offset;
	blocks = DIV_ROUND_UP(datalen, AES_BLOCK_SIZE);
	ctr = be32_to_cpu(ctx->iv[3]);
	if (dd->caps.has_ctr32) {
		/* Check 32bit counter overflow. */
		u32 start = ctr;
		u32 end = start + blocks - 1;

		if (end < start) {
			ctr |= 0xffffffff;
			datalen = AES_BLOCK_SIZE * -start;
			fragmented = true;
		}
	} else {
		/* Check 16bit counter overflow. */
		u16 start = ctr & 0xffff;
		u16 end = start + (u16)blocks - 1;

		if (blocks >> 16 || end < start) {
			ctr |= 0xffff;
			datalen = AES_BLOCK_SIZE * (0x10000-start);
			fragmented = true;
		}
	}
	use_dma = (datalen >= ATMEL_AES_DMA_THRESHOLD);

	/* Jump to offset. */
	src = scatterwalk_ffwd(ctx->src, req->src, ctx->offset);
	dst = ((req->src == req->dst) ? src :
	       scatterwalk_ffwd(ctx->dst, req->dst, ctx->offset));

	/* Configure hardware. */
	atmel_aes_write_ctrl(dd, use_dma, ctx->iv);
	if (unlikely(fragmented)) {
		/*
		 * Increment the counter manually to cope with the hardware
		 * counter overflow.
		 */
		ctx->iv[3] = cpu_to_be32(ctr);
		crypto_inc((u8 *)ctx->iv, AES_BLOCK_SIZE);
	}

	if (use_dma)
		return atmel_aes_dma_start(dd, src, dst, datalen,
					   atmel_aes_ctr_transfer);

	return atmel_aes_cpu_start(dd, src, dst, datalen,
				   atmel_aes_ctr_transfer);
}
static int atmel_aes_ctr_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_ctr_ctx *ctx = atmel_aes_ctr_ctx_cast(dd->ctx);
	struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq);
	struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	memcpy(ctx->iv, req->info, AES_BLOCK_SIZE);
	ctx->offset = 0;
	dd->total = 0;
	return atmel_aes_ctr_transfer(dd);
}
static int atmel_aes_crypt(struct ablkcipher_request *req, unsigned long mode)
{
	struct atmel_aes_base_ctx *ctx;
	struct atmel_aes_reqctx *rctx;
	struct atmel_aes_dev *dd;

	ctx = crypto_ablkcipher_ctx(crypto_ablkcipher_reqtfm(req));
	switch (mode & AES_FLAGS_OPMODE_MASK) {
	case AES_FLAGS_CFB8:
		ctx->block_size = CFB8_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB16:
		ctx->block_size = CFB16_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB32:
		ctx->block_size = CFB32_BLOCK_SIZE;
		break;

	case AES_FLAGS_CFB64:
		ctx->block_size = CFB64_BLOCK_SIZE;
		break;

	default:
		ctx->block_size = AES_BLOCK_SIZE;
		break;
	}

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	rctx = ablkcipher_request_ctx(req);
	rctx->mode = mode;

	return atmel_aes_handle_queue(dd, &req->base);
}
static int atmel_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct atmel_aes_base_ctx *ctx = crypto_ablkcipher_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256) {
		crypto_ablkcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}
static int atmel_aes_ecb_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_ECB | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ecb_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_ECB);
}

static int atmel_aes_cbc_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CBC | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cbc_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CBC);
}

static int atmel_aes_ofb_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_OFB | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ofb_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_OFB);
}

static int atmel_aes_cfb_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB128 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB128);
}

static int atmel_aes_cfb64_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB64 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb64_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB64);
}

static int atmel_aes_cfb32_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB32 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb32_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB32);
}

static int atmel_aes_cfb16_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB16 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb16_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB16);
}

static int atmel_aes_cfb8_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB8 | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_cfb8_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CFB8);
}

static int atmel_aes_ctr_encrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CTR | AES_FLAGS_ENCRYPT);
}

static int atmel_aes_ctr_decrypt(struct ablkcipher_request *req)
{
	return atmel_aes_crypt(req, AES_FLAGS_CTR);
}
static int atmel_aes_cra_init(struct crypto_tfm *tfm)
{
	struct atmel_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
	ctx->base.start = atmel_aes_start;

	return 0;
}

static int atmel_aes_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct atmel_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx);
	ctx->base.start = atmel_aes_ctr_start;

	return 0;
}

static void atmel_aes_cra_exit(struct crypto_tfm *tfm)
{
}
static struct crypto_alg aes_algs[] = {
{
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "atmel-ecb-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_ecb_encrypt,
		.decrypt	= atmel_aes_ecb_decrypt,
	}
},
{
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "atmel-cbc-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cbc_encrypt,
		.decrypt	= atmel_aes_cbc_decrypt,
	}
},
{
	.cra_name		= "ofb(aes)",
	.cra_driver_name	= "atmel-ofb-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_ofb_encrypt,
		.decrypt	= atmel_aes_ofb_decrypt,
	}
},
{
	.cra_name		= "cfb(aes)",
	.cra_driver_name	= "atmel-cfb-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb_encrypt,
		.decrypt	= atmel_aes_cfb_decrypt,
	}
},
{
	.cra_name		= "cfb32(aes)",
	.cra_driver_name	= "atmel-cfb32-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CFB32_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x3,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb32_encrypt,
		.decrypt	= atmel_aes_cfb32_decrypt,
	}
},
{
	.cra_name		= "cfb16(aes)",
	.cra_driver_name	= "atmel-cfb16-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CFB16_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x1,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb16_encrypt,
		.decrypt	= atmel_aes_cfb16_decrypt,
	}
},
{
	.cra_name		= "cfb8(aes)",
	.cra_driver_name	= "atmel-cfb8-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CFB8_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb8_encrypt,
		.decrypt	= atmel_aes_cfb8_decrypt,
	}
},
{
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "atmel-ctr-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctr_ctx),
	.cra_alignmask		= 0xf,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_ctr_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_ctr_encrypt,
		.decrypt	= atmel_aes_ctr_decrypt,
	}
},
};
static struct crypto_alg aes_cfb64_alg = {
	.cra_name		= "cfb64(aes)",
	.cra_driver_name	= "atmel-cfb64-aes",
	.cra_priority		= ATMEL_AES_PRIORITY,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CFB64_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct atmel_aes_ctx),
	.cra_alignmask		= 0x7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= atmel_aes_cra_init,
	.cra_exit		= atmel_aes_cra_exit,
	.cra_u.ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= atmel_aes_setkey,
		.encrypt	= atmel_aes_cfb64_encrypt,
		.decrypt	= atmel_aes_cfb64_decrypt,
	}
};
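/*
 * Example (sketch, not part of this driver): once registered, these
 * transformations are reached through the generic crypto API, with this
 * driver selected whenever its cra_priority outbids the software
 * implementation, e.g.:
 *
 *	struct crypto_ablkcipher *tfm =
 *		crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *
 *	if (!IS_ERR(tfm))
 *		crypto_ablkcipher_setkey(tfm, key, AES_KEYSIZE_128);
 */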
/* gcm aead functions */

static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
			       const u32 *data, size_t datalen,
			       const u32 *ghash_in, u32 *ghash_out,
			       atmel_aes_fn_t resume);
static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd);

static int atmel_aes_gcm_start(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_process(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_length(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_data(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd);
static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd);
static inline struct atmel_aes_gcm_ctx *
atmel_aes_gcm_ctx_cast(struct atmel_aes_base_ctx *ctx)
{
	return container_of(ctx, struct atmel_aes_gcm_ctx, base);
}
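/*
 * GCM (NIST SP 800-38D): the hardware computes both the CTR encryption and
 * the GHASH. J0 is the initial counter block: IV || 0^31 || 1 for 96-bit
 * IVs, otherwise GHASH(H, {}, IV), computed by atmel_aes_gcm_start().
 */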
static int atmel_aes_gcm_ghash(struct atmel_aes_dev *dd,
			       const u32 *data, size_t datalen,
			       const u32 *ghash_in, u32 *ghash_out,
			       atmel_aes_fn_t resume)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);

	dd->data = (u32 *)data;
	dd->datalen = datalen;
	ctx->ghash_in = ghash_in;
	ctx->ghash_out = ghash_out;
	ctx->ghash_resume = resume;

	atmel_aes_write_ctrl(dd, false, NULL);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_ghash_init);
}

static int atmel_aes_gcm_ghash_init(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);

	/* Set the data length. */
	atmel_aes_write(dd, AES_AADLENR, dd->total);
	atmel_aes_write(dd, AES_CLENR, 0);

	/* If needed, overwrite the GCM Intermediate Hash Word Registers */
	if (ctx->ghash_in)
		atmel_aes_write_block(dd, AES_GHASHR(0), ctx->ghash_in);

	return atmel_aes_gcm_ghash_finalize(dd);
}

static int atmel_aes_gcm_ghash_finalize(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	u32 isr;

	/* Write data into the Input Data Registers. */
	while (dd->datalen > 0) {
		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_gcm_ghash_finalize;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	/* Read the computed hash from GHASHRx. */
	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash_out);

	return ctx->ghash_resume(dd);
}

static int atmel_aes_gcm_start(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct atmel_aes_reqctx *rctx = aead_request_ctx(req);
	size_t ivsize = crypto_aead_ivsize(tfm);
	size_t datalen, padlen;
	const void *iv = req->iv;
	u8 *data = dd->buf;
	int err;

	atmel_aes_set_mode(dd, rctx);

	err = atmel_aes_hw_init(dd);
	if (err)
		return atmel_aes_complete(dd, err);

	if (likely(ivsize == 12)) {
		memcpy(ctx->j0, iv, ivsize);
		ctx->j0[3] = cpu_to_be32(1);
		return atmel_aes_gcm_process(dd);
	}

	padlen = atmel_aes_padlen(ivsize, AES_BLOCK_SIZE);
	datalen = ivsize + padlen + AES_BLOCK_SIZE;
	if (datalen > dd->buflen)
		return atmel_aes_complete(dd, -EINVAL);

	memcpy(data, iv, ivsize);
	memset(data + ivsize, 0, padlen + sizeof(u64));
	((u64 *)(data + datalen))[-1] = cpu_to_be64(ivsize * 8);

	return atmel_aes_gcm_ghash(dd, (const u32 *)data, datalen,
				   NULL, ctx->j0, atmel_aes_gcm_process);
}
static int atmel_aes_gcm_process(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 authsize;

	/* Compute text length. */
	authsize = crypto_aead_authsize(tfm);
	ctx->textlen = req->cryptlen - (enc ? 0 : authsize);

	/*
	 * According to tcrypt test suite, the GCM Automatic Tag Generation
	 * fails when both the message and its associated data are empty.
	 */
	if (likely(req->assoclen != 0 || ctx->textlen != 0))
		dd->flags |= AES_FLAGS_GTAGEN;

	atmel_aes_write_ctrl(dd, false, NULL);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_length);
}
static int atmel_aes_gcm_length(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	u32 j0_lsw, *j0 = ctx->j0;
	size_t padlen;

	/* Write incr32(J0) into IV. */
	j0_lsw = j0[3];
	j0[3] = cpu_to_be32(be32_to_cpu(j0[3]) + 1);
	atmel_aes_write_block(dd, AES_IVR(0), j0);
	j0[3] = j0_lsw;

	/* Set aad and text lengths. */
	atmel_aes_write(dd, AES_AADLENR, req->assoclen);
	atmel_aes_write(dd, AES_CLENR, ctx->textlen);

	/* Check whether AAD are present. */
	if (unlikely(req->assoclen == 0)) {
		dd->datalen = 0;
		return atmel_aes_gcm_data(dd);
	}

	/* Copy assoc data and add padding. */
	padlen = atmel_aes_padlen(req->assoclen, AES_BLOCK_SIZE);
	if (unlikely(req->assoclen + padlen > dd->buflen))
		return atmel_aes_complete(dd, -EINVAL);
	sg_copy_to_buffer(req->src, sg_nents(req->src), dd->buf, req->assoclen);

	/* Write assoc data into the Input Data register. */
	dd->data = (u32 *)dd->buf;
	dd->datalen = req->assoclen + padlen;
	return atmel_aes_gcm_data(dd);
}
static int atmel_aes_gcm_data(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	bool use_dma = (ctx->textlen >= ATMEL_AES_DMA_THRESHOLD);
	struct scatterlist *src, *dst;
	u32 isr, mr;

	/* Write AAD first. */
	while (dd->datalen > 0) {
		atmel_aes_write_block(dd, AES_IDATAR(0), dd->data);
		dd->data += 4;
		dd->datalen -= AES_BLOCK_SIZE;

		isr = atmel_aes_read(dd, AES_ISR);
		if (!(isr & AES_INT_DATARDY)) {
			dd->resume = atmel_aes_gcm_data;
			atmel_aes_write(dd, AES_IER, AES_INT_DATARDY);
			return -EINPROGRESS;
		}
	}

	/* GMAC only. */
	if (unlikely(ctx->textlen == 0))
		return atmel_aes_gcm_tag_init(dd);

	/* Prepare src and dst scatter lists to transfer cipher/plain texts */
	src = scatterwalk_ffwd(ctx->src, req->src, req->assoclen);
	dst = ((req->src == req->dst) ? src :
	       scatterwalk_ffwd(ctx->dst, req->dst, req->assoclen));

	if (use_dma) {
		/* Update the Mode Register for DMA transfers. */
		mr = atmel_aes_read(dd, AES_MR);
		mr &= ~(AES_MR_SMOD_MASK | AES_MR_DUALBUFF);
		mr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			mr |= AES_MR_DUALBUFF;
		atmel_aes_write(dd, AES_MR, mr);

		return atmel_aes_dma_start(dd, src, dst, ctx->textlen,
					   atmel_aes_gcm_tag_init);
	}

	return atmel_aes_cpu_start(dd, src, dst, ctx->textlen,
				   atmel_aes_gcm_tag_init);
}
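/*
 * Tag generation: with AES_FLAGS_GTAGEN the tag is read from TAGR once
 * TAGRDY rises; otherwise the driver hashes len(A) || len(C) itself and
 * encrypts the resulting GHASH with J0 in CTR mode (see atmel_aes_gcm_tag()).
 */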
static int atmel_aes_gcm_tag_init(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	u64 *data = dd->buf;

	if (likely(dd->flags & AES_FLAGS_GTAGEN)) {
		if (!(atmel_aes_read(dd, AES_ISR) & AES_INT_TAGRDY)) {
			dd->resume = atmel_aes_gcm_tag_init;
			atmel_aes_write(dd, AES_IER, AES_INT_TAGRDY);
			return -EINPROGRESS;
		}

		return atmel_aes_gcm_finalize(dd);
	}

	/* Read the GCM Intermediate Hash Word Registers. */
	atmel_aes_read_block(dd, AES_GHASHR(0), ctx->ghash);

	data[0] = cpu_to_be64(req->assoclen * 8);
	data[1] = cpu_to_be64(ctx->textlen * 8);

	return atmel_aes_gcm_ghash(dd, (const u32 *)data, AES_BLOCK_SIZE,
				   ctx->ghash, ctx->ghash, atmel_aes_gcm_tag);
}
static int atmel_aes_gcm_tag(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	unsigned long flags;

	/*
	 * Change mode to CTR to complete the tag generation.
	 * Use J0 as Initialization Vector.
	 */
	flags = dd->flags;
	dd->flags &= ~(AES_FLAGS_OPMODE_MASK | AES_FLAGS_GTAGEN);
	dd->flags |= AES_FLAGS_CTR;
	atmel_aes_write_ctrl(dd, false, ctx->j0);
	dd->flags = flags;

	atmel_aes_write_block(dd, AES_IDATAR(0), ctx->ghash);
	return atmel_aes_wait_for_data_ready(dd, atmel_aes_gcm_finalize);
}
static int atmel_aes_gcm_finalize(struct atmel_aes_dev *dd)
{
	struct atmel_aes_gcm_ctx *ctx = atmel_aes_gcm_ctx_cast(dd->ctx);
	struct aead_request *req = aead_request_cast(dd->areq);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	bool enc = atmel_aes_is_encrypt(dd);
	u32 offset, authsize, itag[4], *otag = ctx->tag;
	int err;

	/* Read the computed tag. */
	if (likely(dd->flags & AES_FLAGS_GTAGEN))
		atmel_aes_read_block(dd, AES_TAGR(0), ctx->tag);
	else
		atmel_aes_read_block(dd, AES_ODATAR(0), ctx->tag);

	offset = req->assoclen + ctx->textlen;
	authsize = crypto_aead_authsize(tfm);
	if (enc) {
		scatterwalk_map_and_copy(otag, req->dst, offset, authsize, 1);
		err = 0;
	} else {
		scatterwalk_map_and_copy(itag, req->src, offset, authsize, 0);
		err = crypto_memneq(itag, otag, authsize) ? -EBADMSG : 0;
	}

	return atmel_aes_complete(dd, err);
}
static int atmel_aes_gcm_crypt(struct aead_request *req,
			       unsigned long mode)
{
	struct atmel_aes_base_ctx *ctx;
	struct atmel_aes_reqctx *rctx;
	struct atmel_aes_dev *dd;

	ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	ctx->block_size = AES_BLOCK_SIZE;

	dd = atmel_aes_find_dev(ctx);
	if (!dd)
		return -ENODEV;

	rctx = aead_request_ctx(req);
	rctx->mode = AES_FLAGS_GCM | mode;

	return atmel_aes_handle_queue(dd, &req->base);
}
static int atmel_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key,
				unsigned int keylen)
{
	struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_256 &&
	    keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_128) {
		crypto_aead_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}
static int atmel_aes_gcm_setauthsize(struct crypto_aead *tfm,
				     unsigned int authsize)
{
	/* Same as crypto_gcm_authsize() from crypto/gcm.c */
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
static int atmel_aes_gcm_encrypt(struct aead_request *req)
{
	return atmel_aes_gcm_crypt(req, AES_FLAGS_ENCRYPT);
}

static int atmel_aes_gcm_decrypt(struct aead_request *req)
{
	return atmel_aes_gcm_crypt(req, 0);
}
static int atmel_aes_gcm_init(struct crypto_aead *tfm)
{
	struct atmel_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm);

	crypto_aead_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx));
	ctx->base.start = atmel_aes_gcm_start;

	return 0;
}

static void atmel_aes_gcm_exit(struct crypto_aead *tfm)
{
}
static struct aead_alg aes_gcm_alg = {
	.setkey		= atmel_aes_gcm_setkey,
	.setauthsize	= atmel_aes_gcm_setauthsize,
	.encrypt	= atmel_aes_gcm_encrypt,
	.decrypt	= atmel_aes_gcm_decrypt,
	.init		= atmel_aes_gcm_init,
	.exit		= atmel_aes_gcm_exit,
	.ivsize		= 12,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "atmel-gcm-aes",
		.cra_priority		= ATMEL_AES_PRIORITY,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct atmel_aes_gcm_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
};
/* Probe functions */

static int atmel_aes_buff_init(struct atmel_aes_dev *dd)
{
	dd->buf = (void *)__get_free_pages(GFP_KERNEL, ATMEL_AES_BUFFER_ORDER);
	dd->buflen = ATMEL_AES_BUFFER_SIZE;
	dd->buflen &= ~(AES_BLOCK_SIZE - 1);

	if (!dd->buf) {
		dev_err(dd->dev, "unable to alloc pages.\n");
		return -ENOMEM;
	}

	return 0;
}
static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd)
{
	/* The buffer was allocated with ATMEL_AES_BUFFER_ORDER pages. */
	free_pages((unsigned long)dd->buf, ATMEL_AES_BUFFER_ORDER);
}
static bool atmel_aes_filter(struct dma_chan *chan, void *slave)
{
	struct at_dma_slave	*sl = slave;

	if (sl && sl->dma_dev == chan->device->dev) {
		chan->private = sl;
		return true;
	} else {
		return false;
	}
}
static int atmel_aes_dma_init(struct atmel_aes_dev *dd,
			      struct crypto_platform_data *pdata)
{
	struct at_dma_slave *slave;
	int err = -ENOMEM;
	dma_cap_mask_t mask;

	dma_cap_zero(mask);
	dma_cap_set(DMA_SLAVE, mask);

	/* Try to grab 2 DMA channels */
	slave = &pdata->dma_slave->rxdata;
	dd->src.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter,
							slave, dd->dev, "tx");
	if (!dd->src.chan)
		goto err_dma_in;

	slave = &pdata->dma_slave->txdata;
	dd->dst.chan = dma_request_slave_channel_compat(mask, atmel_aes_filter,
							slave, dd->dev, "rx");
	if (!dd->dst.chan)
		goto err_dma_out;

	return 0;

err_dma_out:
	dma_release_channel(dd->src.chan);
err_dma_in:
	dev_warn(dd->dev, "no DMA channel available\n");
	return err;
}
static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd)
{
	dma_release_channel(dd->dst.chan);
	dma_release_channel(dd->src.chan);
}
static void atmel_aes_queue_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

	atmel_aes_handle_queue(dd, NULL);
}

static void atmel_aes_done_task(unsigned long data)
{
	struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data;

	dd->is_async = true;
	(void)dd->resume(dd);
}
static irqreturn_t atmel_aes_irq(int irq, void *dev_id)
{
	struct atmel_aes_dev *aes_dd = dev_id;
	u32 reg;

	reg = atmel_aes_read(aes_dd, AES_ISR);
	if (reg & atmel_aes_read(aes_dd, AES_IMR)) {
		atmel_aes_write(aes_dd, AES_IDR, reg);
		if (AES_FLAGS_BUSY & aes_dd->flags)
			tasklet_schedule(&aes_dd->done_task);
		else
			dev_warn(aes_dd->dev, "AES interrupt when no active requests.\n");
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd)
{
	int i;

	if (dd->caps.has_gcm)
		crypto_unregister_aead(&aes_gcm_alg);

	if (dd->caps.has_cfb64)
		crypto_unregister_alg(&aes_cfb64_alg);

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++)
		crypto_unregister_alg(&aes_algs[i]);
}
static int atmel_aes_register_algs(struct atmel_aes_dev *dd)
{
	int err, i, j;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		err = crypto_register_alg(&aes_algs[i]);
		if (err)
			goto err_aes_algs;
	}

	if (dd->caps.has_cfb64) {
		err = crypto_register_alg(&aes_cfb64_alg);
		if (err)
			goto err_aes_cfb64_alg;
	}

	if (dd->caps.has_gcm) {
		err = crypto_register_aead(&aes_gcm_alg);
		if (err)
			goto err_aes_gcm_alg;
	}

	return 0;

err_aes_gcm_alg:
	crypto_unregister_alg(&aes_cfb64_alg);
err_aes_cfb64_alg:
	i = ARRAY_SIZE(aes_algs);
err_aes_algs:
	for (j = 0; j < i; j++)
		crypto_unregister_alg(&aes_algs[j]);

	return err;
}
static void atmel_aes_get_cap(struct atmel_aes_dev *dd)
{
	dd->caps.has_dualbuff = 0;
	dd->caps.has_cfb64 = 0;
	dd->caps.has_ctr32 = 0;
	dd->caps.has_gcm = 0;
	dd->caps.max_burst_size = 1;

	/* keep only major version number */
	switch (dd->hw_version & 0xff0) {
	case 0x500:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.has_ctr32 = 1;
		dd->caps.has_gcm = 1;
		dd->caps.max_burst_size = 4;
		break;

	case 0x200:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.has_ctr32 = 1;
		dd->caps.has_gcm = 1;
		dd->caps.max_burst_size = 4;
		break;

	case 0x130:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.max_burst_size = 4;
		break;

	case 0x120:
		break;

	default:
		dev_warn(dd->dev,
			 "Unmanaged aes version, set minimum capabilities\n");
		break;
	}
}
#if defined(CONFIG_OF)
static const struct of_device_id atmel_aes_dt_ids[] = {
	{ .compatible = "atmel,at91sam9g46-aes" },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, atmel_aes_dt_ids);

static struct crypto_platform_data *atmel_aes_of_init(struct platform_device *pdev)
{
	struct device_node *np = pdev->dev.of_node;
	struct crypto_platform_data *pdata;

	if (!np) {
		dev_err(&pdev->dev, "device node not found\n");
		return ERR_PTR(-EINVAL);
	}

	pdata = devm_kzalloc(&pdev->dev, sizeof(*pdata), GFP_KERNEL);
	if (!pdata) {
		dev_err(&pdev->dev, "could not allocate memory for pdata\n");
		return ERR_PTR(-ENOMEM);
	}

	pdata->dma_slave = devm_kzalloc(&pdev->dev,
					sizeof(*(pdata->dma_slave)),
					GFP_KERNEL);
	if (!pdata->dma_slave) {
		dev_err(&pdev->dev, "could not allocate memory for dma_slave\n");
		devm_kfree(&pdev->dev, pdata);
		return ERR_PTR(-ENOMEM);
	}

	return pdata;
}
#else
static inline struct crypto_platform_data *atmel_aes_of_init(struct platform_device *pdev)
{
	return ERR_PTR(-EINVAL);
}
#endif
static int atmel_aes_probe(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;
	struct crypto_platform_data *pdata;
	struct device *dev = &pdev->dev;
	struct resource *aes_res;
	int err;

	pdata = pdev->dev.platform_data;
	if (!pdata) {
		pdata = atmel_aes_of_init(pdev);
		if (IS_ERR(pdata)) {
			err = PTR_ERR(pdata);
			goto aes_dd_err;
		}
	}

	if (!pdata->dma_slave) {
		err = -ENXIO;
		goto aes_dd_err;
	}

	aes_dd = devm_kzalloc(&pdev->dev, sizeof(*aes_dd), GFP_KERNEL);
	if (aes_dd == NULL) {
		dev_err(dev, "unable to alloc data struct.\n");
		err = -ENOMEM;
		goto aes_dd_err;
	}

	aes_dd->dev = dev;

	platform_set_drvdata(pdev, aes_dd);

	INIT_LIST_HEAD(&aes_dd->list);
	spin_lock_init(&aes_dd->lock);

	tasklet_init(&aes_dd->done_task, atmel_aes_done_task,
		     (unsigned long)aes_dd);
	tasklet_init(&aes_dd->queue_task, atmel_aes_queue_task,
		     (unsigned long)aes_dd);

	crypto_init_queue(&aes_dd->queue, ATMEL_AES_QUEUE_LENGTH);

	/* Get the base address */
	aes_res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	if (!aes_res) {
		dev_err(dev, "no MEM resource info\n");
		err = -ENODEV;
		goto res_err;
	}
	aes_dd->phys_base = aes_res->start;

	/* Get the IRQ */
	aes_dd->irq = platform_get_irq(pdev, 0);
	if (aes_dd->irq < 0) {
		dev_err(dev, "no IRQ resource info\n");
		err = aes_dd->irq;
		goto res_err;
	}

	err = devm_request_irq(&pdev->dev, aes_dd->irq, atmel_aes_irq,
			       IRQF_SHARED, "atmel-aes", aes_dd);
	if (err) {
		dev_err(dev, "unable to request aes irq.\n");
		goto res_err;
	}

	/* Initializing the clock */
	aes_dd->iclk = devm_clk_get(&pdev->dev, "aes_clk");
	if (IS_ERR(aes_dd->iclk)) {
		dev_err(dev, "clock initialization failed.\n");
		err = PTR_ERR(aes_dd->iclk);
		goto res_err;
	}

	aes_dd->io_base = devm_ioremap_resource(&pdev->dev, aes_res);
	if (IS_ERR(aes_dd->io_base)) {
		dev_err(dev, "can't ioremap\n");
		err = PTR_ERR(aes_dd->io_base);
		goto res_err;
	}

	err = clk_prepare(aes_dd->iclk);
	if (err)
		goto res_err;

	err = atmel_aes_hw_version_init(aes_dd);
	if (err)
		goto iclk_unprepare;

	atmel_aes_get_cap(aes_dd);

	err = atmel_aes_buff_init(aes_dd);
	if (err)
		goto err_aes_buff;

	err = atmel_aes_dma_init(aes_dd, pdata);
	if (err)
		goto err_aes_dma;

	spin_lock(&atmel_aes.lock);
	list_add_tail(&aes_dd->list, &atmel_aes.dev_list);
	spin_unlock(&atmel_aes.lock);

	err = atmel_aes_register_algs(aes_dd);
	if (err)
		goto err_algs;

	dev_info(dev, "Atmel AES - Using %s, %s for DMA transfers\n",
		 dma_chan_name(aes_dd->src.chan),
		 dma_chan_name(aes_dd->dst.chan));

	return 0;

err_algs:
	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);
	atmel_aes_dma_cleanup(aes_dd);
err_aes_dma:
	atmel_aes_buff_cleanup(aes_dd);
err_aes_buff:
iclk_unprepare:
	clk_unprepare(aes_dd->iclk);
res_err:
	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);
aes_dd_err:
	dev_err(dev, "initialization failed.\n");

	return err;
}
static int atmel_aes_remove(struct platform_device *pdev)
{
	struct atmel_aes_dev *aes_dd;

	aes_dd = platform_get_drvdata(pdev);
	if (!aes_dd)
		return -ENODEV;

	spin_lock(&atmel_aes.lock);
	list_del(&aes_dd->list);
	spin_unlock(&atmel_aes.lock);

	atmel_aes_unregister_algs(aes_dd);

	tasklet_kill(&aes_dd->done_task);
	tasklet_kill(&aes_dd->queue_task);

	atmel_aes_dma_cleanup(aes_dd);
	atmel_aes_buff_cleanup(aes_dd);

	clk_unprepare(aes_dd->iclk);

	return 0;
}
static struct platform_driver atmel_aes_driver = {
	.probe		= atmel_aes_probe,
	.remove		= atmel_aes_remove,
	.driver		= {
		.name	= "atmel_aes",
		.of_match_table = of_match_ptr(atmel_aes_dt_ids),
	},
};

module_platform_driver(atmel_aes_driver);

MODULE_DESCRIPTION("Atmel AES hw acceleration support.");
MODULE_LICENSE("GPL v2");
MODULE_AUTHOR("Nicolas Royer - Eukréa Electromatique");