/******************************************************************************
**
** FILE NAME    : ifxmips_async_aes.c
** MODULES      : DEU Module
** DATE         : October 11, 2010
** AUTHOR       : Mohammad Firdaus
** DESCRIPTION  : Data Encryption Unit Driver for AES Algorithm
** COPYRIGHT    : Copyright (c) 2010
**                Infineon Technologies AG
**                Am Campeon 1-12, 85579 Neubiberg, Germany
**
**   This program is free software; you can redistribute it and/or modify
**   it under the terms of the GNU General Public License as published by
**   the Free Software Foundation; either version 2 of the License, or
**   (at your option) any later version.
**
** HISTORY
** $Date            $Author             $Comment
** 08 Sept 2009     Mohammad Firdaus    Initial UEIP release
** 11 Oct 2010      Mohammad Firdaus    Kernel port incl. async. ablkcipher mode
** 21 March 2011    Mohammad Firdaus    Changes for kernel 2.6.32 and IPSec integration
*******************************************************************************/
/*!
  \defgroup IFX_DEU IFX_DEU_DRIVERS
  \ingroup API
  \brief ifx DEU driver module
*/

/*!
  \file ifxmips_async_aes.c
  \ingroup IFX_DEU
  \brief AES Encryption Driver main file
*/

/*!
  \defgroup IFX_AES_FUNCTIONS IFX_AES_FUNCTIONS
  \ingroup IFX_DEU
  \brief IFX AES driver Functions
*/
#include <linux/wait.h>
#include <linux/crypto.h>
#include <linux/kernel.h>
#include <linux/kthread.h>
#include <linux/interrupt.h>
#include <linux/spinlock.h>
#include <linux/list.h>
#include <crypto/ctr.h>
#include <crypto/aes.h>
#include <crypto/algapi.h>
#include <crypto/scatterwalk.h>

#include <asm/ifx/ifx_regs.h>
#include <asm/ifx/ifx_types.h>
#include <asm/ifx/common_routines.h>
#include <asm/ifx/irq.h>
#include <asm/ifx/ifx_pmu.h>
#include <asm/ifx/ifx_gpio.h>
#include <asm/kmap_types.h>

#include "ifxmips_deu.h"
#if defined(CONFIG_DANUBE)
#include "ifxmips_deu_danube.h"
extern int ifx_danube_pre_1_4;
#elif defined(CONFIG_AR9)
#include "ifxmips_deu_ar9.h"
#elif defined(CONFIG_VR9) || defined(CONFIG_AR10)
#include "ifxmips_deu_vr9.h"
#else
#error "Unknown platform"
#endif
/* DMA related header and variables */

spinlock_t aes_lock;
#define CRTCL_SECT_INIT     spin_lock_init(&aes_lock)
#define CRTCL_SECT_START    spin_lock_irqsave(&aes_lock, flag)
#define CRTCL_SECT_END      spin_unlock_irqrestore(&aes_lock, flag)
/* Definition of constants */
//#define AES_START                   IFX_AES_CON
#define AES_MIN_KEY_SIZE              16
#define AES_MAX_KEY_SIZE              32
#define AES_BLOCK_SIZE                16
#define CTR_RFC3686_NONCE_SIZE        4
#define CTR_RFC3686_IV_SIZE           8
#define CTR_RFC3686_MAX_KEY_SIZE      (AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE)
#ifdef CRYPTO_DEBUG
extern char debug_level;
#define DPRINTF(level, format, args...) if (level < debug_level) printk(KERN_INFO "[%s %s %d]: " format, __FILE__, __func__, __LINE__, ##args);
#else
#define DPRINTF(level, format, args...)
#endif /* CRYPTO_DEBUG */
static int disable_multiblock = 0;

#if LINUX_VERSION_CODE >= KERNEL_VERSION(2,4,0)
module_param(disable_multiblock, int, 0);
MODULE_PARM_DESC(disable_multiblock, "Disable encryption of whole multiblock buffers");
#endif

static int disable_deudma = 1;
/* Function declarations */
int aes_chip_init(void);
u32 endian_swap(u32 input);
u32 input_swap(u32 input);
u32 *memory_alignment(const u8 *arg, u32 *buff_alloc, int in_out, int nbytes);
void aes_dma_memory_copy(u32 *outcopy, u32 *out_dma, u8 *out_arg, int nbytes);
int aes_memory_allocate(int value);
int des_memory_allocate(int value);
void memory_release(u32 *addr);
struct aes_ctx {
    int key_length;
    u32 buf[AES_MAX_KEY_SIZE];
    u8 nonce[CTR_RFC3686_NONCE_SIZE];
};
struct aes_container {
    u8 *dst_buf;
    u8 *src_buf;

    int mode;
    int encdec;
    int complete;
    int flag;

    u32 bytes_processed;
    u32 nbytes;

    struct ablkcipher_request arequest;
};
aes_priv_t *aes_queue;
extern deu_drv_priv_t deu_dma_priv;
void hexdump(unsigned char *buf, unsigned int len)
{
    print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
                   16, 1, buf, len, false);
}
/*! \fn static int lq_deu_aes_core (void *ctx_arg, u8 *out_arg, const u8 *in_arg, u8 *iv_arg,
 *                                  size_t nbytes, int encdec, int mode)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Main interface to the AES hardware
 * \param ctx_arg Crypto algo context
 * \param out_arg Output bytestream
 * \param in_arg Input bytestream
 * \param iv_arg Initialization vector
 * \param nbytes Length of bytestream
 * \param encdec 1 for encrypt; 0 for decrypt
 * \param mode Operation mode such as ecb, cbc, ctr
 * \return -EINPROGRESS if the request was handed to the DMA engine, -EINVAL on error
*/
static int lq_deu_aes_core (void *ctx_arg, u8 *out_arg, const u8 *in_arg,
                            u8 *iv_arg, size_t nbytes, int encdec, int mode)
{
    /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
    volatile struct aes_t *aes = (volatile struct aes_t *) AES_START;
    struct aes_ctx *ctx = (struct aes_ctx *)ctx_arg;
    u32 *in_key = ctx->buf;
    unsigned long flag;
    /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
    int key_len = ctx->key_length;
    int wlen = 0;

    volatile struct deu_dma_t *dma = (struct deu_dma_t *) IFX_DEU_DMA_CON;
    struct dma_device_info *dma_device = ifx_deu[0].dma_device;
    deu_drv_priv_t *deu_priv = (deu_drv_priv_t *)dma_device->priv;

    //u32 *outcopy = NULL;
    u32 *dword_mem_aligned_in = NULL;

    CRTCL_SECT_START;

    /* 128, 192 or 256 bit key length */
    aes->controlr.K = key_len / 8 - 2;
    if (key_len == 128 / 8) {
        aes->K3R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 0));
        aes->K2R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 1));
        aes->K1R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 2));
        aes->K0R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 3));
    }
    else if (key_len == 192 / 8) {
        aes->K5R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 0));
        aes->K4R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 1));
        aes->K3R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 2));
        aes->K2R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 3));
        aes->K1R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 4));
        aes->K0R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 5));
    }
    else if (key_len == 256 / 8) {
        aes->K7R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 0));
        aes->K6R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 1));
        aes->K5R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 2));
        aes->K4R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 3));
        aes->K3R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 4));
        aes->K2R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 5));
        aes->K1R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 6));
        aes->K0R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 7));
    }
    else {
        printk (KERN_ERR "[%s %s %d]: Invalid key_len : %d\n", __FILE__, __func__, __LINE__, key_len);
        CRTCL_SECT_END;
        return -EINVAL;
    }

    /* let HW pre-process DEcryption key in any case (even if
       ENcryption is used). Key Valid (KV) bit is then only
       checked in decryption routine! */
    aes->controlr.PNK = 1;

    while (aes->controlr.BUS) {
        // this will not take long
    }

    AES_DMA_MISC_CONFIG();

    aes->controlr.E_D = !encdec;    //encryption
    aes->controlr.O = mode;         //0 ECB 1 CBC 2 OFB 3 CFB 4 CTR

    //aes->controlr.F = 128; //default; only for CFB and OFB modes; change only for customer-specific apps
    if (mode > 0) {
        aes->IV3R = DEU_ENDIAN_SWAP(*(u32 *) iv_arg);
        aes->IV2R = DEU_ENDIAN_SWAP(*((u32 *) iv_arg + 1));
        aes->IV1R = DEU_ENDIAN_SWAP(*((u32 *) iv_arg + 2));
        aes->IV0R = DEU_ENDIAN_SWAP(*((u32 *) iv_arg + 3));
    }

    /* Prepare Rx buf length used in dma pseudo interrupt */
    deu_priv->deu_rx_buf = (u32 *)out_arg;
    deu_priv->deu_rx_len = nbytes;

    /* memory alignment issue */
    dword_mem_aligned_in = (u32 *) DEU_DWORD_REORDERING(in_arg, aes_buff_in, BUFFER_IN, nbytes);

    dma->controlr.ALGO = 1;     //AES
    dma->controlr.BS = 0;
    aes->controlr.DAU = 0;
    dma->controlr.EN = 1;

    while (aes->controlr.BUS) {
        // wait for AES to be ready
    }

    deu_priv->outcopy = (u32 *) DEU_DWORD_REORDERING(out_arg, aes_buff_out, BUFFER_OUT, nbytes);
    deu_priv->event_src = AES_ASYNC_EVENT;

    wlen = dma_device_write (dma_device, (u8 *)dword_mem_aligned_in, nbytes, NULL);
    if (wlen != nbytes) {
        dma->controlr.EN = 0;
        CRTCL_SECT_END;
        printk (KERN_ERR "[%s %s %d]: dma_device_write fail!\n", __FILE__, __func__, __LINE__);
        return -EINVAL;
    }

    // WAIT_AES_DMA_READY();

    CRTCL_SECT_END;

    if (mode > 0) {
        *((u32 *) iv_arg) = DEU_ENDIAN_SWAP(*((u32 *) iv_arg));
        *((u32 *) iv_arg + 1) = DEU_ENDIAN_SWAP(*((u32 *) iv_arg + 1));
        *((u32 *) iv_arg + 2) = DEU_ENDIAN_SWAP(*((u32 *) iv_arg + 2));
        *((u32 *) iv_arg + 3) = DEU_ENDIAN_SWAP(*((u32 *) iv_arg + 3));
    }

    return -EINPROGRESS;
}
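/* Note on the asynchronous flow: lq_deu_aes_core() only programs the key/IV
 * registers and hands the (aligned) input buffer to the DMA engine, then
 * returns -EINPROGRESS.  The output is delivered later through the DMA receive
 * path into deu_priv->deu_rx_buf/deu_rx_len, and the DMA pseudo interrupt is
 * expected to raise AES_ASYNC_EVENT, which wakes aes_crypto_thread() below to
 * continue or complete the request. */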
/*! \fn static int count_sgs(struct scatterlist *sl, unsigned int total_bytes)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Counts and returns the number of scatterlist entries
 * \param *sl Pointer to the scatterlist
 * \param total_bytes The total number of bytes that need to be encrypted/decrypted
 * \return The number of scatterlist entries
*/
static int count_sgs(struct scatterlist *sl, unsigned int total_bytes)
{
    int i = 0;

    do {
        total_bytes -= sl[i].length;
        i++;
    } while (total_bytes > 0);

    return i;
}
/*! \fn static void lq_sg_init(struct aes_container *aes_con, struct scatterlist *src,
 *                             struct scatterlist *dst)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Maps the scatterlists into source/destination buffers.
 * \param *aes_con Pointer to the AES container structure
 * \param *src Pointer to the source scatterlist
 * \param *dst Pointer to the destination scatterlist
*/
static void lq_sg_init(struct aes_container *aes_con, struct scatterlist *src,
                       struct scatterlist *dst)
{
    struct page *dst_page, *src_page;

    src_page = sg_virt(src);
    aes_con->src_buf = (char *) src_page;

    dst_page = sg_virt(dst);
    aes_con->dst_buf = (char *) dst_page;
}
/*! \fn static void lq_sg_complete(struct aes_container *aes_con)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Free the used up memory after encrypt/decrypt.
 * \param *aes_con Pointer to the AES container structure
*/
static void lq_sg_complete(struct aes_container *aes_con)
{
    unsigned long queue_flag;

    spin_lock_irqsave(&aes_queue->lock, queue_flag);
    kfree(aes_con);
    spin_unlock_irqrestore(&aes_queue->lock, queue_flag);
}
/*! \fn static inline struct aes_container *aes_container_cast(struct ablkcipher_request *areq)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Locate the structure aes_container in memory.
 * \param *areq Pointer to memory location where ablkcipher_request is located
 * \return *aes_container Pointer to the enclosing aes_container
*/
static inline struct aes_container *aes_container_cast (struct ablkcipher_request *areq)
{
    return container_of(areq, struct aes_container, arequest);
}
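/* aes_container_cast() assumes the request really is the 'arequest' copy that
 * lq_aes_queue_mgr() embeds in a struct aes_container; requests placed on
 * aes_queue->list are always that embedded copy, so container_of() is valid here. */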
/*! \fn static int process_next_packet(struct aes_container *aes_con, struct ablkcipher_request *areq,
 *                                     int state)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Process the next packet to be encrypted/decrypted
 * \param *aes_con AES container structure
 * \param *areq Pointer to memory location where ablkcipher_request is located
 * \param state The state of the current packet (part of a scatterlist or a new packet)
 * \return -EINVAL: error, -EINPROGRESS: crypto still running, 1: no more scatterlist
*/
static int process_next_packet(struct aes_container *aes_con, struct ablkcipher_request *areq,
                               int state)
{
    u8 *iv;
    int mode, dir, err = -EINVAL;
    unsigned long queue_flag;
    u32 inc, nbytes, remain, chunk_size;
    struct scatterlist *src = NULL;
    struct scatterlist *dst = NULL;
    struct crypto_ablkcipher *cipher;
    struct aes_ctx *ctx;

    spin_lock_irqsave(&aes_queue->lock, queue_flag);

    dir = aes_con->encdec;
    mode = aes_con->mode;
    iv = areq->info;

    if (state & PROCESS_SCATTER) {
        src = scatterwalk_sg_next(areq->src);
        dst = scatterwalk_sg_next(areq->dst);

        if (!src || !dst) {
            spin_unlock_irqrestore(&aes_queue->lock, queue_flag);
            return 1;
        }
    }
    else if (state & PROCESS_NEW_PACKET) {
        src = areq->src;
        dst = areq->dst;
    }

    remain = aes_con->bytes_processed;
    chunk_size = src->length;

    if (remain > DEU_MAX_PACKET_SIZE)
        inc = DEU_MAX_PACKET_SIZE;
    else if (remain > chunk_size)
        inc = chunk_size;
    else
        inc = remain;

    aes_con->nbytes = inc;

    if (state & PROCESS_SCATTER) {
        aes_con->src_buf += aes_con->nbytes;
        aes_con->dst_buf += aes_con->nbytes;
    }
    else
        lq_sg_init(aes_con, src, dst);

    nbytes = aes_con->nbytes;

    //printk("debug - Line: %d, func: %s, reqsize: %d, scattersize: %d\n",
    //       __LINE__, __func__, nbytes, chunk_size);

    cipher = crypto_ablkcipher_reqtfm(areq);
    ctx = crypto_ablkcipher_ctx(cipher);

    if (aes_queue->hw_status == AES_IDLE)
        aes_queue->hw_status = AES_STARTED;

    aes_con->bytes_processed -= aes_con->nbytes;
    err = ablkcipher_enqueue_request(&aes_queue->list, &aes_con->arequest);
    if (err == -EBUSY) {
        spin_unlock_irqrestore(&aes_queue->lock, queue_flag);
        printk("Failed to enqueue request, ln: %d, err: %d\n",
               __LINE__, err);
        return -EINVAL;
    }

    spin_unlock_irqrestore(&aes_queue->lock, queue_flag);

    err = lq_deu_aes_core(ctx, aes_con->dst_buf, aes_con->src_buf, iv, nbytes, dir, mode);
    return err;
}
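/* Chunking policy: each pass submits at most DEU_MAX_PACKET_SIZE bytes and
 * never more than the current scatterlist entry (src->length).  The remaining
 * byte count lives in aes_con->bytes_processed and is decremented every time a
 * chunk is re-queued, so the worker thread keeps calling process_next_packet()
 * until it reaches zero. */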
/*! \fn static void process_queue(unsigned long data)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Tasklet to signal the dequeuing of the next packet to be processed
 * \param data Not used
*/
static void process_queue(unsigned long data)
{
    DEU_WAKEUP_EVENT(deu_dma_priv.deu_thread_wait, AES_ASYNC_EVENT,
                     deu_dma_priv.aes_event_flags);
}
/*! \fn static int aes_crypto_thread(void *data)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief AES thread that handles crypto requests from the upper layer & DMA
 * \param *data Not used
 * \return -EINVAL: DEU failure, -EBUSY: DEU HW busy, 0: exit thread
*/
static int aes_crypto_thread (void *data)
{
    struct aes_container *aes_con = NULL;
    struct ablkcipher_request *areq = NULL;
    int err = -EINVAL;
    unsigned long queue_flag;

    daemonize("lq_aes_thread");
    printk("AES Queue Manager Starting\n");

    for (;;) {
        DEU_WAIT_EVENT(deu_dma_priv.deu_thread_wait, AES_ASYNC_EVENT,
                       deu_dma_priv.aes_event_flags);

        spin_lock_irqsave(&aes_queue->lock, queue_flag);

        /* wait to prevent starting a crypto session before
         * exiting the dma interrupt thread.
         */
        if (aes_queue->hw_status == AES_STARTED) {
            areq = ablkcipher_dequeue_request(&aes_queue->list);
            aes_con = aes_container_cast(areq);
            aes_queue->hw_status = AES_BUSY;
        }
        else if (aes_queue->hw_status == AES_IDLE) {
            areq = ablkcipher_dequeue_request(&aes_queue->list);
            aes_con = aes_container_cast(areq);
            aes_queue->hw_status = AES_STARTED;
        }
        else if (aes_queue->hw_status == AES_BUSY) {
            areq = ablkcipher_dequeue_request(&aes_queue->list);
            aes_con = aes_container_cast(areq);
        }
        else if (aes_queue->hw_status == AES_COMPLETED) {
            lq_sg_complete(aes_con);
            aes_queue->hw_status = AES_IDLE;
            areq->base.complete(&areq->base, 0);
            spin_unlock_irqrestore(&aes_queue->lock, queue_flag);
            continue;
        }

        //printk("debug ln: %d, bytes proc: %d\n", __LINE__, aes_con->bytes_processed);
        spin_unlock_irqrestore(&aes_queue->lock, queue_flag);

        if (!aes_con) {
            printk("AES_CON return null\n");
            continue;
        }

        if (aes_con->bytes_processed == 0) {
            goto aes_done;
        }

        /* Process new packet or the next packet in a scatterlist */
        if (aes_con->flag & PROCESS_NEW_PACKET) {
            aes_con->flag = PROCESS_SCATTER;
            err = process_next_packet(aes_con, areq, PROCESS_NEW_PACKET);
        }
        else
            err = process_next_packet(aes_con, areq, PROCESS_SCATTER);

        if (err == -EINVAL) {
            areq->base.complete(&areq->base, err);
            lq_sg_complete(aes_con);
            printk("src/dst returned -EINVAL in func: %s\n", __func__);
        }
        else if (err > 0) {
            printk("src/dst returned zero in func: %s\n", __func__);
        }

        continue;

aes_done:
        //printk("debug line - %d, func: %s, qlen: %d\n", __LINE__, __func__, aes_queue->list.qlen);
        areq->base.complete(&areq->base, 0);
        lq_sg_complete(aes_con);

        spin_lock_irqsave(&aes_queue->lock, queue_flag);
        if (aes_queue->list.qlen > 0) {
            spin_unlock_irqrestore(&aes_queue->lock, queue_flag);
            tasklet_schedule(&aes_queue->aes_task);
        }
        else {
            aes_queue->hw_status = AES_IDLE;
            spin_unlock_irqrestore(&aes_queue->lock, queue_flag);
        }
    }

    return 0;
}
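/* hw_status acts as a small state machine shared between this thread and the
 * DMA completion path: AES_IDLE (nothing in flight), AES_STARTED (first chunk
 * submitted), AES_BUSY (follow-up chunks in flight) and AES_COMPLETED (last
 * chunk returned, request can be completed and the container freed).  The
 * handling above is the thread's side of that handshake; AES_COMPLETED is set
 * elsewhere (the exit routine does so explicitly, and the DMA completion path
 * is expected to do the same). */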
/*! \fn static int lq_aes_queue_mgr(struct aes_ctx *ctx, struct ablkcipher_request *areq,
 *                                  u8 *iv, int dir, int mode)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Starts the process of queuing DEU requests
 * \param *ctx Crypto algo context
 * \param *areq Pointer to the ablkcipher request
 * \param *iv Pointer to the input vector location
 * \param dir Encrypt/Decrypt
 * \param mode The mode the AES algo is running in
 * \return 0 if success, -EINPROGRESS if the request was queued
*/
static int lq_aes_queue_mgr(struct aes_ctx *ctx, struct ablkcipher_request *areq,
                            u8 *iv, int dir, int mode)
{
    int err = -EINVAL;
    unsigned long queue_flag;
    struct scatterlist *src = areq->src;
    struct scatterlist *dst = areq->dst;
    struct aes_container *aes_con = NULL;
    u32 remain, inc, nbytes = areq->nbytes;
    u32 chunk_bytes = src->length;

    aes_con = (struct aes_container *)kmalloc(sizeof(struct aes_container),
                                              GFP_ATOMIC);
    if (!aes_con) {
        printk("Cannot allocate memory for AES container, fn %s, ln %d\n",
               __func__, __LINE__);
        return -ENOMEM;
    }

    /* AES encrypt/decrypt mode */
    if (mode == 5) {
        nbytes = AES_BLOCK_SIZE;
        chunk_bytes = AES_BLOCK_SIZE;
        mode = 0;
    }

    aes_con->bytes_processed = nbytes;
    aes_con->arequest = *(areq);
    remain = nbytes;

    //printk("debug - Line: %d, func: %s, reqsize: %d, scattersize: %d\n",
    //       __LINE__, __func__, nbytes, chunk_bytes);

    if (remain > DEU_MAX_PACKET_SIZE)
        inc = DEU_MAX_PACKET_SIZE;
    else if (remain > chunk_bytes)
        inc = chunk_bytes;
    else
        inc = remain;

    remain -= inc;
    lq_sg_init(aes_con, src, dst);

    if (remain <= 0)
        aes_con->complete = 1;
    else
        aes_con->complete = 0;

    aes_con->nbytes = inc;
    aes_con->mode = mode;
    aes_con->encdec = dir;

    spin_lock_irqsave(&aes_queue->lock, queue_flag);

    if (aes_queue->hw_status == AES_STARTED || aes_queue->hw_status == AES_BUSY ||
        aes_queue->list.qlen > 0) {

        aes_con->flag = PROCESS_NEW_PACKET;
        err = ablkcipher_enqueue_request(&aes_queue->list, &aes_con->arequest);

        /* max queue length reached */
        if (err == -EBUSY) {
            spin_unlock_irqrestore(&aes_queue->lock, queue_flag);
            printk("Unable to enqueue request ln: %d, err: %d\n", __LINE__, err);
            return err;
        }

        spin_unlock_irqrestore(&aes_queue->lock, queue_flag);
        return -EINPROGRESS;
    }
    else if (aes_queue->hw_status == AES_IDLE)
        aes_queue->hw_status = AES_STARTED;

    aes_con->flag = PROCESS_SCATTER;
    aes_con->bytes_processed -= aes_con->nbytes;
    /* or enqueue the whole structure so as to get back the info
     * at the moment that it's queued. nbytes might be different */
    err = ablkcipher_enqueue_request(&aes_queue->list, &aes_con->arequest);
    if (err == -EBUSY) {
        spin_unlock_irqrestore(&aes_queue->lock, queue_flag);
        printk("Unable to enqueue request ln: %d, err: %d\n", __LINE__, err);
        return err;
    }

    spin_unlock_irqrestore(&aes_queue->lock, queue_flag);
    return lq_deu_aes_core(ctx, aes_con->dst_buf, aes_con->src_buf, iv, inc, dir, mode);
}
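/* Fast path vs. queued path: if the hardware is idle and the backlog is empty,
 * lq_aes_queue_mgr() still enqueues the embedded request (so the worker thread
 * can find it on completion) but submits the first chunk to the DEU directly
 * via lq_deu_aes_core().  If the hardware is already STARTED/BUSY or a backlog
 * exists, the request is only enqueued and the tasklet/thread picks it up later. */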
/*! \fn static int aes_setkey(struct crypto_ablkcipher *tfm, const u8 *in_key,
 *                            unsigned int keylen)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Sets the AES key
 * \param *tfm Pointer to the ablkcipher transform
 * \param *in_key Pointer to the input key
 * \param keylen Length of the AES key
 * \return 0 if success, -EINVAL if bad key length
*/
static int aes_setkey(struct crypto_ablkcipher *tfm, const u8 *in_key,
                      unsigned int keylen)
{
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
    unsigned long *flags = (unsigned long *) &tfm->base.crt_flags;

    DPRINTF(2, "set_key in %s\n", __FILE__);

    if (keylen != 16 && keylen != 24 && keylen != 32) {
        *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
        return -EINVAL;
    }

    ctx->key_length = keylen;
    DPRINTF(0, "ctx @%p, keylen %d, ctx->key_length %d\n", ctx, keylen, ctx->key_length);
    memcpy ((u8 *) (ctx->buf), in_key, keylen);

    return 0;
}
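/* The key is only stored in ctx->buf here; no software key schedule is kept.
 * Expansion is done by the hardware itself when lq_deu_aes_core() writes the
 * key registers and sets controlr.PNK (see the "pre-process DEcryption key"
 * note above). */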
/*! \fn static int aes_generic_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
 *                                    unsigned int keylen)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Sets the AES key
 * \param *tfm Pointer to the ablkcipher transform
 * \param *key Pointer to the input key
 * \param keylen Length of the AES key
 * \return 0 if success, -EINVAL if bad key length
*/
static int aes_generic_setkey(struct crypto_ablkcipher *tfm, const u8 *key,
                              unsigned int keylen)
{
    return aes_setkey(tfm, key, keylen);
}
/*! \fn static int rfc3686_aes_setkey(struct crypto_ablkcipher *tfm, const u8 *in_key,
 *                                    unsigned int keylen)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Sets the AES key and nonce for RFC 3686 CTR mode
 * \param *tfm Pointer to the ablkcipher transform
 * \param *in_key Pointer to the input key material (key followed by nonce)
 * \param keylen Length of the key material
 * \return 0 if success, -EINVAL if bad key length
*/
static int rfc3686_aes_setkey(struct crypto_ablkcipher *tfm,
                              const u8 *in_key, unsigned int keylen)
{
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
    unsigned long *flags = (unsigned long *)&tfm->base.crt_flags;

    DPRINTF(2, "ctr_rfc3686_aes_set_key in %s\n", __FILE__);

    memcpy(ctx->nonce, in_key + (keylen - CTR_RFC3686_NONCE_SIZE),
           CTR_RFC3686_NONCE_SIZE);

    keylen -= CTR_RFC3686_NONCE_SIZE;   // remove 4 bytes of nonce

    if (keylen != 16 && keylen != 24 && keylen != 32) {
        *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
        return -EINVAL;
    }

    ctx->key_length = keylen;

    memcpy ((u8 *) (ctx->buf), in_key, keylen);

    return 0;
}
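/* RFC 3686 key blob layout as passed by the crypto API (and parsed above):
 *
 *     +---------------------------+-------------+
 *     | AES key (16, 24 or 32 B)  | nonce (4 B) |
 *     +---------------------------+-------------+
 *
 * i.e. the keylen handed to rfc3686_aes_setkey() is the AES key length plus
 * CTR_RFC3686_NONCE_SIZE. */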
/*! \fn static int aes_encrypt(struct ablkcipher_request *areq)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Encrypt function for the AES algo
 * \param *areq Pointer to ablkcipher request in memory
 * \return 0 if success, -EINPROGRESS if encrypting, -EINVAL if failure
*/
static int aes_encrypt (struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);

    return lq_aes_queue_mgr(ctx, areq, NULL, CRYPTO_DIR_ENCRYPT, 5);
}

/*! \fn static int aes_decrypt(struct ablkcipher_request *areq)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Decrypt function for the AES algo
 * \param *areq Pointer to ablkcipher request in memory
 * \return 0 if success, -EINPROGRESS if decrypting, -EINVAL if failure
*/
static int aes_decrypt (struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);

    return lq_aes_queue_mgr(ctx, areq, NULL, CRYPTO_DIR_DECRYPT, 5);
}
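/* The last argument of lq_aes_queue_mgr() selects the hardware mode written to
 * aes->controlr.O: 0 ECB, 1 CBC, 2 OFB, 3 CFB, 4 CTR.  The value 5 used by
 * aes_encrypt()/aes_decrypt() above is a pseudo mode that lq_aes_queue_mgr()
 * reduces to a single AES_BLOCK_SIZE operation. */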
/*! \fn static int ecb_aes_encrypt(struct ablkcipher_request *areq)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Encrypt function for the AES ECB mode
 * \param *areq Pointer to ablkcipher request in memory
 * \return 0 if success, -EINPROGRESS if encrypting, -EINVAL if failure
*/
static int ecb_aes_encrypt (struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);

    return lq_aes_queue_mgr(ctx, areq, areq->info, CRYPTO_DIR_ENCRYPT, 0);
}

/*! \fn static int ecb_aes_decrypt(struct ablkcipher_request *areq)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Decrypt function for the AES ECB mode
 * \param *areq Pointer to ablkcipher request in memory
 * \return 0 if success, -EINPROGRESS if decrypting, -EINVAL if failure
*/
static int ecb_aes_decrypt(struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);

    return lq_aes_queue_mgr(ctx, areq, areq->info, CRYPTO_DIR_DECRYPT, 0);
}
/*! \fn static int cbc_aes_encrypt(struct ablkcipher_request *areq)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Encrypt function for the AES CBC mode
 * \param *areq Pointer to ablkcipher request in memory
 * \return 0 if success, -EINPROGRESS if encrypting, -EINVAL if failure
*/
static int cbc_aes_encrypt (struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);

    return lq_aes_queue_mgr(ctx, areq, areq->info, CRYPTO_DIR_ENCRYPT, 1);
}

/*! \fn static int cbc_aes_decrypt(struct ablkcipher_request *areq)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Decrypt function for the AES CBC mode
 * \param *areq Pointer to ablkcipher request in memory
 * \return 0 if success, -EINPROGRESS if decrypting, -EINVAL if failure
*/
static int cbc_aes_decrypt(struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);

    return lq_aes_queue_mgr(ctx, areq, areq->info, CRYPTO_DIR_DECRYPT, 1);
}
static int ofb_aes_encrypt (struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);

    return lq_aes_queue_mgr(ctx, areq, areq->info, CRYPTO_DIR_ENCRYPT, 2);
}

static int ofb_aes_decrypt(struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);

    return lq_aes_queue_mgr(ctx, areq, areq->info, CRYPTO_DIR_DECRYPT, 2);
}

static int cfb_aes_encrypt (struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);

    return lq_aes_queue_mgr(ctx, areq, areq->info, CRYPTO_DIR_ENCRYPT, 3);
}

static int cfb_aes_decrypt(struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);

    return lq_aes_queue_mgr(ctx, areq, areq->info, CRYPTO_DIR_DECRYPT, 3);
}
/*! \fn static int ctr_aes_encrypt(struct ablkcipher_request *areq)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Encrypt function for the AES CTR mode
 * \param *areq Pointer to ablkcipher request in memory
 * \return 0 if success, -EINPROGRESS if encrypting, -EINVAL if failure
*/
static int ctr_aes_encrypt (struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);

    return lq_aes_queue_mgr(ctx, areq, areq->info, CRYPTO_DIR_ENCRYPT, 4);
}

/*! \fn static int ctr_aes_decrypt(struct ablkcipher_request *areq)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Decrypt function for the AES CTR mode
 * \param *areq Pointer to ablkcipher request in memory
 * \return 0 if success, -EINPROGRESS if decrypting, -EINVAL if failure
*/
static int ctr_aes_decrypt(struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);

    return lq_aes_queue_mgr(ctx, areq, areq->info, CRYPTO_DIR_DECRYPT, 4);
}
/*! \fn static int rfc3686_aes_encrypt(struct ablkcipher_request *areq)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Encrypt function for the RFC 3686 AES CTR mode
 * \param *areq Pointer to ablkcipher request in memory
 * \return 0 if success, -EINPROGRESS if encrypting, -EINVAL if failure
*/
static int rfc3686_aes_encrypt(struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);
    int ret;
    u8 *info = areq->info;
    u8 rfc3686_iv[16];

    /* set up counter block */
    memcpy(rfc3686_iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
    memcpy(rfc3686_iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);

    /* initialize counter portion of counter block */
    *(__be32 *)(rfc3686_iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
        cpu_to_be32(1);

    areq->info = rfc3686_iv;
    ret = lq_aes_queue_mgr(ctx, areq, areq->info, CRYPTO_DIR_ENCRYPT, 4);

    return ret;
}
/*! \fn static int rfc3686_aes_decrypt(struct ablkcipher_request *areq)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Decrypt function for the RFC 3686 AES CTR mode
 * \param *areq Pointer to ablkcipher request in memory
 * \return 0 if success, -EINPROGRESS if decrypting, -EINVAL if failure
*/
static int rfc3686_aes_decrypt(struct ablkcipher_request *areq)
{
    struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
    struct aes_ctx *ctx = crypto_ablkcipher_ctx(cipher);
    int ret;
    u8 *info = areq->info;
    u8 rfc3686_iv[16];

    /* set up counter block */
    memcpy(rfc3686_iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
    memcpy(rfc3686_iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);

    /* initialize counter portion of counter block */
    *(__be32 *)(rfc3686_iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
        cpu_to_be32(1);

    areq->info = rfc3686_iv;
    ret = lq_aes_queue_mgr(ctx, areq, areq->info, CRYPTO_DIR_DECRYPT, 4);

    return ret;
}
struct lq_aes_alg {
    struct crypto_alg alg;
};

/* AES supported algo array */
static struct lq_aes_alg aes_drivers_alg[] = {
    {
        .alg = {
            .cra_name           = "aes",
            .cra_driver_name    = "ifxdeu-aes",
            .cra_flags          = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
            .cra_blocksize      = AES_BLOCK_SIZE,
            .cra_ctxsize        = sizeof(struct aes_ctx),
            .cra_type           = &crypto_ablkcipher_type,
            .cra_priority       = 300,
            .cra_module         = THIS_MODULE,
            .cra_u = {
                .ablkcipher = {
                    .setkey         = aes_setkey,
                    .encrypt        = aes_encrypt,
                    .decrypt        = aes_decrypt,
                    .min_keysize    = AES_MIN_KEY_SIZE,
                    .max_keysize    = AES_MAX_KEY_SIZE,
                    .ivsize         = AES_BLOCK_SIZE,
                }
            }
        }
    },
    {
        .alg = {
            .cra_name           = "ecb(aes)",
            .cra_driver_name    = "ifxdeu-ecb(aes)",
            .cra_flags          = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
            .cra_blocksize      = AES_BLOCK_SIZE,
            .cra_ctxsize        = sizeof(struct aes_ctx),
            .cra_type           = &crypto_ablkcipher_type,
            .cra_priority       = 300,
            .cra_module         = THIS_MODULE,
            .cra_u = {
                .ablkcipher = {
                    .setkey         = aes_generic_setkey,
                    .encrypt        = ecb_aes_encrypt,
                    .decrypt        = ecb_aes_decrypt,
                    .min_keysize    = AES_MIN_KEY_SIZE,
                    .max_keysize    = AES_MAX_KEY_SIZE,
                    .ivsize         = AES_BLOCK_SIZE,
                }
            }
        }
    },
    {
        .alg = {
            .cra_name           = "cbc(aes)",
            .cra_driver_name    = "ifxdeu-cbc(aes)",
            .cra_flags          = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
            .cra_blocksize      = AES_BLOCK_SIZE,
            .cra_ctxsize        = sizeof(struct aes_ctx),
            .cra_type           = &crypto_ablkcipher_type,
            .cra_priority       = 300,
            .cra_module         = THIS_MODULE,
            .cra_u = {
                .ablkcipher = {
                    .setkey         = aes_generic_setkey,
                    .encrypt        = cbc_aes_encrypt,
                    .decrypt        = cbc_aes_decrypt,
                    .min_keysize    = AES_MIN_KEY_SIZE,
                    .max_keysize    = AES_MAX_KEY_SIZE,
                    .ivsize         = AES_BLOCK_SIZE,
                }
            }
        }
    },
    {
        .alg = {
            .cra_name           = "ctr(aes)",
            .cra_driver_name    = "ifxdeu-ctr(aes)",
            .cra_flags          = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
            .cra_blocksize      = AES_BLOCK_SIZE,
            .cra_ctxsize        = sizeof(struct aes_ctx),
            .cra_type           = &crypto_ablkcipher_type,
            .cra_priority       = 300,
            .cra_module         = THIS_MODULE,
            .cra_u = {
                .ablkcipher = {
                    .setkey         = aes_generic_setkey,
                    .encrypt        = ctr_aes_encrypt,
                    .decrypt        = ctr_aes_decrypt,
                    .min_keysize    = AES_MIN_KEY_SIZE,
                    .max_keysize    = AES_MAX_KEY_SIZE,
                    .ivsize         = AES_BLOCK_SIZE,
                }
            }
        }
    },
    {
        .alg = {
            .cra_name           = "rfc3686(ctr(aes))",
            .cra_driver_name    = "ifxdeu-rfc3686(ctr(aes))",
            .cra_flags          = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
            .cra_blocksize      = AES_BLOCK_SIZE,
            .cra_ctxsize        = sizeof(struct aes_ctx),
            .cra_type           = &crypto_ablkcipher_type,
            .cra_priority       = 300,
            .cra_module         = THIS_MODULE,
            .cra_u = {
                .ablkcipher = {
                    .setkey         = rfc3686_aes_setkey,
                    .encrypt        = rfc3686_aes_encrypt,
                    .decrypt        = rfc3686_aes_decrypt,
                    .min_keysize    = AES_MIN_KEY_SIZE,
                    .max_keysize    = CTR_RFC3686_MAX_KEY_SIZE,
                    //.max_keysize  = AES_MAX_KEY_SIZE,
                    //.ivsize       = CTR_RFC3686_IV_SIZE,
                    .ivsize         = AES_BLOCK_SIZE,   // else cannot reg
                }
            }
        }
    }
};
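/* Illustrative use from another kernel module (not part of this driver): once
 * these algorithms are registered, they are reached through the generic
 * ablkcipher API.  Names, buffers and error handling below are only a sketch;
 * a real caller also needs a completion callback because the driver returns
 * -EINPROGRESS.
 *
 *     struct crypto_ablkcipher *tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *     struct ablkcipher_request *req;
 *     struct scatterlist sg_in, sg_out;
 *
 *     crypto_ablkcipher_setkey(tfm, key, 16);
 *     req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *     ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *                                     my_complete_cb, my_ctx);
 *     sg_init_one(&sg_in, plaintext, len);
 *     sg_init_one(&sg_out, ciphertext, len);
 *     ablkcipher_request_set_crypt(req, &sg_in, &sg_out, len, iv);
 *     crypto_ablkcipher_encrypt(req);    // may return -EINPROGRESS
 */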
/*! \fn int __init lqdeu_async_aes_init (void)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Initializes the async. AES driver
 * \return 0 if success, -EINVAL if failure
*/
int __init lqdeu_async_aes_init (void)
{
    int i, j, ret = -EINVAL;

#define IFX_DEU_DRV_VERSION "2.0.0"
    printk(KERN_INFO "Lantiq Technologies DEU Driver version %s\n", IFX_DEU_DRV_VERSION);

    for (i = 0; i < ARRAY_SIZE(aes_drivers_alg); i++) {
        ret = crypto_register_alg(&aes_drivers_alg[i].alg);
        printk("driver: %s\n", aes_drivers_alg[i].alg.cra_name);
        if (ret)
            goto aes_err;
    }

    printk (KERN_NOTICE "Lantiq DEU AES initialized %s %s.\n",
            disable_multiblock ? "" : " (multiblock)", disable_deudma ? "" : " (DMA)");

    return ret;

aes_err:
    for (j = 0; j < i; j++)
        crypto_unregister_alg(&aes_drivers_alg[j].alg);

    printk(KERN_ERR "Lantiq %s driver initialization failed!\n", (char *)&aes_drivers_alg[i].alg.cra_driver_name);
    return ret;

ctr_rfc3686_aes_err:
    for (i = 0; i < ARRAY_SIZE(aes_drivers_alg); i++) {
        if (!strcmp((char *)&aes_drivers_alg[i].alg.cra_name, "rfc3686(ctr(aes))"))
            crypto_unregister_alg(&aes_drivers_alg[i].alg);
    }

    printk (KERN_ERR "Lantiq ctr_rfc3686_aes initialization failed!\n");
    return ret;
}
/*! \fn void __exit lqdeu_fini_async_aes (void)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief Unregisters the AES driver
*/
void __exit lqdeu_fini_async_aes (void)
{
    int i;

    for (i = 0; i < ARRAY_SIZE(aes_drivers_alg); i++)
        crypto_unregister_alg(&aes_drivers_alg[i].alg);

    aes_queue->hw_status = AES_COMPLETED;

    DEU_WAKEUP_EVENT(deu_dma_priv.deu_thread_wait, AES_ASYNC_EVENT,
                     deu_dma_priv.aes_event_flags);
}