1 /******************************************************************************
3 ** FILE NAME : ifxmips_aes.c
5 ** MODULES : DEU Module
7 ** DATE : September 8, 2009
8 ** AUTHOR : Mohammad Firdaus
9 ** DESCRIPTION : Data Encryption Unit Driver for AES Algorithm
10 ** COPYRIGHT : Copyright (c) 2009
11 ** Infineon Technologies AG
12 ** Am Campeon 1-12, 85579 Neubiberg, Germany
14 ** This program is free software; you can redistribute it and/or modify
15 ** it under the terms of the GNU General Public License as published by
16 ** the Free Software Foundation; either version 2 of the License, or
17 ** (at your option) any later version.
20 ** $Date $Author $Comment
21 ** 08,Sept 2009 Mohammad Firdaus Initial UEIP release
22 *******************************************************************************/
24 \defgroup IFX_DEU IFX_DEU_DRIVERS
26 \brief ifx DEU driver module
32 \brief AES Encryption Driver main file
36 \defgroup IFX_AES_FUNCTIONS IFX_AES_FUNCTIONS
38 \brief IFX AES driver Functions
42 /* Project Header Files */
43 #if defined(CONFIG_MODVERSIONS)
45 #include <linux/modeversions>
48 #include <linux/version.h>
49 #include <linux/module.h>
50 #include <linux/init.h>
51 #include <linux/proc_fs.h>
53 #include <linux/types.h>
54 #include <linux/errno.h>
55 #include <linux/crypto.h>
56 #include <linux/interrupt.h>
57 #include <linux/delay.h>
58 #include <asm/byteorder.h>
59 #include <crypto/algapi.h>
60 #include <crypto/b128ops.h>
61 #include <crypto/gcm.h>
62 #include <crypto/gf128mul.h>
63 #include <crypto/scatterwalk.h>
64 #include <crypto/xts.h>
65 #include <crypto/internal/aead.h>
66 #include <crypto/internal/hash.h>
67 #include <crypto/internal/skcipher.h>
69 #include "ifxmips_deu.h"
71 #if defined(CONFIG_DANUBE)
72 #include "ifxmips_deu_danube.h"
73 extern int ifx_danube_pre_1_4
;
74 #elif defined(CONFIG_AR9)
75 #include "ifxmips_deu_ar9.h"
76 #elif defined(CONFIG_VR9) || defined(CONFIG_AR10)
77 #include "ifxmips_deu_vr9.h"
79 #error "Unkown platform"
82 /* DMA related header and variables */
85 #define CRTCL_SECT_INIT spin_lock_init(&aes_lock)
86 #define CRTCL_SECT_START spin_lock_irqsave(&aes_lock, flag)
87 #define CRTCL_SECT_END spin_unlock_irqrestore(&aes_lock, flag)
89 /* Definition of constants */
90 #define AES_START IFX_AES_CON
91 #define AES_MIN_KEY_SIZE 16
92 #define AES_MAX_KEY_SIZE 32
93 #define AES_BLOCK_SIZE 16
94 #define AES_BLOCK_WORDS 4
95 #define CTR_RFC3686_NONCE_SIZE 4
96 #define CTR_RFC3686_IV_SIZE 8
97 #define CTR_RFC3686_MIN_KEY_SIZE (AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE)
98 #define CTR_RFC3686_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE)
99 #define AES_CBCMAC_DBN_TEMP_SIZE 128
102 extern char debug_level
;
103 #define DPRINTF(level, format, args...) if (level < debug_level) printk(KERN_INFO "[%s %s %d]: " format, __FILE__, __func__, __LINE__, ##args);
105 #define DPRINTF(level, format, args...)
106 #endif /* CRYPTO_DEBUG */
108 /* Function decleration */
109 int aes_chip_init(void);
110 u32
endian_swap(u32 input
);
111 u32
input_swap(u32 input
);
112 u32
* memory_alignment(const u8
*arg
, u32
*buff_alloc
, int in_out
, int nbytes
);
113 void aes_dma_memory_copy(u32
*outcopy
, u32
*out_dma
, u8
*out_arg
, int nbytes
);
114 void des_dma_memory_copy(u32
*outcopy
, u32
*out_dma
, u8
*out_arg
, int nbytes
);
115 int aes_memory_allocate(int value
);
116 int des_memory_allocate(int value
);
117 void memory_release(u32
*addr
);
120 extern void ifx_deu_aes (void *ctx_arg
, uint8_t *out_arg
, const uint8_t *in_arg
,
121 uint8_t *iv_arg
, size_t nbytes
, int encdec
, int mode
);
122 /* End of function decleration */
126 u8 buf
[AES_MAX_KEY_SIZE
];
127 u8 tweakkey
[AES_MAX_KEY_SIZE
];
128 u8 nonce
[CTR_RFC3686_NONCE_SIZE
];
129 u8 lastbuffer
[4 * XTS_BLOCK_SIZE
];
134 u32 (*temp
)[AES_BLOCK_WORDS
];
135 u8 block
[AES_BLOCK_SIZE
];
136 u8 hash
[AES_BLOCK_SIZE
];
137 struct gf128mul_4k
*gf128
;
140 extern int disable_deudma
;
141 extern int disable_multiblock
;
143 /*! \fn int aes_set_key (struct crypto_tfm *tfm, const uint8_t *in_key, unsigned int key_len)
144 * \ingroup IFX_AES_FUNCTIONS
145 * \brief sets the AES keys
146 * \param tfm linux crypto algo transform
147 * \param in_key input key
148 * \param key_len key lengths of 16, 24 and 32 bytes supported
149 * \return -EINVAL - bad key length, 0 - SUCCESS
151 int aes_set_key (struct crypto_tfm
*tfm
, const u8
*in_key
, unsigned int key_len
)
153 struct aes_ctx
*ctx
= crypto_tfm_ctx(tfm
);
155 //printk("set_key in %s\n", __FILE__);
159 if (key_len
!= 16 && key_len
!= 24 && key_len
!= 32) {
163 ctx
->key_length
= key_len
;
165 DPRINTF(0, "ctx @%p, key_len %d, ctx->key_length %d\n", ctx
, key_len
, ctx
->key_length
);
166 memcpy ((u8
*) (ctx
->buf
), in_key
, key_len
);
172 /*! \fn int aes_set_key_skcipher (struct crypto_skcipher *tfm, const uint8_t *in_key, unsigned int key_len)
173 * \ingroup IFX_AES_FUNCTIONS
174 * \brief sets the AES keys for skcipher
175 * \param tfm linux crypto skcipher
176 * \param in_key input key
177 * \param key_len key lengths of 16, 24 and 32 bytes supported
178 * \return -EINVAL - bad key length, 0 - SUCCESS
180 int aes_set_key_skcipher (struct crypto_skcipher
*tfm
, const u8
*in_key
, unsigned int key_len
)
182 return aes_set_key(crypto_skcipher_tfm(tfm
), in_key
, key_len
);
186 /*! \fn void aes_set_key_skcipher (void *ctx_arg)
187 * \ingroup IFX_AES_FUNCTIONS
188 * \brief sets the AES key to the hardware, requires spinlock to be set by caller
189 * \param ctx_arg crypto algo context
192 void aes_set_key_hw (void *ctx_arg
)
194 /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
195 volatile struct aes_t
*aes
= (volatile struct aes_t
*) AES_START
;
196 struct aes_ctx
*ctx
= (struct aes_ctx
*)ctx_arg
;
197 u8
*in_key
= ctx
->buf
;
198 int key_len
= ctx
->key_length
;
199 /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
201 if (ctx
->use_tweak
) in_key
= ctx
->tweakkey
;
203 /* 128, 192 or 256 bit key length */
204 aes
->controlr
.K
= key_len
/ 8 - 2;
205 if (key_len
== 128 / 8) {
206 aes
->K3R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 0));
207 aes
->K2R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 1));
208 aes
->K1R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 2));
209 aes
->K0R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 3));
211 else if (key_len
== 192 / 8) {
212 aes
->K5R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 0));
213 aes
->K4R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 1));
214 aes
->K3R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 2));
215 aes
->K2R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 3));
216 aes
->K1R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 4));
217 aes
->K0R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 5));
219 else if (key_len
== 256 / 8) {
220 aes
->K7R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 0));
221 aes
->K6R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 1));
222 aes
->K5R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 2));
223 aes
->K4R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 3));
224 aes
->K3R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 4));
225 aes
->K2R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 5));
226 aes
->K1R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 6));
227 aes
->K0R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 7));
230 printk (KERN_ERR
"[%s %s %d]: Invalid key_len : %d\n", __FILE__
, __func__
, __LINE__
, key_len
);
234 /* let HW pre-process DEcryption key in any case (even if
235 ENcryption is used). Key Valid (KV) bit is then only
236 checked in decryption routine! */
237 aes
->controlr
.PNK
= 1;
242 /*! \fn void ifx_deu_aes (void *ctx_arg, u8 *out_arg, const u8 *in_arg, u8 *iv_arg, size_t nbytes, int encdec, int mode)
243 * \ingroup IFX_AES_FUNCTIONS
244 * \brief main interface to AES hardware
245 * \param ctx_arg crypto algo context
246 * \param out_arg output bytestream
247 * \param in_arg input bytestream
248 * \param iv_arg initialization vector
249 * \param nbytes length of bytestream
250 * \param encdec 1 for encrypt; 0 for decrypt
251 * \param mode operation mode such as ebc, cbc, ctr
254 void ifx_deu_aes (void *ctx_arg
, u8
*out_arg
, const u8
*in_arg
,
255 u8
*iv_arg
, size_t nbytes
, int encdec
, int mode
)
258 /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
259 volatile struct aes_t
*aes
= (volatile struct aes_t
*) AES_START
;
260 //struct aes_ctx *ctx = (struct aes_ctx *)ctx_arg;
262 /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
264 int byte_cnt
= nbytes
;
268 aes_set_key_hw (ctx_arg
);
270 aes
->controlr
.E_D
= !encdec
; //encryption
271 aes
->controlr
.O
= mode
; //0 ECB 1 CBC 2 OFB 3 CFB 4 CTR
273 //aes->controlr.F = 128; //default; only for CFB and OFB modes; change only for customer-specific apps
275 aes
->IV3R
= DEU_ENDIAN_SWAP(*(u32
*) iv_arg
);
276 aes
->IV2R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 1));
277 aes
->IV1R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 2));
278 aes
->IV0R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 3));
283 while (byte_cnt
>= 16) {
285 aes
->ID3R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 0));
286 aes
->ID2R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 1));
287 aes
->ID1R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 2));
288 aes
->ID0R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 3)); /* start crypto */
290 while (aes
->controlr
.BUS
) {
291 // this will not take long
294 *((volatile u32
*) out_arg
+ (i
* 4) + 0) = aes
->OD3R
;
295 *((volatile u32
*) out_arg
+ (i
* 4) + 1) = aes
->OD2R
;
296 *((volatile u32
*) out_arg
+ (i
* 4) + 2) = aes
->OD1R
;
297 *((volatile u32
*) out_arg
+ (i
* 4) + 3) = aes
->OD0R
;
303 /* To handle all non-aligned bytes (not aligned to 16B size) */
305 u8 temparea
[16] = {0,};
307 memcpy(temparea
, ((u32
*) in_arg
+ (i
* 4)), byte_cnt
);
309 aes
->ID3R
= INPUT_ENDIAN_SWAP(*((u32
*) temparea
+ 0));
310 aes
->ID2R
= INPUT_ENDIAN_SWAP(*((u32
*) temparea
+ 1));
311 aes
->ID1R
= INPUT_ENDIAN_SWAP(*((u32
*) temparea
+ 2));
312 aes
->ID0R
= INPUT_ENDIAN_SWAP(*((u32
*) temparea
+ 3)); /* start crypto */
314 while (aes
->controlr
.BUS
) {
317 *((volatile u32
*) temparea
+ 0) = aes
->OD3R
;
318 *((volatile u32
*) temparea
+ 1) = aes
->OD2R
;
319 *((volatile u32
*) temparea
+ 2) = aes
->OD1R
;
320 *((volatile u32
*) temparea
+ 3) = aes
->OD0R
;
322 memcpy(((u32
*) out_arg
+ (i
* 4)), temparea
, byte_cnt
);
325 //tc.chen : copy iv_arg back
327 *((u32
*) iv_arg
) = DEU_ENDIAN_SWAP(aes
->IV3R
);
328 *((u32
*) iv_arg
+ 1) = DEU_ENDIAN_SWAP(aes
->IV2R
);
329 *((u32
*) iv_arg
+ 2) = DEU_ENDIAN_SWAP(aes
->IV1R
);
330 *((u32
*) iv_arg
+ 3) = DEU_ENDIAN_SWAP(aes
->IV0R
);
337 * \fn int ctr_rfc3686_aes_set_key (struct crypto_tfm *tfm, const uint8_t *in_key, unsigned int key_len)
338 * \ingroup IFX_AES_FUNCTIONS
339 * \brief sets RFC3686 key
340 * \param tfm linux crypto algo transform
341 * \param in_key input key
342 * \param key_len key lengths of 20, 28 and 36 bytes supported; last 4 bytes is nonce
343 * \return 0 - SUCCESS
344 * -EINVAL - bad key length
346 int ctr_rfc3686_aes_set_key (struct crypto_tfm
*tfm
, const uint8_t *in_key
, unsigned int key_len
)
348 struct aes_ctx
*ctx
= crypto_tfm_ctx(tfm
);
350 //printk("ctr_rfc3686_aes_set_key in %s\n", __FILE__);
352 memcpy(ctx
->nonce
, in_key
+ (key_len
- CTR_RFC3686_NONCE_SIZE
),
353 CTR_RFC3686_NONCE_SIZE
);
355 key_len
-= CTR_RFC3686_NONCE_SIZE
; // remove 4 bytes of nonce
357 if (key_len
!= 16 && key_len
!= 24 && key_len
!= 32) {
361 ctx
->key_length
= key_len
;
364 memcpy ((u8
*) (ctx
->buf
), in_key
, key_len
);
370 * \fn int ctr_rfc3686_aes_set_key_skcipher (struct crypto_skcipher *tfm, const uint8_t *in_key, unsigned int key_len)
371 * \ingroup IFX_AES_FUNCTIONS
372 * \brief sets RFC3686 key for skcipher
373 * \param tfm linux crypto skcipher
374 * \param in_key input key
375 * \param key_len key lengths of 20, 28 and 36 bytes supported; last 4 bytes is nonce
376 * \return 0 - SUCCESS
377 * -EINVAL - bad key length
379 int ctr_rfc3686_aes_set_key_skcipher (struct crypto_skcipher
*tfm
, const uint8_t *in_key
, unsigned int key_len
)
381 return ctr_rfc3686_aes_set_key(crypto_skcipher_tfm(tfm
), in_key
, key_len
);
384 /*! \fn void ifx_deu_aes (void *ctx_arg, u8 *out_arg, const u8 *in_arg, u8 *iv_arg, u32 nbytes, int encdec, int mode)
385 * \ingroup IFX_AES_FUNCTIONS
386 * \brief main interface with deu hardware in DMA mode
387 * \param ctx_arg crypto algo context
388 * \param out_arg output bytestream
389 * \param in_arg input bytestream
390 * \param iv_arg initialization vector
391 * \param nbytes length of bytestream
392 * \param encdec 1 for encrypt; 0 for decrypt
393 * \param mode operation mode such as ebc, cbc, ctr
397 //definitions from linux/include/crypto.h:
398 //#define CRYPTO_TFM_MODE_ECB 0x00000001
399 //#define CRYPTO_TFM_MODE_CBC 0x00000002
400 //#define CRYPTO_TFM_MODE_CFB 0x00000004
401 //#define CRYPTO_TFM_MODE_CTR 0x00000008
402 //#define CRYPTO_TFM_MODE_OFB 0x00000010 // not even defined
403 //but hardware definition: 0 ECB 1 CBC 2 OFB 3 CFB 4 CTR
405 /*! \fn void ifx_deu_aes_ecb (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
406 * \ingroup IFX_AES_FUNCTIONS
407 * \brief sets AES hardware to ECB mode
408 * \param ctx crypto algo context
409 * \param dst output bytestream
410 * \param src input bytestream
411 * \param iv initialization vector
412 * \param nbytes length of bytestream
413 * \param encdec 1 for encrypt; 0 for decrypt
414 * \param inplace not used
416 void ifx_deu_aes_ecb (void *ctx
, uint8_t *dst
, const uint8_t *src
,
417 uint8_t *iv
, size_t nbytes
, int encdec
, int inplace
)
419 ifx_deu_aes (ctx
, dst
, src
, NULL
, nbytes
, encdec
, 0);
422 /*! \fn void ifx_deu_aes_cbc (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
423 * \ingroup IFX_AES_FUNCTIONS
424 * \brief sets AES hardware to CBC mode
425 * \param ctx crypto algo context
426 * \param dst output bytestream
427 * \param src input bytestream
428 * \param iv initialization vector
429 * \param nbytes length of bytestream
430 * \param encdec 1 for encrypt; 0 for decrypt
431 * \param inplace not used
433 void ifx_deu_aes_cbc (void *ctx
, uint8_t *dst
, const uint8_t *src
,
434 uint8_t *iv
, size_t nbytes
, int encdec
, int inplace
)
436 ifx_deu_aes (ctx
, dst
, src
, iv
, nbytes
, encdec
, 1);
439 /*! \fn void ifx_deu_aes_ofb (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
440 * \ingroup IFX_AES_FUNCTIONS
441 * \brief sets AES hardware to OFB mode
442 * \param ctx crypto algo context
443 * \param dst output bytestream
444 * \param src input bytestream
445 * \param iv initialization vector
446 * \param nbytes length of bytestream
447 * \param encdec 1 for encrypt; 0 for decrypt
448 * \param inplace not used
450 void ifx_deu_aes_ofb (void *ctx
, uint8_t *dst
, const uint8_t *src
,
451 uint8_t *iv
, size_t nbytes
, int encdec
, int inplace
)
453 ifx_deu_aes (ctx
, dst
, src
, iv
, nbytes
, encdec
, 2);
456 /*! \fn void ifx_deu_aes_cfb (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
457 * \ingroup IFX_AES_FUNCTIONS
458 * \brief sets AES hardware to CFB mode
459 * \param ctx crypto algo context
460 * \param dst output bytestream
461 * \param src input bytestream
462 * \param iv initialization vector
463 * \param nbytes length of bytestream
464 * \param encdec 1 for encrypt; 0 for decrypt
465 * \param inplace not used
467 void ifx_deu_aes_cfb (void *ctx
, uint8_t *dst
, const uint8_t *src
,
468 uint8_t *iv
, size_t nbytes
, int encdec
, int inplace
)
470 ifx_deu_aes (ctx
, dst
, src
, iv
, nbytes
, encdec
, 3);
473 /*! \fn void ifx_deu_aes_ctr (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
474 * \ingroup IFX_AES_FUNCTIONS
475 * \brief sets AES hardware to CTR mode
476 * \param ctx crypto algo context
477 * \param dst output bytestream
478 * \param src input bytestream
479 * \param iv initialization vector
480 * \param nbytes length of bytestream
481 * \param encdec 1 for encrypt; 0 for decrypt
482 * \param inplace not used
484 void ifx_deu_aes_ctr (void *ctx
, uint8_t *dst
, const uint8_t *src
,
485 uint8_t *iv
, size_t nbytes
, int encdec
, int inplace
)
487 ifx_deu_aes (ctx
, dst
, src
, iv
, nbytes
, encdec
, 4);
490 /*! \fn void aes_encrypt (struct crypto_tfm *tfm, uint8_t *out, const uint8_t *in)
491 * \ingroup IFX_AES_FUNCTIONS
492 * \brief encrypt AES_BLOCK_SIZE of data
493 * \param tfm linux crypto algo transform
494 * \param out output bytestream
495 * \param in input bytestream
497 void aes_encrypt (struct crypto_tfm
*tfm
, uint8_t *out
, const uint8_t *in
)
499 struct aes_ctx
*ctx
= crypto_tfm_ctx(tfm
);
500 ifx_deu_aes (ctx
, out
, in
, NULL
, AES_BLOCK_SIZE
,
501 CRYPTO_DIR_ENCRYPT
, 0);
504 /*! \fn void aes_decrypt (struct crypto_tfm *tfm, uint8_t *out, const uint8_t *in)
505 * \ingroup IFX_AES_FUNCTIONS
506 * \brief decrypt AES_BLOCK_SIZE of data
507 * \param tfm linux crypto algo transform
508 * \param out output bytestream
509 * \param in input bytestream
511 void aes_decrypt (struct crypto_tfm
*tfm
, uint8_t *out
, const uint8_t *in
)
513 struct aes_ctx
*ctx
= crypto_tfm_ctx(tfm
);
514 ifx_deu_aes (ctx
, out
, in
, NULL
, AES_BLOCK_SIZE
,
515 CRYPTO_DIR_DECRYPT
, 0);
519 * \brief AES function mappings
521 struct crypto_alg ifxdeu_aes_alg
= {
523 .cra_driver_name
= "ifxdeu-aes",
525 .cra_flags
= CRYPTO_ALG_TYPE_CIPHER
| CRYPTO_ALG_KERN_DRIVER_ONLY
,
526 .cra_blocksize
= AES_BLOCK_SIZE
,
527 .cra_ctxsize
= sizeof(struct aes_ctx
),
528 .cra_module
= THIS_MODULE
,
529 .cra_list
= LIST_HEAD_INIT(ifxdeu_aes_alg
.cra_list
),
532 .cia_min_keysize
= AES_MIN_KEY_SIZE
,
533 .cia_max_keysize
= AES_MAX_KEY_SIZE
,
534 .cia_setkey
= aes_set_key
,
535 .cia_encrypt
= aes_encrypt
,
536 .cia_decrypt
= aes_decrypt
,
541 /*! \fn int ecb_aes_encrypt(struct skcipher_req *req)
542 * \ingroup IFX_AES_FUNCTIONS
543 * \brief ECB AES encrypt using linux crypto skcipher
544 * \param req skcipher request
547 int ecb_aes_encrypt(struct skcipher_request
*req
)
549 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
550 struct skcipher_walk walk
;
552 unsigned int enc_bytes
, nbytes
;
554 err
= skcipher_walk_virt(&walk
, req
, false);
556 while ((nbytes
= enc_bytes
= walk
.nbytes
)) {
557 enc_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
558 ifx_deu_aes_ecb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
559 NULL
, enc_bytes
, CRYPTO_DIR_ENCRYPT
, 0);
560 nbytes
&= AES_BLOCK_SIZE
- 1;
561 err
= skcipher_walk_done(&walk
, nbytes
);
567 /*! \fn int ecb_aes_decrypt(struct skcipher_req *req)
568 * \ingroup IFX_AES_FUNCTIONS
569 * \brief ECB AES decrypt using linux crypto skcipher
570 * \param req skcipher request
573 int ecb_aes_decrypt(struct skcipher_request
*req
)
575 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
576 struct skcipher_walk walk
;
578 unsigned int dec_bytes
, nbytes
;
580 err
= skcipher_walk_virt(&walk
, req
, false);
582 while ((nbytes
= dec_bytes
= walk
.nbytes
)) {
583 dec_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
584 ifx_deu_aes_ecb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
585 NULL
, dec_bytes
, CRYPTO_DIR_DECRYPT
, 0);
586 nbytes
&= AES_BLOCK_SIZE
- 1;
587 err
= skcipher_walk_done(&walk
, nbytes
);
594 * \brief AES function mappings
596 struct skcipher_alg ifxdeu_ecb_aes_alg
= {
597 .base
.cra_name
= "ecb(aes)",
598 .base
.cra_driver_name
= "ifxdeu-ecb(aes)",
599 .base
.cra_priority
= 400,
600 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
| CRYPTO_ALG_KERN_DRIVER_ONLY
,
601 .base
.cra_blocksize
= AES_BLOCK_SIZE
,
602 .base
.cra_ctxsize
= sizeof(struct aes_ctx
),
603 .base
.cra_module
= THIS_MODULE
,
604 .base
.cra_list
= LIST_HEAD_INIT(ifxdeu_ecb_aes_alg
.base
.cra_list
),
605 .min_keysize
= AES_MIN_KEY_SIZE
,
606 .max_keysize
= AES_MAX_KEY_SIZE
,
607 .setkey
= aes_set_key_skcipher
,
608 .encrypt
= ecb_aes_encrypt
,
609 .decrypt
= ecb_aes_decrypt
,
612 /*! \fn int ecb_aes_encrypt(struct skcipher_req *req)
613 * \ingroup IFX_AES_FUNCTIONS
614 * \brief CBC AES encrypt using linux crypto skcipher
615 * \param req skcipher request
618 int cbc_aes_encrypt(struct skcipher_request
*req
)
620 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
621 struct skcipher_walk walk
;
623 unsigned int enc_bytes
, nbytes
;
625 err
= skcipher_walk_virt(&walk
, req
, false);
627 while ((nbytes
= enc_bytes
= walk
.nbytes
)) {
629 enc_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
630 ifx_deu_aes_cbc(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
631 iv
, enc_bytes
, CRYPTO_DIR_ENCRYPT
, 0);
632 nbytes
&= AES_BLOCK_SIZE
- 1;
633 err
= skcipher_walk_done(&walk
, nbytes
);
639 /*! \fn int cbc_aes_decrypt(struct skcipher_req *req)
640 * \ingroup IFX_AES_FUNCTIONS
641 * \brief CBC AES decrypt using linux crypto skcipher
642 * \param req skcipher request
645 int cbc_aes_decrypt(struct skcipher_request
*req
)
647 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
648 struct skcipher_walk walk
;
650 unsigned int dec_bytes
, nbytes
;
652 err
= skcipher_walk_virt(&walk
, req
, false);
654 while ((nbytes
= dec_bytes
= walk
.nbytes
)) {
656 dec_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
657 ifx_deu_aes_cbc(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
658 iv
, dec_bytes
, CRYPTO_DIR_DECRYPT
, 0);
659 nbytes
&= AES_BLOCK_SIZE
- 1;
660 err
= skcipher_walk_done(&walk
, nbytes
);
667 * \brief AES function mappings
669 struct skcipher_alg ifxdeu_cbc_aes_alg
= {
670 .base
.cra_name
= "cbc(aes)",
671 .base
.cra_driver_name
= "ifxdeu-cbc(aes)",
672 .base
.cra_priority
= 400,
673 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
| CRYPTO_ALG_KERN_DRIVER_ONLY
,
674 .base
.cra_blocksize
= AES_BLOCK_SIZE
,
675 .base
.cra_ctxsize
= sizeof(struct aes_ctx
),
676 .base
.cra_module
= THIS_MODULE
,
677 .base
.cra_list
= LIST_HEAD_INIT(ifxdeu_cbc_aes_alg
.base
.cra_list
),
678 .min_keysize
= AES_MIN_KEY_SIZE
,
679 .max_keysize
= AES_MAX_KEY_SIZE
,
680 .ivsize
= AES_BLOCK_SIZE
,
681 .setkey
= aes_set_key_skcipher
,
682 .encrypt
= cbc_aes_encrypt
,
683 .decrypt
= cbc_aes_decrypt
,
686 /*! \fn void ifx_deu_aes_xts (void *ctx_arg, u8 *out_arg, const u8 *in_arg, u8 *iv_arg, size_t nbytes, int encdec)
687 * \ingroup IFX_AES_FUNCTIONS
688 * \brief main interface to AES hardware for XTS impl
689 * \param ctx_arg crypto algo context
690 * \param out_arg output bytestream
691 * \param in_arg input bytestream
692 * \param iv_arg initialization vector
693 * \param nbytes length of bytestream
694 * \param encdec 1 for encrypt; 0 for decrypt
697 void ifx_deu_aes_xts (void *ctx_arg
, u8
*out_arg
, const u8
*in_arg
,
698 u8
*iv_arg
, size_t nbytes
, int encdec
)
700 /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
701 volatile struct aes_t
*aes
= (volatile struct aes_t
*) AES_START
;
702 //struct aes_ctx *ctx = (struct aes_ctx *)ctx_arg;
704 /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
707 int byte_cnt
= nbytes
;
711 aes_set_key_hw (ctx_arg
);
713 aes
->controlr
.E_D
= !encdec
; //encryption
714 aes
->controlr
.O
= 1; //0 ECB 1 CBC 2 OFB 3 CFB 4 CTR - CBC mode for xts
717 while (byte_cnt
>= 16) {
720 if (((byte_cnt
% 16) > 0) && (byte_cnt
< (2*XTS_BLOCK_SIZE
))) {
721 memcpy(oldiv
, iv_arg
, 16);
722 gf128mul_x_ble((le128
*)iv_arg
, (le128
*)iv_arg
);
724 u128_xor((u128
*)((u32
*) in_arg
+ (i
* 4) + 0), (u128
*)((u32
*) in_arg
+ (i
* 4) + 0), (u128
*)iv_arg
);
727 aes
->IV3R
= DEU_ENDIAN_SWAP(*(u32
*) iv_arg
);
728 aes
->IV2R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 1));
729 aes
->IV1R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 2));
730 aes
->IV0R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 3));
732 aes
->ID3R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 0));
733 aes
->ID2R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 1));
734 aes
->ID1R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 2));
735 aes
->ID0R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 3)); /* start crypto */
737 while (aes
->controlr
.BUS
) {
738 // this will not take long
741 *((volatile u32
*) out_arg
+ (i
* 4) + 0) = aes
->OD3R
;
742 *((volatile u32
*) out_arg
+ (i
* 4) + 1) = aes
->OD2R
;
743 *((volatile u32
*) out_arg
+ (i
* 4) + 2) = aes
->OD1R
;
744 *((volatile u32
*) out_arg
+ (i
* 4) + 3) = aes
->OD0R
;
747 u128_xor((u128
*)((volatile u32
*) out_arg
+ (i
* 4) + 0), (u128
*)((volatile u32
*) out_arg
+ (i
* 4) + 0), (u128
*)iv_arg
);
749 gf128mul_x_ble((le128
*)iv_arg
, (le128
*)iv_arg
);
755 u8 state
[XTS_BLOCK_SIZE
] = {0,};
757 if (!encdec
) memcpy(iv_arg
, oldiv
, 16);
759 aes
->IV3R
= DEU_ENDIAN_SWAP(*(u32
*) iv_arg
);
760 aes
->IV2R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 1));
761 aes
->IV1R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 2));
762 aes
->IV0R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 3));
764 memcpy(state
, ((u32
*) in_arg
+ (i
* 4) + 0), byte_cnt
);
765 memcpy((state
+ byte_cnt
), (out_arg
+ ((i
- 1) * 16) + byte_cnt
), (XTS_BLOCK_SIZE
- byte_cnt
));
767 u128_xor((u128
*)state
, (u128
*)state
, (u128
*)iv_arg
);
770 aes
->ID3R
= INPUT_ENDIAN_SWAP(*((u32
*) state
+ 0));
771 aes
->ID2R
= INPUT_ENDIAN_SWAP(*((u32
*) state
+ 1));
772 aes
->ID1R
= INPUT_ENDIAN_SWAP(*((u32
*) state
+ 2));
773 aes
->ID0R
= INPUT_ENDIAN_SWAP(*((u32
*) state
+ 3)); /* start crypto */
775 memcpy(((u32
*) out_arg
+ (i
* 4) + 0), ((u32
*) out_arg
+ ((i
- 1) * 4) + 0), byte_cnt
);
777 while (aes
->controlr
.BUS
) {
778 // this will not take long
781 *((volatile u32
*) out_arg
+ ((i
-1) * 4) + 0) = aes
->OD3R
;
782 *((volatile u32
*) out_arg
+ ((i
-1) * 4) + 1) = aes
->OD2R
;
783 *((volatile u32
*) out_arg
+ ((i
-1) * 4) + 2) = aes
->OD1R
;
784 *((volatile u32
*) out_arg
+ ((i
-1) * 4) + 3) = aes
->OD0R
;
787 u128_xor((u128
*)((volatile u32
*) out_arg
+ ((i
-1) * 4) + 0), (u128
*)((volatile u32
*) out_arg
+ ((i
-1) * 4) + 0), (u128
*)iv_arg
);
794 /*! \fn int xts_aes_encrypt(struct skcipher_req *req)
795 * \ingroup IFX_AES_FUNCTIONS
796 * \brief XTS AES encrypt using linux crypto skcipher
797 * \param req skcipher request
800 int xts_aes_encrypt(struct skcipher_request
*req
)
802 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
803 struct skcipher_walk walk
;
805 unsigned int enc_bytes
, nbytes
, processed
;
807 err
= skcipher_walk_virt(&walk
, req
, false);
809 if (req
->cryptlen
< XTS_BLOCK_SIZE
)
813 aes_encrypt(req
->base
.tfm
, walk
.iv
, walk
.iv
);
817 while ((nbytes
= walk
.nbytes
) && (walk
.nbytes
>= (XTS_BLOCK_SIZE
* 2)) ) {
819 if (nbytes
== walk
.total
) {
822 enc_bytes
= nbytes
& ~(XTS_BLOCK_SIZE
- 1);
823 if ((req
->cryptlen
- processed
- enc_bytes
) < (XTS_BLOCK_SIZE
)) {
824 if (enc_bytes
> (2 * XTS_BLOCK_SIZE
)) {
825 enc_bytes
-= XTS_BLOCK_SIZE
;
831 ifx_deu_aes_xts(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
832 iv
, enc_bytes
, CRYPTO_DIR_ENCRYPT
);
833 err
= skcipher_walk_done(&walk
, nbytes
- enc_bytes
);
834 processed
+= enc_bytes
;
839 nbytes
= req
->cryptlen
- processed
;
840 scatterwalk_map_and_copy(ctx
->lastbuffer
, req
->src
, (req
->cryptlen
- nbytes
), nbytes
, 0);
841 ifx_deu_aes_xts(ctx
, ctx
->lastbuffer
, ctx
->lastbuffer
,
842 iv
, nbytes
, CRYPTO_DIR_ENCRYPT
);
843 scatterwalk_map_and_copy(ctx
->lastbuffer
, req
->dst
, (req
->cryptlen
- nbytes
), nbytes
, 1);
844 skcipher_request_complete(req
, 0);
850 /*! \fn int xts_aes_decrypt(struct skcipher_req *req)
851 * \ingroup IFX_AES_FUNCTIONS
852 * \brief XTS AES decrypt using linux crypto skcipher
853 * \param req skcipher request
856 int xts_aes_decrypt(struct skcipher_request
*req
)
858 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
859 struct skcipher_walk walk
;
861 unsigned int dec_bytes
, nbytes
, processed
;
863 err
= skcipher_walk_virt(&walk
, req
, false);
865 if (req
->cryptlen
< XTS_BLOCK_SIZE
)
869 aes_encrypt(req
->base
.tfm
, walk
.iv
, walk
.iv
);
873 while ((nbytes
= walk
.nbytes
) && (walk
.nbytes
>= (XTS_BLOCK_SIZE
* 2))) {
875 if (nbytes
== walk
.total
) {
878 dec_bytes
= nbytes
& ~(XTS_BLOCK_SIZE
- 1);
879 if ((req
->cryptlen
- processed
- dec_bytes
) < (XTS_BLOCK_SIZE
)) {
880 if (dec_bytes
> (2 * XTS_BLOCK_SIZE
)) {
881 dec_bytes
-= XTS_BLOCK_SIZE
;
887 ifx_deu_aes_xts(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
888 iv
, dec_bytes
, CRYPTO_DIR_DECRYPT
);
889 err
= skcipher_walk_done(&walk
, nbytes
- dec_bytes
);
890 processed
+= dec_bytes
;
895 nbytes
= req
->cryptlen
- processed
;
896 scatterwalk_map_and_copy(ctx
->lastbuffer
, req
->src
, (req
->cryptlen
- nbytes
), nbytes
, 0);
897 ifx_deu_aes_xts(ctx
, ctx
->lastbuffer
, ctx
->lastbuffer
,
898 iv
, nbytes
, CRYPTO_DIR_DECRYPT
);
899 scatterwalk_map_and_copy(ctx
->lastbuffer
, req
->dst
, (req
->cryptlen
- nbytes
), nbytes
, 1);
900 skcipher_request_complete(req
, 0);
906 /*! \fn int xts_aes_set_key_skcipher (struct crypto_tfm *tfm, const uint8_t *in_key, unsigned int key_len)
907 * \ingroup IFX_AES_FUNCTIONS
908 * \brief sets the AES keys for XTS
909 * \param tfm linux crypto algo transform
910 * \param in_key input key
911 * \param key_len key lengths of 16, 24 and 32 bytes supported
912 * \return -EINVAL - bad key length, 0 - SUCCESS
914 int xts_aes_set_key_skcipher (struct crypto_skcipher
*tfm
, const u8
*in_key
, unsigned int key_len
)
916 struct aes_ctx
*ctx
= crypto_tfm_ctx(crypto_skcipher_tfm(tfm
));
917 unsigned int keylen
= (key_len
/ 2);
919 if (key_len
% 2) return -EINVAL
;
921 if (keylen
!= 16 && keylen
!= 24 && keylen
!= 32) {
925 ctx
->key_length
= keylen
;
927 DPRINTF(0, "ctx @%p, key_len %d, ctx->key_length %d\n", ctx
, key_len
, ctx
->key_length
);
928 memcpy ((u8
*) (ctx
->buf
), in_key
, keylen
);
929 memcpy ((u8
*) (ctx
->tweakkey
), in_key
+ keylen
, keylen
);
935 * \brief AES function mappings
937 struct skcipher_alg ifxdeu_xts_aes_alg
= {
938 .base
.cra_name
= "xts(aes)",
939 .base
.cra_driver_name
= "ifxdeu-xts(aes)",
940 .base
.cra_priority
= 400,
941 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
| CRYPTO_ALG_KERN_DRIVER_ONLY
,
942 .base
.cra_blocksize
= XTS_BLOCK_SIZE
,
943 .base
.cra_ctxsize
= sizeof(struct aes_ctx
),
944 .base
.cra_module
= THIS_MODULE
,
945 .base
.cra_list
= LIST_HEAD_INIT(ifxdeu_xts_aes_alg
.base
.cra_list
),
946 .min_keysize
= AES_MIN_KEY_SIZE
* 2,
947 .max_keysize
= AES_MAX_KEY_SIZE
* 2,
948 .ivsize
= XTS_BLOCK_SIZE
,
949 .walksize
= 2 * XTS_BLOCK_SIZE
,
950 .setkey
= xts_aes_set_key_skcipher
,
951 .encrypt
= xts_aes_encrypt
,
952 .decrypt
= xts_aes_decrypt
,
955 /*! \fn int ofb_aes_encrypt(struct skcipher_req *req)
956 * \ingroup IFX_AES_FUNCTIONS
957 * \brief OFB AES encrypt using linux crypto skcipher
958 * \param req skcipher request
961 int ofb_aes_encrypt(struct skcipher_request
*req
)
963 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
964 struct skcipher_walk walk
;
966 unsigned int enc_bytes
, nbytes
;
968 err
= skcipher_walk_virt(&walk
, req
, false);
970 while ((nbytes
= enc_bytes
= walk
.nbytes
) && (walk
.nbytes
>= AES_BLOCK_SIZE
)) {
971 enc_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
972 ifx_deu_aes_ofb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
973 walk
.iv
, enc_bytes
, CRYPTO_DIR_ENCRYPT
, 0);
974 nbytes
&= AES_BLOCK_SIZE
- 1;
975 err
= skcipher_walk_done(&walk
, nbytes
);
978 /* to handle remaining bytes < AES_BLOCK_SIZE */
980 ifx_deu_aes_ofb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
981 walk
.iv
, walk
.nbytes
, CRYPTO_DIR_ENCRYPT
, 0);
982 err
= skcipher_walk_done(&walk
, 0);
988 /*! \fn int ofb_aes_decrypt(struct skcipher_req *req)
989 * \ingroup IFX_AES_FUNCTIONS
990 * \brief OFB AES decrypt using linux crypto skcipher
991 * \param req skcipher request
994 int ofb_aes_decrypt(struct skcipher_request
*req
)
996 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
997 struct skcipher_walk walk
;
999 unsigned int dec_bytes
, nbytes
;
1001 err
= skcipher_walk_virt(&walk
, req
, false);
1003 while ((nbytes
= dec_bytes
= walk
.nbytes
) && (walk
.nbytes
>= AES_BLOCK_SIZE
)) {
1004 dec_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
1005 ifx_deu_aes_ofb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
1006 walk
.iv
, dec_bytes
, CRYPTO_DIR_DECRYPT
, 0);
1007 nbytes
&= AES_BLOCK_SIZE
- 1;
1008 err
= skcipher_walk_done(&walk
, nbytes
);
1011 /* to handle remaining bytes < AES_BLOCK_SIZE */
1013 ifx_deu_aes_ofb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
1014 walk
.iv
, walk
.nbytes
, CRYPTO_DIR_DECRYPT
, 0);
1015 err
= skcipher_walk_done(&walk
, 0);
/*!
 * \brief AES-OFB skcipher registration: hooks the DEU OFB implementation
 *        into the kernel crypto API.
*/
struct skcipher_alg ifxdeu_ofb_aes_alg = {
    .base.cra_name           = "ofb(aes)",
    .base.cra_driver_name    = "ifxdeu-ofb(aes)",
    .base.cra_priority       = 400,   /* outranks the generic software implementation */
    .base.cra_flags          = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize      = 1,     /* OFB acts as a stream cipher */
    .base.cra_ctxsize        = sizeof(struct aes_ctx),
    .base.cra_module         = THIS_MODULE,
    .base.cra_list           = LIST_HEAD_INIT(ifxdeu_ofb_aes_alg.base.cra_list),
    .min_keysize             = AES_MIN_KEY_SIZE,
    .max_keysize             = AES_MAX_KEY_SIZE,
    .ivsize                  = AES_BLOCK_SIZE,
    .chunksize               = AES_BLOCK_SIZE,
    .walksize                = AES_BLOCK_SIZE,
    .setkey                  = aes_set_key_skcipher,
    .encrypt                 = ofb_aes_encrypt,
    .decrypt                 = ofb_aes_decrypt,
};
1043 /*! \fn int cfb_aes_encrypt(struct skcipher_req *req)
1044 * \ingroup IFX_AES_FUNCTIONS
1045 * \brief CFB AES encrypt using linux crypto skcipher
1046 * \param req skcipher request
1049 int cfb_aes_encrypt(struct skcipher_request
*req
)
1051 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
1052 struct skcipher_walk walk
;
1054 unsigned int enc_bytes
, nbytes
;
1056 err
= skcipher_walk_virt(&walk
, req
, false);
1058 while ((nbytes
= enc_bytes
= walk
.nbytes
) && (walk
.nbytes
>= AES_BLOCK_SIZE
)) {
1059 enc_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
1060 ifx_deu_aes_cfb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
1061 walk
.iv
, enc_bytes
, CRYPTO_DIR_ENCRYPT
, 0);
1062 nbytes
&= AES_BLOCK_SIZE
- 1;
1063 err
= skcipher_walk_done(&walk
, nbytes
);
1066 /* to handle remaining bytes < AES_BLOCK_SIZE */
1068 ifx_deu_aes_cfb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
1069 walk
.iv
, walk
.nbytes
, CRYPTO_DIR_ENCRYPT
, 0);
1070 err
= skcipher_walk_done(&walk
, 0);
1076 /*! \fn int cfb_aes_decrypt(struct skcipher_req *req)
1077 * \ingroup IFX_AES_FUNCTIONS
1078 * \brief CFB AES decrypt using linux crypto skcipher
1079 * \param req skcipher request
1082 int cfb_aes_decrypt(struct skcipher_request
*req
)
1084 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
1085 struct skcipher_walk walk
;
1087 unsigned int dec_bytes
, nbytes
;
1089 err
= skcipher_walk_virt(&walk
, req
, false);
1091 while ((nbytes
= dec_bytes
= walk
.nbytes
) && (walk
.nbytes
>= AES_BLOCK_SIZE
)) {
1092 dec_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
1093 ifx_deu_aes_cfb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
1094 walk
.iv
, dec_bytes
, CRYPTO_DIR_DECRYPT
, 0);
1095 nbytes
&= AES_BLOCK_SIZE
- 1;
1096 err
= skcipher_walk_done(&walk
, nbytes
);
1099 /* to handle remaining bytes < AES_BLOCK_SIZE */
1101 ifx_deu_aes_cfb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
1102 walk
.iv
, walk
.nbytes
, CRYPTO_DIR_DECRYPT
, 0);
1103 err
= skcipher_walk_done(&walk
, 0);
/*!
 * \brief AES-CFB skcipher registration: hooks the DEU CFB implementation
 *        into the kernel crypto API.
*/
struct skcipher_alg ifxdeu_cfb_aes_alg = {
    .base.cra_name           = "cfb(aes)",
    .base.cra_driver_name    = "ifxdeu-cfb(aes)",
    .base.cra_priority       = 400,   /* outranks the generic software implementation */
    .base.cra_flags          = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize      = 1,     /* CFB acts as a stream cipher */
    .base.cra_ctxsize        = sizeof(struct aes_ctx),
    .base.cra_module         = THIS_MODULE,
    .base.cra_list           = LIST_HEAD_INIT(ifxdeu_cfb_aes_alg.base.cra_list),
    .min_keysize             = AES_MIN_KEY_SIZE,
    .max_keysize             = AES_MAX_KEY_SIZE,
    .ivsize                  = AES_BLOCK_SIZE,
    .chunksize               = AES_BLOCK_SIZE,
    .walksize                = AES_BLOCK_SIZE,
    .setkey                  = aes_set_key_skcipher,
    .encrypt                 = cfb_aes_encrypt,
    .decrypt                 = cfb_aes_decrypt,
};
1131 /*! \fn int ctr_basic_aes_encrypt(struct skcipher_req *req)
1132 * \ingroup IFX_AES_FUNCTIONS
1133 * \brief Counter mode AES encrypt using linux crypto skcipher
1134 * \param req skcipher request
1137 int ctr_basic_aes_encrypt(struct skcipher_request
*req
)
1139 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
1140 struct skcipher_walk walk
;
1142 unsigned int enc_bytes
, nbytes
;
1144 err
= skcipher_walk_virt(&walk
, req
, false);
1146 while ((nbytes
= enc_bytes
= walk
.nbytes
) && (walk
.nbytes
>= AES_BLOCK_SIZE
)) {
1147 enc_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
1148 ifx_deu_aes_ctr(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
1149 walk
.iv
, enc_bytes
, CRYPTO_DIR_ENCRYPT
, 0);
1150 nbytes
&= AES_BLOCK_SIZE
- 1;
1151 err
= skcipher_walk_done(&walk
, nbytes
);
1154 /* to handle remaining bytes < AES_BLOCK_SIZE */
1156 ifx_deu_aes_ctr(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
1157 walk
.iv
, walk
.nbytes
, CRYPTO_DIR_ENCRYPT
, 0);
1158 err
= skcipher_walk_done(&walk
, 0);
1164 /*! \fn int ctr_basic_aes_encrypt(struct skcipher_req *req)
1165 * \ingroup IFX_AES_FUNCTIONS
1166 * \brief Counter mode AES decrypt using linux crypto skcipher
1167 * \param req skcipher request
1170 int ctr_basic_aes_decrypt(struct skcipher_request
*req
)
1172 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
1173 struct skcipher_walk walk
;
1175 unsigned int dec_bytes
, nbytes
;
1177 err
= skcipher_walk_virt(&walk
, req
, false);
1179 while ((nbytes
= dec_bytes
= walk
.nbytes
) && (walk
.nbytes
>= AES_BLOCK_SIZE
)) {
1180 dec_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
1181 ifx_deu_aes_ctr(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
1182 walk
.iv
, dec_bytes
, CRYPTO_DIR_DECRYPT
, 0);
1183 nbytes
&= AES_BLOCK_SIZE
- 1;
1184 err
= skcipher_walk_done(&walk
, nbytes
);
1187 /* to handle remaining bytes < AES_BLOCK_SIZE */
1189 ifx_deu_aes_ctr(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
1190 walk
.iv
, walk
.nbytes
, CRYPTO_DIR_DECRYPT
, 0);
1191 err
= skcipher_walk_done(&walk
, 0);
/*!
 * \brief AES-CTR skcipher registration: hooks the DEU CTR implementation
 *        into the kernel crypto API.
 * NOTE(review): unlike the OFB/CFB entries, no .chunksize is set here —
 * confirm whether that omission is intentional.
*/
struct skcipher_alg ifxdeu_ctr_basic_aes_alg = {
    .base.cra_name           = "ctr(aes)",
    .base.cra_driver_name    = "ifxdeu-ctr(aes)",
    .base.cra_priority       = 400,   /* outranks the generic software implementation */
    .base.cra_flags          = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize      = 1,     /* CTR acts as a stream cipher */
    .base.cra_ctxsize        = sizeof(struct aes_ctx),
    .base.cra_module         = THIS_MODULE,
    .base.cra_list           = LIST_HEAD_INIT(ifxdeu_ctr_basic_aes_alg.base.cra_list),
    .min_keysize             = AES_MIN_KEY_SIZE,
    .max_keysize             = AES_MAX_KEY_SIZE,
    .ivsize                  = AES_BLOCK_SIZE,
    .walksize                = AES_BLOCK_SIZE,
    .setkey                  = aes_set_key_skcipher,
    .encrypt                 = ctr_basic_aes_encrypt,
    .decrypt                 = ctr_basic_aes_decrypt,
};
/*! \fn int ctr_rfc3686_aes_encrypt(struct skcipher_request *req)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief Counter mode AES (rfc3686) encrypt using linux crypto skcipher
 *  \param req skcipher request
 *  \return error code from skcipher_walk_done()
*/
int ctr_rfc3686_aes_encrypt(struct skcipher_request *req)
{
    struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
    struct skcipher_walk walk;
    unsigned int nbytes, enc_bytes;
    int err;
    /* RFC 3686 counter block: nonce || per-request IV || 32-bit counter */
    u8 rfc3686_iv[16];

    err = skcipher_walk_virt(&walk, req, false);
    nbytes = walk.nbytes;

    /* set up counter block */
    memcpy(rfc3686_iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
    memcpy(rfc3686_iv + CTR_RFC3686_NONCE_SIZE, walk.iv, CTR_RFC3686_IV_SIZE);

    /* initialize counter portion of counter block (starts at 1 per RFC 3686) */
    *(__be32 *)(rfc3686_iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
        cpu_to_be32(1);

    /* process as many whole AES blocks per walk segment as possible */
    while ((nbytes = enc_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
        enc_bytes -= (nbytes % AES_BLOCK_SIZE);
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                        rfc3686_iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
        nbytes &= AES_BLOCK_SIZE - 1;
        err = skcipher_walk_done(&walk, nbytes);
    }

    /* to handle remaining bytes < AES_BLOCK_SIZE */
    if (walk.nbytes) {
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                        rfc3686_iv, walk.nbytes, CRYPTO_DIR_ENCRYPT, 0);
        err = skcipher_walk_done(&walk, 0);
    }

    return err;
}
/*! \fn int ctr_rfc3686_aes_decrypt(struct skcipher_request *req)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief Counter mode AES (rfc3686) decrypt using linux crypto skcipher
 *  \param req skcipher request
 *  \return error code from skcipher_walk_done()
*/
int ctr_rfc3686_aes_decrypt(struct skcipher_request *req)
{
    struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
    struct skcipher_walk walk;
    unsigned int nbytes, dec_bytes;
    int err;
    /* RFC 3686 counter block: nonce || per-request IV || 32-bit counter */
    u8 rfc3686_iv[16];

    err = skcipher_walk_virt(&walk, req, false);
    nbytes = walk.nbytes;

    /* set up counter block */
    memcpy(rfc3686_iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
    memcpy(rfc3686_iv + CTR_RFC3686_NONCE_SIZE, walk.iv, CTR_RFC3686_IV_SIZE);

    /* initialize counter portion of counter block (starts at 1 per RFC 3686) */
    *(__be32 *)(rfc3686_iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
        cpu_to_be32(1);

    /* process as many whole AES blocks per walk segment as possible */
    while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
        dec_bytes -= (nbytes % AES_BLOCK_SIZE);
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                        rfc3686_iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
        nbytes &= AES_BLOCK_SIZE - 1;
        err = skcipher_walk_done(&walk, nbytes);
    }

    /* to handle remaining bytes < AES_BLOCK_SIZE */
    if (walk.nbytes) {
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                        rfc3686_iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
        err = skcipher_walk_done(&walk, 0);
    }

    return err;
}
/*!
 * \brief RFC 3686 AES-CTR skcipher registration: hooks the DEU CTR
 *        implementation (with nonce handling) into the kernel crypto API.
*/
struct skcipher_alg ifxdeu_ctr_rfc3686_aes_alg = {
    .base.cra_name           = "rfc3686(ctr(aes))",
    .base.cra_driver_name    = "ifxdeu-ctr-rfc3686(aes)",
    .base.cra_priority       = 400,   /* outranks the generic software implementation */
    .base.cra_flags          = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize      = 1,     /* CTR acts as a stream cipher */
    .base.cra_ctxsize        = sizeof(struct aes_ctx),
    .base.cra_module         = THIS_MODULE,
    .base.cra_list           = LIST_HEAD_INIT(ifxdeu_ctr_rfc3686_aes_alg.base.cra_list),
    .min_keysize             = CTR_RFC3686_MIN_KEY_SIZE,  /* AES key + 4-byte nonce */
    .max_keysize             = CTR_RFC3686_MAX_KEY_SIZE,
    .ivsize                  = CTR_RFC3686_IV_SIZE,       /* 8-byte per-request IV */
    .walksize                = AES_BLOCK_SIZE,
    .setkey                  = ctr_rfc3686_aes_set_key_skcipher,
    .encrypt                 = ctr_rfc3686_aes_encrypt,
    .decrypt                 = ctr_rfc3686_aes_decrypt,
};
/* Forward declaration: runs the buffered blocks through the DEU and, when
 * hash_final is true, produces the final CBC-MAC digest. */
static int aes_cbcmac_final_impl(struct shash_desc *desc, u8 *out, bool hash_final);
/*! \fn static void aes_cbcmac_transform(struct shash_desc *desc, u8 const *in)
 *  \ingroup IFX_aes_cbcmac_FUNCTIONS
 *  \brief save input block to context; flushes buffered blocks through the
 *         hardware first when the temp buffer would overflow
 *  \param desc linux crypto shash descriptor
 *  \param in 16-byte block of input
*/
static void aes_cbcmac_transform(struct shash_desc *desc, u8 const *in)
{
    struct aes_ctx *mctx = crypto_shash_ctx(desc->tfm);

    if ( ((mctx->dbn)+1) > AES_CBCMAC_DBN_TEMP_SIZE )
    {
        /* temp buffer is full: do an intermediate (non-final) pass now */
        //printk("aes_cbcmac_DBN_TEMP_SIZE exceeded\n");
        aes_cbcmac_final_impl(desc, (u8 *)mctx->hash, false);
    }

    memcpy(&mctx->temp[mctx->dbn], in, 16); //dbn workaround
    mctx->dbn += 1;
}
1347 /*! \fn int aes_cbcmac_setkey(struct crypto_shash *tfm, const u8 *key, unsigned int keylen)
1348 * \ingroup IFX_aes_cbcmac_FUNCTIONS
1349 * \brief sets cbcmac aes key
1350 * \param tfm linux crypto shash transform
1351 * \param key input key
1354 static int aes_cbcmac_setkey(struct crypto_shash
*tfm
, const u8
*key
, unsigned int keylen
)
1356 return aes_set_key(crypto_shash_tfm(tfm
), key
, keylen
);
/*! \fn static int aes_cbcmac_init(struct shash_desc *desc)
 *  \ingroup IFX_aes_cbcmac_FUNCTIONS
 *  \brief reset the cbcmac aes context for a new digest
 *  \param desc linux crypto shash descriptor
 *  \return 0 always
*/
static int aes_cbcmac_init(struct shash_desc *desc)
{
    struct aes_ctx *mctx = crypto_shash_ctx(desc->tfm);

    mctx->dbn = 0; //dbn workaround
    /* NOTE(review): reconstructed line — final_impl consults mctx->started,
     * so it must be cleared here; confirm against the original source. */
    mctx->started = 0;
    mctx->byte_count = 0;
    memset(mctx->hash, 0, AES_BLOCK_SIZE);

    return 0;
}
/*! \fn static int aes_cbcmac_update(struct shash_desc *desc, const u8 *data, unsigned int len)
 *  \ingroup IFX_aes_cbcmac_FUNCTIONS
 *  \brief on-the-fly cbcmac aes computation
 *  \param desc linux crypto shash descriptor
 *  \param data input data
 *  \param len size of input data
 *  \return 0 always
*/
static int aes_cbcmac_update(struct shash_desc *desc, const u8 *data, unsigned int len)
{
    struct aes_ctx *mctx = crypto_shash_ctx(desc->tfm);
    /* free space left in the 16-byte staging block */
    const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x0f);

    mctx->byte_count += len;

    if (avail > len) {
        /* input fits into the partial block: just buffer it */
        memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
               data, len);
        return 0;
    }

    /* top up the partial block and consume it */
    memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
           data, avail);

    aes_cbcmac_transform(desc, mctx->block);
    data += avail;
    len -= avail;

    /* consume whole 16-byte blocks directly from the input */
    while (len >= sizeof(mctx->block)) {
        memcpy(mctx->block, data, sizeof(mctx->block));
        aes_cbcmac_transform(desc, mctx->block);
        data += sizeof(mctx->block);
        len -= sizeof(mctx->block);
    }

    /* stash the tail for the next update/final call */
    memcpy(mctx->block, data, len);
    return 0;
}
/*! \fn static int aes_cbcmac_final_impl(struct shash_desc *desc, u8 *out, bool hash_final)
 *  \ingroup IFX_aes_cbcmac_FUNCTIONS
 *  \brief compute final or intermediate cbcmac aes value by driving the DEU
 *         AES engine in CBC mode over the buffered blocks
 *  \param desc linux crypto shash descriptor
 *  \param out final cbcmac aes output value
 *  \param hash_final finalize (true) or intermediate processing (false)
 *  \return 0 always
*/
static int aes_cbcmac_final_impl(struct shash_desc *desc, u8 *out, bool hash_final)
{
    struct aes_ctx *mctx = crypto_shash_ctx(desc->tfm);
    const unsigned int offset = mctx->byte_count & 0x0f;   /* bytes in partial block */
    char *p = (char *)mctx->block + offset;
    volatile struct aes_t *aes = (volatile struct aes_t *) AES_START;
    /* NOTE(review): the following declarations are reconstructed from usage
     * below — confirm against the original source. */
    unsigned long flag;
    int i = 0;
    int dbn;
    u32 *in = mctx->temp[0];

    CRTCL_SECT_START;

    aes_set_key_hw (mctx);

    aes->controlr.E_D = !CRYPTO_DIR_ENCRYPT;    //encryption
    aes->controlr.O = 1; //0 ECB 1 CBC 2 OFB 3 CFB 4 CTR

    //aes->controlr.F = 128; //default; only for CFB and OFB modes; change only for customer-specific apps

    //printk("\ndbn = %d\n", mctx->dbn);

    if (mctx->started) {
        /* continue from the intermediate hash of a previous pass */
        aes->IV3R = DEU_ENDIAN_SWAP(*(u32 *) mctx->hash);
        aes->IV2R = DEU_ENDIAN_SWAP(*((u32 *) mctx->hash + 1));
        aes->IV1R = DEU_ENDIAN_SWAP(*((u32 *) mctx->hash + 2));
        aes->IV0R = DEU_ENDIAN_SWAP(*((u32 *) mctx->hash + 3));
    } else {
        /* first pass: CBC-MAC starts from an all-zero IV */
        mctx->started = 1;
        aes->IV3R = 0;
        aes->IV2R = 0;
        aes->IV1R = 0;
        aes->IV0R = 0;
    }

    /* push each buffered 16-byte block through the engine; writing ID0R
     * starts the crypto operation */
    for (dbn = 0; dbn < mctx->dbn; dbn++)
    {
        aes->ID3R = INPUT_ENDIAN_SWAP(*((u32 *) in + (i * 4) + 0));
        aes->ID2R = INPUT_ENDIAN_SWAP(*((u32 *) in + (i * 4) + 1));
        aes->ID1R = INPUT_ENDIAN_SWAP(*((u32 *) in + (i * 4) + 2));
        aes->ID0R = INPUT_ENDIAN_SWAP(*((u32 *) in + (i * 4) + 3)); /* start crypto */

        while (aes->controlr.BUS) {
            // this will not take long
        }

        in += 4;   /* advance to the next buffered block */
    }

    /* the CBC chaining value (IV registers) is the running MAC */
    *((u32 *) mctx->hash) = DEU_ENDIAN_SWAP(aes->IV3R);
    *((u32 *) mctx->hash + 1) = DEU_ENDIAN_SWAP(aes->IV2R);
    *((u32 *) mctx->hash + 2) = DEU_ENDIAN_SWAP(aes->IV1R);
    *((u32 *) mctx->hash + 3) = DEU_ENDIAN_SWAP(aes->IV0R);

    if (hash_final && offset) {
        /* fold the partial tail block in via one ECB encryption */
        aes->controlr.O = 0; //0 ECB 1 CBC 2 OFB 3 CFB 4 CTR
        crypto_xor(mctx->block, mctx->hash, offset);
        memcpy(p, mctx->hash + offset, (AES_BLOCK_SIZE - offset));

        aes->ID3R = INPUT_ENDIAN_SWAP(*((u32 *) mctx->block + 0));
        aes->ID2R = INPUT_ENDIAN_SWAP(*((u32 *) mctx->block + 1));
        aes->ID1R = INPUT_ENDIAN_SWAP(*((u32 *) mctx->block + 2));
        aes->ID0R = INPUT_ENDIAN_SWAP(*((u32 *) mctx->block + 3)); /* start crypto */

        while (aes->controlr.BUS) {
            // this will not take long
        }

        *((u32 *) mctx->hash) = DEU_ENDIAN_SWAP(aes->OD3R);
        *((u32 *) mctx->hash + 1) = DEU_ENDIAN_SWAP(aes->OD2R);
        *((u32 *) mctx->hash + 2) = DEU_ENDIAN_SWAP(aes->OD1R);
        *((u32 *) mctx->hash + 3) = DEU_ENDIAN_SWAP(aes->OD0R);
    }

    CRTCL_SECT_END;

    if (hash_final) {
        memcpy(out, mctx->hash, AES_BLOCK_SIZE);
        /* reset the context after we finish with the hash */
        aes_cbcmac_init(desc);
    } else {
        /* intermediate flush: buffered blocks consumed, keep running hash */
        mctx->dbn = 0;
    }
    return 0;
}
1512 /*! \fn static int aes_cbcmac_final(struct crypto_tfm *tfm, u8 *out)
1513 * \ingroup IFX_aes_cbcmac_FUNCTIONS
1514 * \brief call aes_cbcmac_final_impl with hash_final true
1515 * \param tfm linux crypto algo transform
1516 * \param out final md5 hmac output value
1518 static int aes_cbcmac_final(struct shash_desc
*desc
, u8
*out
)
1520 return aes_cbcmac_final_impl(desc
, out
, true);
1523 /*! \fn void aes_cbcmac_init_tfm(struct crypto_tfm *tfm)
1524 * \ingroup IFX_aes_cbcmac_FUNCTIONS
1525 * \brief initialize pointers in aes_ctx
1526 * \param tfm linux crypto shash transform
1528 static int aes_cbcmac_init_tfm(struct crypto_tfm
*tfm
)
1530 struct aes_ctx
*mctx
= crypto_tfm_ctx(tfm
);
1531 mctx
->temp
= kzalloc(AES_BLOCK_SIZE
* AES_CBCMAC_DBN_TEMP_SIZE
, GFP_KERNEL
);
1532 if (IS_ERR(mctx
->temp
)) return PTR_ERR(mctx
->temp
);
/*! \fn static void aes_cbcmac_exit_tfm(struct crypto_tfm *tfm)
 *  \ingroup IFX_aes_cbcmac_FUNCTIONS
 *  \brief free the per-tfm buffer allocated in aes_cbcmac_init_tfm
 *  \param tfm linux crypto transform
*/
static void aes_cbcmac_exit_tfm(struct crypto_tfm *tfm)
{
    struct aes_ctx *mctx = crypto_tfm_ctx(tfm);
    /* release the dbn-workaround buffer */
    kfree(mctx->temp);
}
/*!
 * \brief cbcmac(aes) shash registration: maps the kernel shash hooks onto
 *        the DEU CBC-MAC implementation above.
*/
static struct shash_alg ifxdeu_cbcmac_aes_alg = {
    .digestsize         = AES_BLOCK_SIZE,   /* MAC is one AES block */
    .init               = aes_cbcmac_init,
    .update             = aes_cbcmac_update,
    .final              = aes_cbcmac_final,
    .setkey             = aes_cbcmac_setkey,
    .descsize           = sizeof(struct aes_ctx),
    .base               = {
        .cra_name           = "cbcmac(aes)",
        .cra_driver_name    = "ifxdeu-cbcmac(aes)",
        .cra_priority       = 400,   /* outranks the generic software implementation */
        .cra_ctxsize        = sizeof(struct aes_ctx),
        .cra_flags          = CRYPTO_ALG_TYPE_HASH | CRYPTO_ALG_KERN_DRIVER_ONLY,
        .cra_module         = THIS_MODULE,
        .cra_init           = aes_cbcmac_init_tfm,   /* allocates the dbn buffer */
        .cra_exit           = aes_cbcmac_exit_tfm,   /* frees the dbn buffer */
    }
};
1571 /*! \fn int aes_set_key_aead (struct crypto_aead *aead, const uint8_t *in_key, unsigned int key_len)
1572 * \ingroup IFX_AES_FUNCTIONS
1573 * \brief sets the AES keys for aead gcm
1574 * \param aead linux crypto aead
1575 * \param in_key input key
1576 * \param key_len key lengths of 16, 24 and 32 bytes supported
1577 * \return -EINVAL - bad key length, 0 - SUCCESS
1579 int aes_set_key_aead (struct crypto_aead
*aead
, const u8
*in_key
, unsigned int key_len
)
1581 struct aes_ctx
*ctx
= crypto_aead_ctx(aead
);
1584 err
= aes_set_key(&aead
->base
, in_key
, key_len
);
1585 if (err
) return err
;
1587 memset(ctx
->block
, 0, sizeof(ctx
->block
));
1588 memset(ctx
->lastbuffer
, 0, AES_BLOCK_SIZE
);
1589 ifx_deu_aes_ctr(ctx
, ctx
->block
, ctx
->block
,
1590 ctx
->lastbuffer
, AES_BLOCK_SIZE
, CRYPTO_DIR_ENCRYPT
, 0);
1591 if (ctx
->gf128
) gf128mul_free_4k(ctx
->gf128
);
1592 ctx
->gf128
= gf128mul_init_4k_lle((be128
*)ctx
->block
);
/*! \fn int gcm_aes_setauthsize (struct crypto_aead *aead, unsigned int authsize)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief validates the requested GCM authentication tag size
 *  \param aead linux crypto aead
 *  \param authsize requested tag length in bytes
 *  \return -EINVAL - bad authsize length, 0 - SUCCESS
*/
int gcm_aes_setauthsize (struct crypto_aead *aead, unsigned int authsize)
{
    /* delegate the tag-length check to the generic GCM helper */
    return crypto_gcm_check_authsize(authsize);
}
/*! \fn int gcm_aes_encrypt(struct aead_request *req)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief GCM AES encrypt using linux crypto aead: CTR encryption on the DEU,
 *         GHASH in software via gf128mul
 *  \param req aead request
 *  \return error code from the last skcipher_walk_done()
*/
int gcm_aes_encrypt(struct aead_request *req)
{
    struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
    struct skcipher_walk walk;
    /* local skcipher request so the walk covers assoc data + plaintext */
    struct skcipher_request request;
    int err;
    unsigned int enc_bytes, nbytes;
    /* GHASH length block: bit lengths of assoc data (a) and plaintext (b) */
    be128 lengths;
    u8 iv[AES_BLOCK_SIZE];
    u8 *temp;

    lengths.a = cpu_to_be64(req->assoclen * 8);
    lengths.b = cpu_to_be64(req->cryptlen * 8);

    memset(ctx->hash, 0, sizeof(ctx->hash));
    memset(ctx->block, 0, sizeof(ctx->block));
    /* J0 = IV || counter starting at 1; ctx->block becomes E_K(J0) which is
     * XORed into the tag at the end */
    memcpy(iv, req->iv, GCM_AES_IV_SIZE);
    *(__be32 *)((void *)iv + GCM_AES_IV_SIZE) = cpu_to_be32(1);
    ifx_deu_aes_ctr(ctx, ctx->block, ctx->block,
                    iv, 16, CRYPTO_DIR_ENCRYPT, 0);

    request.cryptlen = req->cryptlen + req->assoclen;
    request.src = req->src;
    request.dst = req->dst;
    request.base = req->base;

    /* NOTE(review): this writes the walksize of the *shared* algorithm
     * object, not per-request state — concurrent GCM requests could race on
     * it; confirm whether serialization happens elsewhere. */
    crypto_skcipher_alg(crypto_skcipher_reqtfm(&request))->walksize = AES_BLOCK_SIZE;

    if (req->assoclen && (req->assoclen < AES_BLOCK_SIZE))
        crypto_skcipher_alg(crypto_skcipher_reqtfm(&request))->walksize = req->assoclen;

    err = skcipher_walk_virt(&walk, &request, false);

    //process assoc data if available
    if (req->assoclen > 0) {
        unsigned int assoc_remain, ghashlen;

        assoc_remain = req->assoclen;
        ghashlen = min(req->assoclen, walk.nbytes);
        while ((nbytes = enc_bytes = ghashlen) && (ghashlen >= AES_BLOCK_SIZE)) {
            /* never consume past the end of the assoc data */
            if (nbytes > req->assoclen) nbytes = enc_bytes = req->assoclen;
            enc_bytes -= (nbytes % AES_BLOCK_SIZE);
            /* assoc data is copied through unmodified ... */
            memcpy(walk.dst.virt.addr, walk.src.virt.addr, enc_bytes);
            assoc_remain -= enc_bytes;
            temp = walk.dst.virt.addr;
            /* ... and folded into the GHASH state block by block */
            while (enc_bytes > 0) {
                u128_xor((u128 *)ctx->hash, (u128 *)ctx->hash, (u128 *)temp);
                gf128mul_4k_lle((be128 *)ctx->hash, ctx->gf128);
                enc_bytes -= AES_BLOCK_SIZE;
                temp += AES_BLOCK_SIZE;
            }
            /* shrink the walk stride to stop exactly at the assoc boundary */
            if (assoc_remain < AES_BLOCK_SIZE) walk.stride = assoc_remain;
            if (assoc_remain == 0) walk.stride = AES_BLOCK_SIZE;
            enc_bytes = nbytes - (nbytes % AES_BLOCK_SIZE);
            err = skcipher_walk_done(&walk, (walk.nbytes - enc_bytes));
            ghashlen = min(assoc_remain, walk.nbytes);
        }
        /* partial assoc tail: zero-pad into lastbuffer before hashing */
        if ((enc_bytes = ghashlen)) {
            memcpy(ctx->lastbuffer, walk.src.virt.addr, enc_bytes);
            memset(ctx->lastbuffer + enc_bytes, 0, (AES_BLOCK_SIZE - enc_bytes));
            memcpy(walk.dst.virt.addr, walk.src.virt.addr, ghashlen);
            u128_xor((u128 *)ctx->hash, (u128 *)ctx->hash, (u128 *)ctx->lastbuffer);
            gf128mul_4k_lle((be128 *)ctx->hash, ctx->gf128);
            walk.stride = AES_BLOCK_SIZE;
            err = skcipher_walk_done(&walk, (walk.nbytes - ghashlen));
        }
    }

    /* encrypt the plaintext (CTR on the DEU) and hash the ciphertext */
    while ((nbytes = enc_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
        enc_bytes -= (nbytes % AES_BLOCK_SIZE);
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                        iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
        nbytes &= AES_BLOCK_SIZE - 1;
        temp = walk.dst.virt.addr;
        while (enc_bytes) {
            u128_xor((u128 *)ctx->hash, (u128 *)ctx->hash, (u128 *)temp);
            gf128mul_4k_lle((be128 *)ctx->hash, ctx->gf128);
            enc_bytes -= AES_BLOCK_SIZE;
            temp += AES_BLOCK_SIZE;
        }
        err = skcipher_walk_done(&walk, nbytes);
    }

    /* crypt and hash remaining bytes < AES_BLOCK_SIZE */
    if ((enc_bytes = walk.nbytes)) {
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                        iv, walk.nbytes, CRYPTO_DIR_ENCRYPT, 0);
        memcpy(ctx->lastbuffer, walk.dst.virt.addr, enc_bytes);
        memset(ctx->lastbuffer + enc_bytes, 0, (AES_BLOCK_SIZE - enc_bytes));
        u128_xor((u128 *)ctx->hash, (u128 *)ctx->hash, (u128 *)ctx->lastbuffer);
        gf128mul_4k_lle((be128 *)ctx->hash, ctx->gf128);
        err = skcipher_walk_done(&walk, 0);
    }

    //finalize and copy hash
    u128_xor((u128 *)ctx->hash, (u128 *)ctx->hash, (u128 *)&lengths);
    gf128mul_4k_lle((be128 *)ctx->hash, ctx->gf128);
    /* tag = GHASH XOR E_K(J0); append it after ciphertext in dst */
    u128_xor((u128 *)ctx->hash, (u128 *)ctx->hash, (u128 *)ctx->block);
    scatterwalk_map_and_copy(ctx->hash, req->dst, req->cryptlen + req->assoclen, crypto_aead_authsize(crypto_aead_reqtfm(req)), 1);

    aead_request_complete(req, 0);

    return err;
}
/*! \fn int gcm_aes_decrypt(struct aead_request *req)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief GCM AES decrypt using linux crypto aead: GHASH over the ciphertext
 *         in software, CTR decryption on the DEU, then tag comparison
 *  \param req aead request
 *  \return -EBADMSG if the authentication tag does not match, else walk status
*/
int gcm_aes_decrypt(struct aead_request *req)
{
    struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
    struct skcipher_walk walk;
    /* local skcipher request so the walk covers assoc data + ciphertext */
    struct skcipher_request request;
    int err;
    unsigned int dec_bytes, nbytes, authsize;
    /* GHASH length block: bit lengths of assoc data (a) and ciphertext (b) */
    be128 lengths;
    u8 iv[AES_BLOCK_SIZE];
    u8 *temp;

    authsize = crypto_aead_authsize(crypto_aead_reqtfm(req));

    lengths.a = cpu_to_be64(req->assoclen * 8);
    lengths.b = cpu_to_be64((req->cryptlen - authsize) * 8);

    memset(ctx->hash, 0, sizeof(ctx->hash));
    memset(ctx->block, 0, sizeof(ctx->block));
    /* J0 = IV || counter starting at 1; ctx->block becomes E_K(J0) which is
     * XORed into the computed tag at the end */
    memcpy(iv, req->iv, GCM_AES_IV_SIZE);
    *(__be32 *)((void *)iv + GCM_AES_IV_SIZE) = cpu_to_be32(1);
    ifx_deu_aes_ctr(ctx, ctx->block, ctx->block,
                    iv, 16, CRYPTO_DIR_ENCRYPT, 0);

    /* walk excludes the trailing authentication tag */
    request.cryptlen = req->cryptlen + req->assoclen - authsize;
    request.src = req->src;
    request.dst = req->dst;
    request.base = req->base;
    /* NOTE(review): writes the walksize of the *shared* algorithm object,
     * not per-request state — concurrent GCM requests could race on it. */
    crypto_skcipher_alg(crypto_skcipher_reqtfm(&request))->walksize = AES_BLOCK_SIZE;

    if (req->assoclen && (req->assoclen < AES_BLOCK_SIZE))
        crypto_skcipher_alg(crypto_skcipher_reqtfm(&request))->walksize = req->assoclen;

    err = skcipher_walk_virt(&walk, &request, false);

    //process assoc data if available
    if (req->assoclen > 0) {
        unsigned int assoc_remain, ghashlen;

        assoc_remain = req->assoclen;
        ghashlen = min(req->assoclen, walk.nbytes);
        while ((nbytes = dec_bytes = ghashlen) && (ghashlen >= AES_BLOCK_SIZE)) {
            /* never consume past the end of the assoc data */
            if (nbytes > req->assoclen) nbytes = dec_bytes = req->assoclen;
            dec_bytes -= (nbytes % AES_BLOCK_SIZE);
            /* assoc data is copied through unmodified ... */
            memcpy(walk.dst.virt.addr, walk.src.virt.addr, dec_bytes);
            assoc_remain -= dec_bytes;
            temp = walk.dst.virt.addr;
            /* ... and folded into the GHASH state block by block */
            while (dec_bytes > 0) {
                u128_xor((u128 *)ctx->hash, (u128 *)ctx->hash, (u128 *)temp);
                gf128mul_4k_lle((be128 *)ctx->hash, ctx->gf128);
                dec_bytes -= AES_BLOCK_SIZE;
                temp += AES_BLOCK_SIZE;
            }
            /* shrink the walk stride to stop exactly at the assoc boundary */
            if (assoc_remain < AES_BLOCK_SIZE) walk.stride = assoc_remain;
            if (assoc_remain == 0) walk.stride = AES_BLOCK_SIZE;
            dec_bytes = nbytes - (nbytes % AES_BLOCK_SIZE);
            err = skcipher_walk_done(&walk, (walk.nbytes - dec_bytes));
            ghashlen = min(assoc_remain, walk.nbytes);
        }
        /* partial assoc tail: zero-pad into lastbuffer before hashing */
        if ((dec_bytes = ghashlen)) {
            memcpy(ctx->lastbuffer, walk.src.virt.addr, dec_bytes);
            memset(ctx->lastbuffer + dec_bytes, 0, (AES_BLOCK_SIZE - dec_bytes));
            memcpy(walk.dst.virt.addr, walk.src.virt.addr, ghashlen);
            u128_xor((u128 *)ctx->hash, (u128 *)ctx->hash, (u128 *)ctx->lastbuffer);
            gf128mul_4k_lle((be128 *)ctx->hash, ctx->gf128);
            walk.stride = AES_BLOCK_SIZE;
            err = skcipher_walk_done(&walk, (walk.nbytes - ghashlen));
        }
    }

    /* hash the ciphertext (from src, before decryption), then CTR-decrypt */
    while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
        dec_bytes -= (nbytes % AES_BLOCK_SIZE);
        temp = walk.src.virt.addr;
        while (dec_bytes) {
            u128_xor((u128 *)ctx->hash, (u128 *)ctx->hash, (u128 *)temp);
            gf128mul_4k_lle((be128 *)ctx->hash, ctx->gf128);
            dec_bytes -= AES_BLOCK_SIZE;
            temp += AES_BLOCK_SIZE;
        }
        dec_bytes = nbytes - (nbytes % AES_BLOCK_SIZE);
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                        iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
        nbytes &= AES_BLOCK_SIZE - 1;
        err = skcipher_walk_done(&walk, nbytes);
    }

    /* crypt and hash remaining bytes < AES_BLOCK_SIZE */
    if ((dec_bytes = walk.nbytes)) {
        memcpy(ctx->lastbuffer, walk.src.virt.addr, dec_bytes);
        memset(ctx->lastbuffer + dec_bytes, 0, (AES_BLOCK_SIZE - dec_bytes));
        u128_xor((u128 *)ctx->hash, (u128 *)ctx->hash, (u128 *)ctx->lastbuffer);
        gf128mul_4k_lle((be128 *)ctx->hash, ctx->gf128);
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                        iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
        err = skcipher_walk_done(&walk, 0);
    }

    //finalize and copy hash
    u128_xor((u128 *)ctx->hash, (u128 *)ctx->hash, (u128 *)&lengths);
    gf128mul_4k_lle((be128 *)ctx->hash, ctx->gf128);
    u128_xor((u128 *)ctx->hash, (u128 *)ctx->hash, (u128 *)ctx->block);

    /* constant-time comparison of the received tag against the computed one */
    scatterwalk_map_and_copy(ctx->lastbuffer, req->src, req->cryptlen + req->assoclen - authsize, authsize, 0);
    err = crypto_memneq(ctx->lastbuffer, ctx->hash, authsize) ? -EBADMSG : 0;

    /* NOTE(review): completion status is 0 even when err is -EBADMSG —
     * confirm callers rely on the return value rather than the callback. */
    aead_request_complete(req, 0);

    return err;
}
1841 /*! \fn void aes_gcm_exit_tfm(struct crypto_tfm *tfm)
1842 * \ingroup IFX_aes_cbcmac_FUNCTIONS
1843 * \brief free pointers in aes_ctx
1844 * \param tfm linux crypto shash transform
1846 static void aes_gcm_exit_tfm(struct crypto_tfm
*tfm
)
1848 struct aes_ctx
*ctx
= crypto_tfm_ctx(tfm
);
1849 if (ctx
->gf128
) gf128mul_free_4k(ctx
->gf128
);
/*!
 * \brief gcm(aes) AEAD registration: DEU-accelerated CTR with software GHASH.
 * NOTE(review): cra_flags uses CRYPTO_ALG_TYPE_SKCIPHER although this is an
 * aead_alg — confirm the intended type flag.
*/
struct aead_alg ifxdeu_gcm_aes_alg = {
    .base.cra_name           = "gcm(aes)",
    .base.cra_driver_name    = "ifxdeu-gcm(aes)",
    .base.cra_priority       = 400,   /* outranks the generic software implementation */
    .base.cra_flags          = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize      = 1,     /* GCM acts as a stream mode */
    .base.cra_ctxsize        = sizeof(struct aes_ctx),
    .base.cra_module         = THIS_MODULE,
    .base.cra_list           = LIST_HEAD_INIT(ifxdeu_gcm_aes_alg.base.cra_list),
    .base.cra_exit           = aes_gcm_exit_tfm,   /* frees the GHASH table */
    .ivsize                  = GCM_AES_IV_SIZE,    /* 12-byte GCM nonce */
    .maxauthsize             = AES_BLOCK_SIZE,
    .chunksize               = AES_BLOCK_SIZE,
    .setkey                  = aes_set_key_aead,
    .encrypt                 = gcm_aes_encrypt,
    .decrypt                 = gcm_aes_decrypt,
    .setauthsize             = gcm_aes_setauthsize,
};
/*! \fn int ifxdeu_init_aes (void)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief register all DEU AES algorithms with the kernel crypto API
 *  \return 0 on success, negative errno if any registration fails
 *
 * NOTE(review): each error label unregisters the algorithm whose
 * registration just FAILED (not the previously registered ones) and
 * returns immediately, so earlier successful registrations are leaked on a
 * partial failure — confirm whether this matches the original intent.
*/
int ifxdeu_init_aes (void)
{
    int ret = -ENOSYS;

    aes_chip_init();

    if ((ret = crypto_register_alg(&ifxdeu_aes_alg)))
        goto aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_ecb_aes_alg)))
        goto ecb_aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_cbc_aes_alg)))
        goto cbc_aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_xts_aes_alg)))
        goto xts_aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_ofb_aes_alg)))
        goto ofb_aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_cfb_aes_alg)))
        goto cfb_aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_ctr_basic_aes_alg)))
        goto ctr_basic_aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_ctr_rfc3686_aes_alg)))
        goto ctr_rfc3686_aes_err;

    if ((ret = crypto_register_shash(&ifxdeu_cbcmac_aes_alg)))
        goto cbcmac_aes_err;

    if ((ret = crypto_register_aead(&ifxdeu_gcm_aes_alg)))
        goto gcm_aes_err;

    CRTCL_SECT_INIT;

    printk (KERN_NOTICE "IFX DEU AES initialized%s%s.\n", disable_multiblock ? "" : " (multiblock)", disable_deudma ? "" : " (DMA)");
    return ret;

gcm_aes_err:
    crypto_unregister_aead(&ifxdeu_gcm_aes_alg);
    printk (KERN_ERR "IFX gcm_aes initialization failed!\n");
    return ret;
cbcmac_aes_err:
    crypto_unregister_shash(&ifxdeu_cbcmac_aes_alg);
    printk (KERN_ERR "IFX cbcmac_aes initialization failed!\n");
    return ret;
ctr_rfc3686_aes_err:
    crypto_unregister_skcipher(&ifxdeu_ctr_rfc3686_aes_alg);
    printk (KERN_ERR "IFX ctr_rfc3686_aes initialization failed!\n");
    return ret;
ctr_basic_aes_err:
    crypto_unregister_skcipher(&ifxdeu_ctr_basic_aes_alg);
    printk (KERN_ERR "IFX ctr_basic_aes initialization failed!\n");
    return ret;
cfb_aes_err:
    crypto_unregister_skcipher(&ifxdeu_cfb_aes_alg);
    printk (KERN_ERR "IFX cfb_aes initialization failed!\n");
    return ret;
ofb_aes_err:
    crypto_unregister_skcipher(&ifxdeu_ofb_aes_alg);
    printk (KERN_ERR "IFX ofb_aes initialization failed!\n");
    return ret;
xts_aes_err:
    crypto_unregister_skcipher(&ifxdeu_xts_aes_alg);
    printk (KERN_ERR "IFX xts_aes initialization failed!\n");
    return ret;
cbc_aes_err:
    crypto_unregister_skcipher(&ifxdeu_cbc_aes_alg);
    printk (KERN_ERR "IFX cbc_aes initialization failed!\n");
    return ret;
ecb_aes_err:
    crypto_unregister_skcipher(&ifxdeu_ecb_aes_alg);
    printk (KERN_ERR "IFX aes initialization failed!\n");
    return ret;
aes_err:
    printk(KERN_ERR "IFX DEU AES initialization failed!\n");
    return ret;
}
1963 /*! \fn void ifxdeu_fini_aes (void)
1964 * \ingroup IFX_AES_FUNCTIONS
1965 * \brief unregister aes driver
1967 void ifxdeu_fini_aes (void)
1969 crypto_unregister_alg (&ifxdeu_aes_alg
);
1970 crypto_unregister_skcipher (&ifxdeu_ecb_aes_alg
);
1971 crypto_unregister_skcipher (&ifxdeu_cbc_aes_alg
);
1972 crypto_unregister_skcipher (&ifxdeu_xts_aes_alg
);
1973 crypto_unregister_skcipher (&ifxdeu_ofb_aes_alg
);
1974 crypto_unregister_skcipher (&ifxdeu_cfb_aes_alg
);
1975 crypto_unregister_skcipher (&ifxdeu_ctr_basic_aes_alg
);
1976 crypto_unregister_skcipher (&ifxdeu_ctr_rfc3686_aes_alg
);
1977 crypto_unregister_shash (&ifxdeu_cbcmac_aes_alg
);
1978 crypto_unregister_aead (&ifxdeu_gcm_aes_alg
);