ltq-deu: remove compiler warning and shorten locked sections
[openwrt/staging/stintel.git] / package / kernel / lantiq / ltq-deu / src / ifxmips_aes.c
1 /******************************************************************************
2 **
3 ** FILE NAME : ifxmips_aes.c
4 ** PROJECT : IFX UEIP
5 ** MODULES : DEU Module
6 **
7 ** DATE : September 8, 2009
8 ** AUTHOR : Mohammad Firdaus
9 ** DESCRIPTION : Data Encryption Unit Driver for AES Algorithm
10 ** COPYRIGHT : Copyright (c) 2009
11 ** Infineon Technologies AG
12 ** Am Campeon 1-12, 85579 Neubiberg, Germany
13 **
14 ** This program is free software; you can redistribute it and/or modify
15 ** it under the terms of the GNU General Public License as published by
16 ** the Free Software Foundation; either version 2 of the License, or
17 ** (at your option) any later version.
18 **
19 ** HISTORY
20 ** $Date $Author $Comment
21 ** 08,Sept 2009 Mohammad Firdaus Initial UEIP release
22 *******************************************************************************/
23 /*!
24 \defgroup IFX_DEU IFX_DEU_DRIVERS
25 \ingroup API
26 \brief ifx DEU driver module
27 */
28
29 /*!
30 \file ifxmips_aes.c
31 \ingroup IFX_DEU
32 \brief AES Encryption Driver main file
33 */
34
35 /*!
36 \defgroup IFX_AES_FUNCTIONS IFX_AES_FUNCTIONS
37 \ingroup IFX_DEU
38 \brief IFX AES driver Functions
39 */
40
41
42 /* Project Header Files */
/* Kernel symbol-versioning support. NOTE: the header is <linux/modversions.h>;
 * the previous "<linux/modeversions>" spelling was a typo that broke any
 * build with CONFIG_MODVERSIONS enabled. */
#if defined(CONFIG_MODVERSIONS)
#define MODVERSIONS
#include <linux/modversions.h>
#endif
47
48 #include <linux/version.h>
49 #include <linux/module.h>
50 #include <linux/init.h>
51 #include <linux/proc_fs.h>
52 #include <linux/fs.h>
53 #include <linux/types.h>
54 #include <linux/errno.h>
55 #include <linux/crypto.h>
56 #include <linux/interrupt.h>
57 #include <linux/delay.h>
58 #include <asm/byteorder.h>
59 #include <crypto/algapi.h>
60 #include <crypto/internal/skcipher.h>
61
62 #include "ifxmips_deu.h"
63
64 #if defined(CONFIG_DANUBE)
65 #include "ifxmips_deu_danube.h"
66 extern int ifx_danube_pre_1_4;
67 #elif defined(CONFIG_AR9)
68 #include "ifxmips_deu_ar9.h"
69 #elif defined(CONFIG_VR9) || defined(CONFIG_AR10)
70 #include "ifxmips_deu_vr9.h"
71 #else
72 #error "Unkown platform"
73 #endif
74
/* DMA related header and variables */

/* Serializes all access to the single AES hardware register file. Held with
 * IRQs disabled for the full duration of each ifx_deu_aes() call. */
spinlock_t aes_lock;
/* NOTE: CRTCL_SECT_START/END expand to irqsave/irqrestore and therefore
 * require a local variable named "flag" (unsigned long) in the caller. */
#define CRTCL_SECT_INIT spin_lock_init(&aes_lock)
#define CRTCL_SECT_START spin_lock_irqsave(&aes_lock, flag)
#define CRTCL_SECT_END spin_unlock_irqrestore(&aes_lock, flag)
81
82 /* Definition of constants */
83 #define AES_START IFX_AES_CON
84 #define AES_MIN_KEY_SIZE 16
85 #define AES_MAX_KEY_SIZE 32
86 #define AES_BLOCK_SIZE 16
87 #define CTR_RFC3686_NONCE_SIZE 4
88 #define CTR_RFC3686_IV_SIZE 8
89 #define CTR_RFC3686_MIN_KEY_SIZE (AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE)
90 #define CTR_RFC3686_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE)
91
92 #ifdef CRYPTO_DEBUG
93 extern char debug_level;
94 #define DPRINTF(level, format, args...) if (level < debug_level) printk(KERN_INFO "[%s %s %d]: " format, __FILE__, __func__, __LINE__, ##args);
95 #else
96 #define DPRINTF(level, format, args...)
97 #endif /* CRYPTO_DEBUG */
98
99 /* Function decleration */
100 int aes_chip_init(void);
101 u32 endian_swap(u32 input);
102 u32 input_swap(u32 input);
103 u32* memory_alignment(const u8 *arg, u32 *buff_alloc, int in_out, int nbytes);
104 void aes_dma_memory_copy(u32 *outcopy, u32 *out_dma, u8 *out_arg, int nbytes);
105 void des_dma_memory_copy(u32 *outcopy, u32 *out_dma, u8 *out_arg, int nbytes);
106 int aes_memory_allocate(int value);
107 int des_memory_allocate(int value);
108 void memory_release(u32 *addr);
109
110
111 extern void ifx_deu_aes (void *ctx_arg, uint8_t *out_arg, const uint8_t *in_arg,
112 uint8_t *iv_arg, size_t nbytes, int encdec, int mode);
113 /* End of function decleration */
114
/* Per-transform AES state shared by all cipher/skcipher variants below. */
struct aes_ctx {
    int key_length;                     /* key size in bytes: 16, 24 or 32 */
    u32 buf[AES_MAX_KEY_SIZE];          /* raw key material; NOTE(review): sized
                                         * in u32 units, so 4x larger than the
                                         * 32 bytes actually needed — confirm
                                         * before shrinking */
    u8 nonce[CTR_RFC3686_NONCE_SIZE];   /* RFC 3686 nonce (CTR mode only) */
};
120
121 extern int disable_deudma;
122 extern int disable_multiblock;
123
124 /*! \fn int aes_set_key (struct crypto_tfm *tfm, const uint8_t *in_key, unsigned int key_len)
125 * \ingroup IFX_AES_FUNCTIONS
126 * \brief sets the AES keys
127 * \param tfm linux crypto algo transform
128 * \param in_key input key
129 * \param key_len key lengths of 16, 24 and 32 bytes supported
130 * \return -EINVAL - bad key length, 0 - SUCCESS
131 */
132 int aes_set_key (struct crypto_tfm *tfm, const u8 *in_key, unsigned int key_len)
133 {
134 struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
135
136 //printk("set_key in %s\n", __FILE__);
137
138 //aes_chip_init();
139
140 if (key_len != 16 && key_len != 24 && key_len != 32) {
141 return -EINVAL;
142 }
143
144 ctx->key_length = key_len;
145 DPRINTF(0, "ctx @%p, key_len %d, ctx->key_length %d\n", ctx, key_len, ctx->key_length);
146 memcpy ((u8 *) (ctx->buf), in_key, key_len);
147
148 return 0;
149 }
150
151
152 /*! \fn int aes_set_key_skcipher (struct crypto_skcipher *tfm, const uint8_t *in_key, unsigned int key_len)
153 * \ingroup IFX_AES_FUNCTIONS
154 * \brief sets the AES keys for skcipher
155 * \param tfm linux crypto skcipher
156 * \param in_key input key
157 * \param key_len key lengths of 16, 24 and 32 bytes supported
158 * \return -EINVAL - bad key length, 0 - SUCCESS
159 */
160 int aes_set_key_skcipher (struct crypto_skcipher *tfm, const u8 *in_key, unsigned int key_len)
161 {
162 return aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
163 }
164
165
/*! \fn void ifx_deu_aes (void *ctx_arg, u8 *out_arg, const u8 *in_arg, u8 *iv_arg, size_t nbytes, int encdec, int mode)
 * \ingroup IFX_AES_FUNCTIONS
 * \brief main interface to AES hardware
 * \param ctx_arg crypto algo context (struct aes_ctx: key material + length)
 * \param out_arg output bytestream
 * \param in_arg input bytestream
 * \param iv_arg initialization vector; read before the operation and written
 *        back afterwards for chaining modes (mode > 0); may be NULL for ECB
 * \param nbytes length of bytestream
 * \param encdec 1 for encrypt; 0 for decrypt
 * \param mode operation mode such as ebc, cbc, ctr
 *        (hardware encoding: 0 ECB, 1 CBC, 2 OFB, 3 CFB, 4 CTR)
 *
 * The whole key/IV/data programming sequence runs inside one spinlock-irqsave
 * critical section (CRTCL_SECT_*): the DEU has a single register file, so it
 * must not be re-programmed concurrently by another user.
 */
void ifx_deu_aes (void *ctx_arg, u8 *out_arg, const u8 *in_arg,
        u8 *iv_arg, size_t nbytes, int encdec, int mode)

{
    /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
    volatile struct aes_t *aes = (volatile struct aes_t *) AES_START;
    struct aes_ctx *ctx = (struct aes_ctx *)ctx_arg;
    u32 *in_key = ctx->buf;
    unsigned long flag;     /* required by CRTCL_SECT_START/END (irqsave) */
    /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
    int key_len = ctx->key_length;

    int i = 0;              /* number of 16-byte blocks processed so far */
    int byte_cnt = nbytes;  /* bytes still to process */


    CRTCL_SECT_START;
    /* 128, 192 or 256 bit key length */
    aes->controlr.K = key_len / 8 - 2;
    /* load the key registers high-to-low; shorter keys occupy only the
     * lower registers (K3..K0 for 128-bit, K5..K0 for 192-bit) */
    if (key_len == 128 / 8) {
        aes->K3R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 0));
        aes->K2R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 1));
        aes->K1R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 2));
        aes->K0R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 3));
    }
    else if (key_len == 192 / 8) {
        aes->K5R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 0));
        aes->K4R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 1));
        aes->K3R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 2));
        aes->K2R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 3));
        aes->K1R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 4));
        aes->K0R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 5));
    }
    else if (key_len == 256 / 8) {
        aes->K7R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 0));
        aes->K6R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 1));
        aes->K5R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 2));
        aes->K4R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 3));
        aes->K3R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 4));
        aes->K2R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 5));
        aes->K1R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 6));
        aes->K0R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 7));
    }
    else {
        printk (KERN_ERR "[%s %s %d]: Invalid key_len : %d\n", __FILE__, __func__, __LINE__, key_len);
        CRTCL_SECT_END;
        return;// -EINVAL;
    }

    /* let HW pre-process DEcryption key in any case (even if
       ENcryption is used). Key Valid (KV) bit is then only
       checked in decryption routine! */
    aes->controlr.PNK = 1;


    aes->controlr.E_D = !encdec;    //encryption
    aes->controlr.O = mode;         //0 ECB 1 CBC 2 OFB 3 CFB 4 CTR

    //aes->controlr.F = 128; //default; only for CFB and OFB modes; change only for customer-specific apps
    /* chaining modes need the IV programmed before the first block */
    if (mode > 0) {
        aes->IV3R = DEU_ENDIAN_SWAP(*(u32 *) iv_arg);
        aes->IV2R = DEU_ENDIAN_SWAP(*((u32 *) iv_arg + 1));
        aes->IV1R = DEU_ENDIAN_SWAP(*((u32 *) iv_arg + 2));
        aes->IV0R = DEU_ENDIAN_SWAP(*((u32 *) iv_arg + 3));
    };


    /* feed full 16-byte blocks through the data registers; writing ID0R
     * kicks off the crypto operation for the loaded block */
    i = 0;
    while (byte_cnt >= 16) {

        aes->ID3R = INPUT_ENDIAN_SWAP(*((u32 *) in_arg + (i * 4) + 0));
        aes->ID2R = INPUT_ENDIAN_SWAP(*((u32 *) in_arg + (i * 4) + 1));
        aes->ID1R = INPUT_ENDIAN_SWAP(*((u32 *) in_arg + (i * 4) + 2));
        aes->ID0R = INPUT_ENDIAN_SWAP(*((u32 *) in_arg + (i * 4) + 3)); /* start crypto */

        /* busy-wait for the block to complete */
        while (aes->controlr.BUS) {
            // this will not take long
        }

        *((volatile u32 *) out_arg + (i * 4) + 0) = aes->OD3R;
        *((volatile u32 *) out_arg + (i * 4) + 1) = aes->OD2R;
        *((volatile u32 *) out_arg + (i * 4) + 2) = aes->OD1R;
        *((volatile u32 *) out_arg + (i * 4) + 3) = aes->OD0R;

        i++;
        byte_cnt -= 16;
    }

    /* To handle all non-aligned bytes (not aligned to 16B size) */
    /* pad the trailing partial block with zeros in a bounce buffer, run it
     * through the engine, and copy back only the valid byte_cnt bytes */
    if (byte_cnt) {
        u8 temparea[16] = {0,};

        memcpy(temparea, ((u32 *) in_arg + (i * 4)), byte_cnt);

        aes->ID3R = INPUT_ENDIAN_SWAP(*((u32 *) temparea + 0));
        aes->ID2R = INPUT_ENDIAN_SWAP(*((u32 *) temparea + 1));
        aes->ID1R = INPUT_ENDIAN_SWAP(*((u32 *) temparea + 2));
        aes->ID0R = INPUT_ENDIAN_SWAP(*((u32 *) temparea + 3)); /* start crypto */

        while (aes->controlr.BUS) {
        }

        *((volatile u32 *) temparea + 0) = aes->OD3R;
        *((volatile u32 *) temparea + 1) = aes->OD2R;
        *((volatile u32 *) temparea + 2) = aes->OD1R;
        *((volatile u32 *) temparea + 3) = aes->OD0R;

        memcpy(((u32 *) out_arg + (i * 4)), temparea, byte_cnt);
    }

    //tc.chen : copy iv_arg back
    /* export the updated IV so the caller can chain the next request */
    if (mode > 0) {
        *((u32 *) iv_arg) = DEU_ENDIAN_SWAP(aes->IV3R);
        *((u32 *) iv_arg + 1) = DEU_ENDIAN_SWAP(aes->IV2R);
        *((u32 *) iv_arg + 2) = DEU_ENDIAN_SWAP(aes->IV1R);
        *((u32 *) iv_arg + 3) = DEU_ENDIAN_SWAP(aes->IV0R);
    }

    CRTCL_SECT_END;
}
298
299 /*!
300 * \fn int ctr_rfc3686_aes_set_key (struct crypto_tfm *tfm, const uint8_t *in_key, unsigned int key_len)
301 * \ingroup IFX_AES_FUNCTIONS
302 * \brief sets RFC3686 key
303 * \param tfm linux crypto algo transform
304 * \param in_key input key
305 * \param key_len key lengths of 20, 28 and 36 bytes supported; last 4 bytes is nonce
306 * \return 0 - SUCCESS
307 * -EINVAL - bad key length
308 */
309 int ctr_rfc3686_aes_set_key (struct crypto_tfm *tfm, const uint8_t *in_key, unsigned int key_len)
310 {
311 struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
312
313 //printk("ctr_rfc3686_aes_set_key in %s\n", __FILE__);
314
315 memcpy(ctx->nonce, in_key + (key_len - CTR_RFC3686_NONCE_SIZE),
316 CTR_RFC3686_NONCE_SIZE);
317
318 key_len -= CTR_RFC3686_NONCE_SIZE; // remove 4 bytes of nonce
319
320 if (key_len != 16 && key_len != 24 && key_len != 32) {
321 return -EINVAL;
322 }
323
324 ctx->key_length = key_len;
325
326 memcpy ((u8 *) (ctx->buf), in_key, key_len);
327
328 return 0;
329 }
330
331 /*!
332 * \fn int ctr_rfc3686_aes_set_key_skcipher (struct crypto_skcipher *tfm, const uint8_t *in_key, unsigned int key_len)
333 * \ingroup IFX_AES_FUNCTIONS
334 * \brief sets RFC3686 key for skcipher
335 * \param tfm linux crypto skcipher
336 * \param in_key input key
337 * \param key_len key lengths of 20, 28 and 36 bytes supported; last 4 bytes is nonce
338 * \return 0 - SUCCESS
339 * -EINVAL - bad key length
340 */
341 int ctr_rfc3686_aes_set_key_skcipher (struct crypto_skcipher *tfm, const uint8_t *in_key, unsigned int key_len)
342 {
343 return ctr_rfc3686_aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
344 }
345
346 /*! \fn void ifx_deu_aes (void *ctx_arg, u8 *out_arg, const u8 *in_arg, u8 *iv_arg, u32 nbytes, int encdec, int mode)
347 * \ingroup IFX_AES_FUNCTIONS
348 * \brief main interface with deu hardware in DMA mode
349 * \param ctx_arg crypto algo context
350 * \param out_arg output bytestream
351 * \param in_arg input bytestream
352 * \param iv_arg initialization vector
353 * \param nbytes length of bytestream
354 * \param encdec 1 for encrypt; 0 for decrypt
355 * \param mode operation mode such as ebc, cbc, ctr
356 */
357
358
359 //definitions from linux/include/crypto.h:
360 //#define CRYPTO_TFM_MODE_ECB 0x00000001
361 //#define CRYPTO_TFM_MODE_CBC 0x00000002
362 //#define CRYPTO_TFM_MODE_CFB 0x00000004
363 //#define CRYPTO_TFM_MODE_CTR 0x00000008
364 //#define CRYPTO_TFM_MODE_OFB 0x00000010 // not even defined
365 //but hardware definition: 0 ECB 1 CBC 2 OFB 3 CFB 4 CTR
366
367 /*! \fn void ifx_deu_aes_ecb (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
368 * \ingroup IFX_AES_FUNCTIONS
369 * \brief sets AES hardware to ECB mode
370 * \param ctx crypto algo context
371 * \param dst output bytestream
372 * \param src input bytestream
373 * \param iv initialization vector
374 * \param nbytes length of bytestream
375 * \param encdec 1 for encrypt; 0 for decrypt
376 * \param inplace not used
377 */
378 void ifx_deu_aes_ecb (void *ctx, uint8_t *dst, const uint8_t *src,
379 uint8_t *iv, size_t nbytes, int encdec, int inplace)
380 {
381 ifx_deu_aes (ctx, dst, src, NULL, nbytes, encdec, 0);
382 }
383
384 /*! \fn void ifx_deu_aes_cbc (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
385 * \ingroup IFX_AES_FUNCTIONS
386 * \brief sets AES hardware to CBC mode
387 * \param ctx crypto algo context
388 * \param dst output bytestream
389 * \param src input bytestream
390 * \param iv initialization vector
391 * \param nbytes length of bytestream
392 * \param encdec 1 for encrypt; 0 for decrypt
393 * \param inplace not used
394 */
395 void ifx_deu_aes_cbc (void *ctx, uint8_t *dst, const uint8_t *src,
396 uint8_t *iv, size_t nbytes, int encdec, int inplace)
397 {
398 ifx_deu_aes (ctx, dst, src, iv, nbytes, encdec, 1);
399 }
400
401 /*! \fn void ifx_deu_aes_ofb (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
402 * \ingroup IFX_AES_FUNCTIONS
403 * \brief sets AES hardware to OFB mode
404 * \param ctx crypto algo context
405 * \param dst output bytestream
406 * \param src input bytestream
407 * \param iv initialization vector
408 * \param nbytes length of bytestream
409 * \param encdec 1 for encrypt; 0 for decrypt
410 * \param inplace not used
411 */
412 void ifx_deu_aes_ofb (void *ctx, uint8_t *dst, const uint8_t *src,
413 uint8_t *iv, size_t nbytes, int encdec, int inplace)
414 {
415 ifx_deu_aes (ctx, dst, src, iv, nbytes, encdec, 2);
416 }
417
418 /*! \fn void ifx_deu_aes_cfb (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
419 * \ingroup IFX_AES_FUNCTIONS
420 * \brief sets AES hardware to CFB mode
421 * \param ctx crypto algo context
422 * \param dst output bytestream
423 * \param src input bytestream
424 * \param iv initialization vector
425 * \param nbytes length of bytestream
426 * \param encdec 1 for encrypt; 0 for decrypt
427 * \param inplace not used
428 */
429 void ifx_deu_aes_cfb (void *ctx, uint8_t *dst, const uint8_t *src,
430 uint8_t *iv, size_t nbytes, int encdec, int inplace)
431 {
432 ifx_deu_aes (ctx, dst, src, iv, nbytes, encdec, 3);
433 }
434
435 /*! \fn void ifx_deu_aes_ctr (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
436 * \ingroup IFX_AES_FUNCTIONS
437 * \brief sets AES hardware to CTR mode
438 * \param ctx crypto algo context
439 * \param dst output bytestream
440 * \param src input bytestream
441 * \param iv initialization vector
442 * \param nbytes length of bytestream
443 * \param encdec 1 for encrypt; 0 for decrypt
444 * \param inplace not used
445 */
446 void ifx_deu_aes_ctr (void *ctx, uint8_t *dst, const uint8_t *src,
447 uint8_t *iv, size_t nbytes, int encdec, int inplace)
448 {
449 ifx_deu_aes (ctx, dst, src, iv, nbytes, encdec, 4);
450 }
451
452 /*! \fn void aes_encrypt (struct crypto_tfm *tfm, uint8_t *out, const uint8_t *in)
453 * \ingroup IFX_AES_FUNCTIONS
454 * \brief encrypt AES_BLOCK_SIZE of data
455 * \param tfm linux crypto algo transform
456 * \param out output bytestream
457 * \param in input bytestream
458 */
459 void aes_encrypt (struct crypto_tfm *tfm, uint8_t *out, const uint8_t *in)
460 {
461 struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
462 ifx_deu_aes (ctx, out, in, NULL, AES_BLOCK_SIZE,
463 CRYPTO_DIR_ENCRYPT, 0);
464 }
465
466 /*! \fn void aes_decrypt (struct crypto_tfm *tfm, uint8_t *out, const uint8_t *in)
467 * \ingroup IFX_AES_FUNCTIONS
468 * \brief decrypt AES_BLOCK_SIZE of data
469 * \param tfm linux crypto algo transform
470 * \param out output bytestream
471 * \param in input bytestream
472 */
473 void aes_decrypt (struct crypto_tfm *tfm, uint8_t *out, const uint8_t *in)
474 {
475 struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
476 ifx_deu_aes (ctx, out, in, NULL, AES_BLOCK_SIZE,
477 CRYPTO_DIR_DECRYPT, 0);
478 }
479
/*
 * \brief AES function mappings
 * Registration descriptor for the bare single-block cipher ("aes").
 * Priority 300 so it can outrank the generic software implementation.
 */
struct crypto_alg ifxdeu_aes_alg = {
    .cra_name = "aes",
    .cra_driver_name = "ifxdeu-aes",
    .cra_priority = 300,
    .cra_flags = CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .cra_blocksize = AES_BLOCK_SIZE,
    .cra_ctxsize = sizeof(struct aes_ctx),
    .cra_module = THIS_MODULE,
    .cra_list = LIST_HEAD_INIT(ifxdeu_aes_alg.cra_list),
    .cra_u = {
        .cipher = {
            .cia_min_keysize = AES_MIN_KEY_SIZE,
            .cia_max_keysize = AES_MAX_KEY_SIZE,
            .cia_setkey = aes_set_key,
            .cia_encrypt = aes_encrypt,
            .cia_decrypt = aes_decrypt,
        }
    }
};
502
503 /*! \fn int ecb_aes_encrypt(struct skcipher_req *req)
504 * \ingroup IFX_AES_FUNCTIONS
505 * \brief ECB AES encrypt using linux crypto skcipher
506 * \param req skcipher request
507 * \return err
508 */
509 int ecb_aes_encrypt(struct skcipher_request *req)
510 {
511 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
512 struct skcipher_walk walk;
513 int err;
514 unsigned int enc_bytes, nbytes;
515
516 err = skcipher_walk_virt(&walk, req, false);
517
518 while ((nbytes = enc_bytes = walk.nbytes)) {
519 enc_bytes -= (nbytes % AES_BLOCK_SIZE);
520 ifx_deu_aes_ecb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
521 NULL, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
522 nbytes &= AES_BLOCK_SIZE - 1;
523 err = skcipher_walk_done(&walk, nbytes);
524 }
525
526 return err;
527 }
528
529 /*! \fn int ecb_aes_decrypt(struct skcipher_req *req)
530 * \ingroup IFX_AES_FUNCTIONS
531 * \brief ECB AES decrypt using linux crypto skcipher
532 * \param req skcipher request
533 * \return err
534 */
535 int ecb_aes_decrypt(struct skcipher_request *req)
536 {
537 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
538 struct skcipher_walk walk;
539 int err;
540 unsigned int dec_bytes, nbytes;
541
542 err = skcipher_walk_virt(&walk, req, false);
543
544 while ((nbytes = dec_bytes = walk.nbytes)) {
545 dec_bytes -= (nbytes % AES_BLOCK_SIZE);
546 ifx_deu_aes_ecb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
547 NULL, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
548 nbytes &= AES_BLOCK_SIZE - 1;
549 err = skcipher_walk_done(&walk, nbytes);
550 }
551
552 return err;
553 }
554
/*
 * \brief AES function mappings
 * skcipher registration for "ecb(aes)"; priority 400 beats both the
 * generic software ECB template and the bare cipher above.
 */
struct skcipher_alg ifxdeu_ecb_aes_alg = {
    .base.cra_name = "ecb(aes)",
    .base.cra_driver_name = "ifxdeu-ecb(aes)",
    .base.cra_priority = 400,
    .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize = AES_BLOCK_SIZE,
    .base.cra_ctxsize = sizeof(struct aes_ctx),
    .base.cra_module = THIS_MODULE,
    .base.cra_list = LIST_HEAD_INIT(ifxdeu_ecb_aes_alg.base.cra_list),
    .min_keysize = AES_MIN_KEY_SIZE,
    .max_keysize = AES_MAX_KEY_SIZE,
    .setkey = aes_set_key_skcipher,
    .encrypt = ecb_aes_encrypt,
    .decrypt = ecb_aes_decrypt,
};
573
574 /*! \fn int ecb_aes_encrypt(struct skcipher_req *req)
575 * \ingroup IFX_AES_FUNCTIONS
576 * \brief CBC AES encrypt using linux crypto skcipher
577 * \param req skcipher request
578 * \return err
579 */
580 int cbc_aes_encrypt(struct skcipher_request *req)
581 {
582 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
583 struct skcipher_walk walk;
584 int err;
585 unsigned int enc_bytes, nbytes;
586
587 err = skcipher_walk_virt(&walk, req, false);
588
589 while ((nbytes = enc_bytes = walk.nbytes)) {
590 u8 *iv = walk.iv;
591 enc_bytes -= (nbytes % AES_BLOCK_SIZE);
592 ifx_deu_aes_cbc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
593 iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
594 nbytes &= AES_BLOCK_SIZE - 1;
595 err = skcipher_walk_done(&walk, nbytes);
596 }
597
598 return err;
599 }
600
601 /*! \fn int cbc_aes_decrypt(struct skcipher_req *req)
602 * \ingroup IFX_AES_FUNCTIONS
603 * \brief CBC AES decrypt using linux crypto skcipher
604 * \param req skcipher request
605 * \return err
606 */
607 int cbc_aes_decrypt(struct skcipher_request *req)
608 {
609 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
610 struct skcipher_walk walk;
611 int err;
612 unsigned int dec_bytes, nbytes;
613
614 err = skcipher_walk_virt(&walk, req, false);
615
616 while ((nbytes = dec_bytes = walk.nbytes)) {
617 u8 *iv = walk.iv;
618 dec_bytes -= (nbytes % AES_BLOCK_SIZE);
619 ifx_deu_aes_cbc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
620 iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
621 nbytes &= AES_BLOCK_SIZE - 1;
622 err = skcipher_walk_done(&walk, nbytes);
623 }
624
625 return err;
626 }
627
/*
 * \brief AES function mappings
 * skcipher registration for "cbc(aes)"; block-aligned, carries a
 * full-block IV.
 */
struct skcipher_alg ifxdeu_cbc_aes_alg = {
    .base.cra_name = "cbc(aes)",
    .base.cra_driver_name = "ifxdeu-cbc(aes)",
    .base.cra_priority = 400,
    .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize = AES_BLOCK_SIZE,
    .base.cra_ctxsize = sizeof(struct aes_ctx),
    .base.cra_module = THIS_MODULE,
    .base.cra_list = LIST_HEAD_INIT(ifxdeu_cbc_aes_alg.base.cra_list),
    .min_keysize = AES_MIN_KEY_SIZE,
    .max_keysize = AES_MAX_KEY_SIZE,
    .ivsize = AES_BLOCK_SIZE,
    .setkey = aes_set_key_skcipher,
    .encrypt = cbc_aes_encrypt,
    .decrypt = cbc_aes_decrypt,
};
647
648
649 /*! \fn int ofb_aes_encrypt(struct skcipher_req *req)
650 * \ingroup IFX_AES_FUNCTIONS
651 * \brief OFB AES encrypt using linux crypto skcipher
652 * \param req skcipher request
653 * \return err
654 */
655 int ofb_aes_encrypt(struct skcipher_request *req)
656 {
657 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
658 struct skcipher_walk walk;
659 int err;
660 unsigned int enc_bytes, nbytes;
661
662 err = skcipher_walk_virt(&walk, req, false);
663
664 while ((nbytes = enc_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
665 enc_bytes -= (nbytes % AES_BLOCK_SIZE);
666 ifx_deu_aes_ofb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
667 walk.iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
668 nbytes &= AES_BLOCK_SIZE - 1;
669 err = skcipher_walk_done(&walk, nbytes);
670 }
671
672 /* to handle remaining bytes < AES_BLOCK_SIZE */
673 if (walk.nbytes) {
674 ifx_deu_aes_ofb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
675 walk.iv, walk.nbytes, CRYPTO_DIR_ENCRYPT, 0);
676 err = skcipher_walk_done(&walk, 0);
677 }
678
679 return err;
680 }
681
682 /*! \fn int ofb_aes_decrypt(struct skcipher_req *req)
683 * \ingroup IFX_AES_FUNCTIONS
684 * \brief OFB AES decrypt using linux crypto skcipher
685 * \param req skcipher request
686 * \return err
687 */
688 int ofb_aes_decrypt(struct skcipher_request *req)
689 {
690 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
691 struct skcipher_walk walk;
692 int err;
693 unsigned int dec_bytes, nbytes;
694
695 err = skcipher_walk_virt(&walk, req, false);
696
697 while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
698 dec_bytes -= (nbytes % AES_BLOCK_SIZE);
699 ifx_deu_aes_ofb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
700 walk.iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
701 nbytes &= AES_BLOCK_SIZE - 1;
702 err = skcipher_walk_done(&walk, nbytes);
703 }
704
705 /* to handle remaining bytes < AES_BLOCK_SIZE */
706 if (walk.nbytes) {
707 ifx_deu_aes_ofb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
708 walk.iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
709 err = skcipher_walk_done(&walk, 0);
710 }
711
712 return err;
713 }
714
/*
 * \brief AES function mappings
 * skcipher registration for "ofb(aes)"; stream mode, so cra_blocksize
 * is 1 while chunksize/walksize keep the walk block-aligned.
 */
struct skcipher_alg ifxdeu_ofb_aes_alg = {
    .base.cra_name = "ofb(aes)",
    .base.cra_driver_name = "ifxdeu-ofb(aes)",
    .base.cra_priority = 400,
    .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize = 1,
    .base.cra_ctxsize = sizeof(struct aes_ctx),
    .base.cra_module = THIS_MODULE,
    .base.cra_list = LIST_HEAD_INIT(ifxdeu_ofb_aes_alg.base.cra_list),
    .min_keysize = AES_MIN_KEY_SIZE,
    .max_keysize = AES_MAX_KEY_SIZE,
    .ivsize = AES_BLOCK_SIZE,
    .chunksize = AES_BLOCK_SIZE,
    .walksize = AES_BLOCK_SIZE,
    .setkey = aes_set_key_skcipher,
    .encrypt = ofb_aes_encrypt,
    .decrypt = ofb_aes_decrypt,
};
736
737 /*! \fn int cfb_aes_encrypt(struct skcipher_req *req)
738 * \ingroup IFX_AES_FUNCTIONS
739 * \brief CFB AES encrypt using linux crypto skcipher
740 * \param req skcipher request
741 * \return err
742 */
743 int cfb_aes_encrypt(struct skcipher_request *req)
744 {
745 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
746 struct skcipher_walk walk;
747 int err;
748 unsigned int enc_bytes, nbytes;
749
750 err = skcipher_walk_virt(&walk, req, false);
751
752 while ((nbytes = enc_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
753 enc_bytes -= (nbytes % AES_BLOCK_SIZE);
754 ifx_deu_aes_cfb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
755 walk.iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
756 nbytes &= AES_BLOCK_SIZE - 1;
757 err = skcipher_walk_done(&walk, nbytes);
758 }
759
760 /* to handle remaining bytes < AES_BLOCK_SIZE */
761 if (walk.nbytes) {
762 ifx_deu_aes_cfb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
763 walk.iv, walk.nbytes, CRYPTO_DIR_ENCRYPT, 0);
764 err = skcipher_walk_done(&walk, 0);
765 }
766
767 return err;
768 }
769
770 /*! \fn int cfb_aes_decrypt(struct skcipher_req *req)
771 * \ingroup IFX_AES_FUNCTIONS
772 * \brief CFB AES decrypt using linux crypto skcipher
773 * \param req skcipher request
774 * \return err
775 */
776 int cfb_aes_decrypt(struct skcipher_request *req)
777 {
778 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
779 struct skcipher_walk walk;
780 int err;
781 unsigned int dec_bytes, nbytes;
782
783 err = skcipher_walk_virt(&walk, req, false);
784
785 while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
786 dec_bytes -= (nbytes % AES_BLOCK_SIZE);
787 ifx_deu_aes_cfb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
788 walk.iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
789 nbytes &= AES_BLOCK_SIZE - 1;
790 err = skcipher_walk_done(&walk, nbytes);
791 }
792
793 /* to handle remaining bytes < AES_BLOCK_SIZE */
794 if (walk.nbytes) {
795 ifx_deu_aes_cfb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
796 walk.iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
797 err = skcipher_walk_done(&walk, 0);
798 }
799
800 return err;
801 }
802
/*
 * \brief AES function mappings
 * skcipher registration for "cfb(aes)"; stream mode (cra_blocksize 1)
 * with block-aligned walks via chunksize/walksize.
 */
struct skcipher_alg ifxdeu_cfb_aes_alg = {
    .base.cra_name = "cfb(aes)",
    .base.cra_driver_name = "ifxdeu-cfb(aes)",
    .base.cra_priority = 400,
    .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize = 1,
    .base.cra_ctxsize = sizeof(struct aes_ctx),
    .base.cra_module = THIS_MODULE,
    .base.cra_list = LIST_HEAD_INIT(ifxdeu_cfb_aes_alg.base.cra_list),
    .min_keysize = AES_MIN_KEY_SIZE,
    .max_keysize = AES_MAX_KEY_SIZE,
    .ivsize = AES_BLOCK_SIZE,
    .chunksize = AES_BLOCK_SIZE,
    .walksize = AES_BLOCK_SIZE,
    .setkey = aes_set_key_skcipher,
    .encrypt = cfb_aes_encrypt,
    .decrypt = cfb_aes_decrypt,
};
824
825 /*! \fn int ctr_basic_aes_encrypt(struct skcipher_req *req)
826 * \ingroup IFX_AES_FUNCTIONS
827 * \brief Counter mode AES encrypt using linux crypto skcipher
828 * \param req skcipher request
829 * \return err
830 */
831 int ctr_basic_aes_encrypt(struct skcipher_request *req)
832 {
833 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
834 struct skcipher_walk walk;
835 int err;
836 unsigned int enc_bytes, nbytes;
837
838 err = skcipher_walk_virt(&walk, req, false);
839
840 while ((nbytes = enc_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
841 enc_bytes -= (nbytes % AES_BLOCK_SIZE);
842 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
843 walk.iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
844 nbytes &= AES_BLOCK_SIZE - 1;
845 err = skcipher_walk_done(&walk, nbytes);
846 }
847
848 /* to handle remaining bytes < AES_BLOCK_SIZE */
849 if (walk.nbytes) {
850 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
851 walk.iv, walk.nbytes, CRYPTO_DIR_ENCRYPT, 0);
852 err = skcipher_walk_done(&walk, 0);
853 }
854
855 return err;
856 }
857
858 /*! \fn int ctr_basic_aes_encrypt(struct skcipher_req *req)
859 * \ingroup IFX_AES_FUNCTIONS
860 * \brief Counter mode AES decrypt using linux crypto skcipher
861 * \param req skcipher request
862 * \return err
863 */
864 int ctr_basic_aes_decrypt(struct skcipher_request *req)
865 {
866 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
867 struct skcipher_walk walk;
868 int err;
869 unsigned int dec_bytes, nbytes;
870
871 err = skcipher_walk_virt(&walk, req, false);
872
873 while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
874 dec_bytes -= (nbytes % AES_BLOCK_SIZE);
875 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
876 walk.iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
877 nbytes &= AES_BLOCK_SIZE - 1;
878 err = skcipher_walk_done(&walk, nbytes);
879 }
880
881 /* to handle remaining bytes < AES_BLOCK_SIZE */
882 if (walk.nbytes) {
883 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
884 walk.iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
885 err = skcipher_walk_done(&walk, 0);
886 }
887
888 return err;
889 }
890
/*
 * \brief AES function mappings
 * skcipher registration for plain "ctr(aes)"; stream mode, full-block IV
 * used as the counter block.
 */
struct skcipher_alg ifxdeu_ctr_basic_aes_alg = {
    .base.cra_name = "ctr(aes)",
    .base.cra_driver_name = "ifxdeu-ctr(aes)",
    .base.cra_priority = 400,
    .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize = 1,
    .base.cra_ctxsize = sizeof(struct aes_ctx),
    .base.cra_module = THIS_MODULE,
    .base.cra_list = LIST_HEAD_INIT(ifxdeu_ctr_basic_aes_alg.base.cra_list),
    .min_keysize = AES_MIN_KEY_SIZE,
    .max_keysize = AES_MAX_KEY_SIZE,
    .ivsize = AES_BLOCK_SIZE,
    .walksize = AES_BLOCK_SIZE,
    .setkey = aes_set_key_skcipher,
    .encrypt = ctr_basic_aes_encrypt,
    .decrypt = ctr_basic_aes_decrypt,
};
911
912 /*! \fn int ctr_rfc3686_aes_encrypt(struct skcipher_req *req)
913 * \ingroup IFX_AES_FUNCTIONS
914 * \brief Counter mode AES (rfc3686) encrypt using linux crypto skcipher
915 * \param req skcipher request
916 * \return err
917 */
918 int ctr_rfc3686_aes_encrypt(struct skcipher_request *req)
919 {
920 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
921 struct skcipher_walk walk;
922 unsigned int nbytes, enc_bytes;
923 int err;
924 u8 rfc3686_iv[16];
925
926 err = skcipher_walk_virt(&walk, req, false);
927 nbytes = walk.nbytes;
928
929 /* set up counter block */
930 memcpy(rfc3686_iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
931 memcpy(rfc3686_iv + CTR_RFC3686_NONCE_SIZE, walk.iv, CTR_RFC3686_IV_SIZE);
932
933 /* initialize counter portion of counter block */
934 *(__be32 *)(rfc3686_iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
935 cpu_to_be32(1);
936
937 while ((nbytes = enc_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
938 enc_bytes -= (nbytes % AES_BLOCK_SIZE);
939 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
940 rfc3686_iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
941 nbytes &= AES_BLOCK_SIZE - 1;
942 err = skcipher_walk_done(&walk, nbytes);
943 }
944
945 /* to handle remaining bytes < AES_BLOCK_SIZE */
946 if (walk.nbytes) {
947 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
948 rfc3686_iv, walk.nbytes, CRYPTO_DIR_ENCRYPT, 0);
949 err = skcipher_walk_done(&walk, 0);
950 }
951
952 return err;
953 }
954
955 /*! \fn int ctr_rfc3686_aes_decrypt(struct skcipher_req *req)
956 * \ingroup IFX_AES_FUNCTIONS
957 * \brief Counter mode AES (rfc3686) decrypt using linux crypto skcipher
958 * \param req skcipher request
959 * \return err
960 */
961 int ctr_rfc3686_aes_decrypt(struct skcipher_request *req)
962 {
963 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
964 struct skcipher_walk walk;
965 unsigned int nbytes, dec_bytes;
966 int err;
967 u8 rfc3686_iv[16];
968
969 err = skcipher_walk_virt(&walk, req, false);
970 nbytes = walk.nbytes;
971
972 /* set up counter block */
973 memcpy(rfc3686_iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
974 memcpy(rfc3686_iv + CTR_RFC3686_NONCE_SIZE, walk.iv, CTR_RFC3686_IV_SIZE);
975
976 /* initialize counter portion of counter block */
977 *(__be32 *)(rfc3686_iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
978 cpu_to_be32(1);
979
980 while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
981 dec_bytes -= (nbytes % AES_BLOCK_SIZE);
982 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
983 rfc3686_iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
984 nbytes &= AES_BLOCK_SIZE - 1;
985 err = skcipher_walk_done(&walk, nbytes);
986 }
987
988 /* to handle remaining bytes < AES_BLOCK_SIZE */
989 if (walk.nbytes) {
990 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
991 rfc3686_iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
992 err = skcipher_walk_done(&walk, 0);
993 }
994
995 return err;
996 }
997
998 /*
999 * \brief AES function mappings
1000 */
1001 struct skcipher_alg ifxdeu_ctr_rfc3686_aes_alg = {
1002 .base.cra_name = "rfc3686(ctr(aes))",
1003 .base.cra_driver_name = "ifxdeu-ctr-rfc3686(aes)",
1004 .base.cra_priority = 400,
1005 .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
1006 .base.cra_blocksize = 1,
1007 .base.cra_ctxsize = sizeof(struct aes_ctx),
1008 .base.cra_module = THIS_MODULE,
1009 .base.cra_list = LIST_HEAD_INIT(ifxdeu_ctr_rfc3686_aes_alg.base.cra_list),
1010 .min_keysize = CTR_RFC3686_MIN_KEY_SIZE,
1011 .max_keysize = CTR_RFC3686_MAX_KEY_SIZE,
1012 .ivsize = CTR_RFC3686_IV_SIZE,
1013 .walksize = AES_BLOCK_SIZE,
1014 .setkey = ctr_rfc3686_aes_set_key_skcipher,
1015 .encrypt = ctr_rfc3686_aes_encrypt,
1016 .decrypt = ctr_rfc3686_aes_decrypt,
1017 };
1018
1019 /*! \fn int ifxdeu_init_aes (void)
1020 * \ingroup IFX_AES_FUNCTIONS
1021 * \brief function to initialize AES driver
1022 * \return ret
1023 */
1024 int ifxdeu_init_aes (void)
1025 {
1026 int ret = -ENOSYS;
1027
1028 aes_chip_init();
1029
1030 if ((ret = crypto_register_alg(&ifxdeu_aes_alg)))
1031 goto aes_err;
1032
1033 if ((ret = crypto_register_skcipher(&ifxdeu_ecb_aes_alg)))
1034 goto ecb_aes_err;
1035
1036 if ((ret = crypto_register_skcipher(&ifxdeu_cbc_aes_alg)))
1037 goto cbc_aes_err;
1038
1039 if ((ret = crypto_register_skcipher(&ifxdeu_ofb_aes_alg)))
1040 goto ofb_aes_err;
1041
1042 if ((ret = crypto_register_skcipher(&ifxdeu_cfb_aes_alg)))
1043 goto cfb_aes_err;
1044
1045 if ((ret = crypto_register_skcipher(&ifxdeu_ctr_basic_aes_alg)))
1046 goto ctr_basic_aes_err;
1047
1048 if ((ret = crypto_register_skcipher(&ifxdeu_ctr_rfc3686_aes_alg)))
1049 goto ctr_rfc3686_aes_err;
1050
1051 CRTCL_SECT_INIT;
1052
1053
1054 printk (KERN_NOTICE "IFX DEU AES initialized%s%s.\n", disable_multiblock ? "" : " (multiblock)", disable_deudma ? "" : " (DMA)");
1055 return ret;
1056
1057 ctr_rfc3686_aes_err:
1058 crypto_unregister_skcipher(&ifxdeu_ctr_rfc3686_aes_alg);
1059 printk (KERN_ERR "IFX ctr_rfc3686_aes initialization failed!\n");
1060 return ret;
1061 ctr_basic_aes_err:
1062 crypto_unregister_skcipher(&ifxdeu_ctr_basic_aes_alg);
1063 printk (KERN_ERR "IFX ctr_basic_aes initialization failed!\n");
1064 return ret;
1065 cfb_aes_err:
1066 crypto_unregister_skcipher(&ifxdeu_cfb_aes_alg);
1067 printk (KERN_ERR "IFX cfb_aes initialization failed!\n");
1068 return ret;
1069 ofb_aes_err:
1070 crypto_unregister_skcipher(&ifxdeu_ofb_aes_alg);
1071 printk (KERN_ERR "IFX ofb_aes initialization failed!\n");
1072 return ret;
1073 cbc_aes_err:
1074 crypto_unregister_skcipher(&ifxdeu_cbc_aes_alg);
1075 printk (KERN_ERR "IFX cbc_aes initialization failed!\n");
1076 return ret;
1077 ecb_aes_err:
1078 crypto_unregister_skcipher(&ifxdeu_ecb_aes_alg);
1079 printk (KERN_ERR "IFX aes initialization failed!\n");
1080 return ret;
1081 aes_err:
1082 printk(KERN_ERR "IFX DEU AES initialization failed!\n");
1083
1084 return ret;
1085 }
1086
1087 /*! \fn void ifxdeu_fini_aes (void)
1088 * \ingroup IFX_AES_FUNCTIONS
1089 * \brief unregister aes driver
1090 */
1091 void ifxdeu_fini_aes (void)
1092 {
1093 crypto_unregister_alg (&ifxdeu_aes_alg);
1094 crypto_unregister_skcipher (&ifxdeu_ecb_aes_alg);
1095 crypto_unregister_skcipher (&ifxdeu_cbc_aes_alg);
1096 crypto_unregister_skcipher (&ifxdeu_ofb_aes_alg);
1097 crypto_unregister_skcipher (&ifxdeu_cfb_aes_alg);
1098 crypto_unregister_skcipher (&ifxdeu_ctr_basic_aes_alg);
1099 crypto_unregister_skcipher (&ifxdeu_ctr_rfc3686_aes_alg);
1100
1101 }