apm821xx: backport crypto4xx patches from 4.15
[openwrt/openwrt.git] target/linux/apm821xx/patches-4.14/020-0023-crypto-crypto4xx-prepare-for-AEAD-support.patch
From a0aae821ba3d35a49d4d0143dfb0c07eee22130e Mon Sep 17 00:00:00 2001
From: Christian Lamparter <chunkeey@gmail.com>
Date: Wed, 4 Oct 2017 01:00:15 +0200
Subject: [PATCH 23/25] crypto: crypto4xx - prepare for AEAD support

This patch enhances the existing interfaces and functions so that they
can support the AEAD ciphers that are added in the next patches.

Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
---
 drivers/crypto/amcc/crypto4xx_alg.c  |  19 +--
 drivers/crypto/amcc/crypto4xx_core.c | 217 +++++++++++++++++++++++++++--------
 drivers/crypto/amcc/crypto4xx_core.h |  22 ++--
 drivers/crypto/amcc/crypto4xx_sa.h   |  41 +++++++
 4 files changed, 226 insertions(+), 73 deletions(-)
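
The key interface change, for reference (a sketch reconstructed from the
hunks below, not itself part of the diff; the names of the first four
parameters are inferred from the call sites, the trailing ones appear
verbatim in the crypto4xx_core.h hunk):

  /* crypto4xx_build_pd() gains an explicit associated-data length.
   * The AEAD code added by the follow-up patches passes its assoclen
   * here; every existing ablkcipher and ahash call site passes 0.
   * Per the hunks below, assoclen is added to the packet length and
   * its word count is written to sa_command_1.bf.hash_crypto_offset,
   * i.e. the associated data is hashed but not encrypted. */
  int crypto4xx_build_pd(struct crypto_async_request *req,
                         struct crypto4xx_ctx *ctx,
                         struct scatterlist *src,
                         struct scatterlist *dst,
                         const unsigned int datalen,
                         const __le32 *iv, const u32 iv_len,
                         const struct dynamic_sa_ctl *sa,
                         const unsigned int sa_len,
                         const unsigned int assoclen);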

--- a/drivers/crypto/amcc/crypto4xx_alg.c
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
@@ -26,6 +26,7 @@
 #include <crypto/internal/hash.h>
 #include <linux/dma-mapping.h>
 #include <crypto/algapi.h>
+#include <crypto/aead.h>
 #include <crypto/aes.h>
 #include <crypto/sha.h>
 #include <crypto/ctr.h>
@@ -83,7 +84,7 @@ int crypto4xx_encrypt(struct ablkcipher_
 	crypto4xx_memcpy_to_le32(iv, req->info, ivlen);
 
 	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
-		req->nbytes, iv, ivlen, ctx->sa_out, ctx->sa_len);
+		req->nbytes, iv, ivlen, ctx->sa_out, ctx->sa_len, 0);
 }
 
 int crypto4xx_decrypt(struct ablkcipher_request *req)
@@ -97,7 +98,7 @@ int crypto4xx_decrypt(struct ablkcipher_
 	crypto4xx_memcpy_to_le32(iv, req->info, ivlen);
 
 	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
-		req->nbytes, iv, ivlen, ctx->sa_in, ctx->sa_len);
+		req->nbytes, iv, ivlen, ctx->sa_in, ctx->sa_len, 0);
 }
 
 /**
@@ -213,7 +214,7 @@ int crypto4xx_rfc3686_encrypt(struct abl
 
 	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
 				  req->nbytes, iv, AES_IV_SIZE,
-				  ctx->sa_out, ctx->sa_len);
+				  ctx->sa_out, ctx->sa_len, 0);
 }
 
 int crypto4xx_rfc3686_decrypt(struct ablkcipher_request *req)
@@ -227,7 +228,7 @@ int crypto4xx_rfc3686_decrypt(struct abl
 
 	return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
 				  req->nbytes, iv, AES_IV_SIZE,
-				  ctx->sa_out, ctx->sa_len);
+				  ctx->sa_out, ctx->sa_len, 0);
 }
 
 /**
@@ -239,11 +240,13 @@ static int crypto4xx_hash_alg_init(struc
 				   unsigned char hm)
 {
 	struct crypto_alg *alg = tfm->__crt_alg;
-	struct crypto4xx_alg *my_alg = crypto_alg_to_crypto4xx_alg(alg);
+	struct crypto4xx_alg *my_alg;
 	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
 	struct dynamic_sa_hash160 *sa;
 	int rc;
 
+	my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
+			      alg.u.hash);
 	ctx->dev = my_alg->dev;
 
 	/* Create SA */
@@ -300,7 +303,7 @@ int crypto4xx_hash_update(struct ahash_r
 
 	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
 				  req->nbytes, NULL, 0, ctx->sa_in,
-				  ctx->sa_len);
+				  ctx->sa_len, 0);
 }
 
 int crypto4xx_hash_final(struct ahash_request *req)
@@ -319,7 +322,7 @@ int crypto4xx_hash_digest(struct ahash_r
 
 	return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
 				  req->nbytes, NULL, 0, ctx->sa_in,
-				  ctx->sa_len);
+				  ctx->sa_len, 0);
 }
 
 /**
@@ -330,5 +333,3 @@ int crypto4xx_sha1_alg_init(struct crypt
 	return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
 				       SA_HASH_MODE_HASH);
 }
-
-
--- a/drivers/crypto/amcc/crypto4xx_core.c
+++ b/drivers/crypto/amcc/crypto4xx_core.c
@@ -35,10 +35,12 @@
 #include <asm/dcr.h>
 #include <asm/dcr-regs.h>
 #include <asm/cacheflush.h>
+#include <crypto/aead.h>
 #include <crypto/aes.h>
 #include <crypto/ctr.h>
 #include <crypto/sha.h>
 #include <crypto/scatterwalk.h>
+#include <crypto/internal/aead.h>
 #include <crypto/internal/skcipher.h>
 #include "crypto4xx_reg_def.h"
 #include "crypto4xx_core.h"
@@ -518,7 +520,7 @@ static void crypto4xx_ret_sg_desc(struct
 	}
 }
 
-static u32 crypto4xx_ablkcipher_done(struct crypto4xx_device *dev,
+static void crypto4xx_ablkcipher_done(struct crypto4xx_device *dev,
 				     struct pd_uinfo *pd_uinfo,
 				     struct ce_pd *pd)
 {
@@ -543,11 +545,9 @@ static u32 crypto4xx_ablkcipher_done(str
 	if (pd_uinfo->state & PD_ENTRY_BUSY)
 		ablkcipher_request_complete(ablk_req, -EINPROGRESS);
 	ablkcipher_request_complete(ablk_req, 0);
-
-	return 0;
 }
 
-static u32 crypto4xx_ahash_done(struct crypto4xx_device *dev,
+static void crypto4xx_ahash_done(struct crypto4xx_device *dev,
 				struct pd_uinfo *pd_uinfo)
 {
@@ -563,20 +563,88 @@ static u32 crypto4xx_ahash_done(struct c
 	if (pd_uinfo->state & PD_ENTRY_BUSY)
 		ahash_request_complete(ahash_req, -EINPROGRESS);
 	ahash_request_complete(ahash_req, 0);
+}
 
-	return 0;
+static void crypto4xx_aead_done(struct crypto4xx_device *dev,
+				struct pd_uinfo *pd_uinfo,
+				struct ce_pd *pd)
+{
+	struct aead_request *aead_req;
+	struct crypto4xx_ctx *ctx;
+	struct scatterlist *dst = pd_uinfo->dest_va;
+	int err = 0;
+
+	aead_req = container_of(pd_uinfo->async_req, struct aead_request,
+				base);
+	ctx = crypto_tfm_ctx(aead_req->base.tfm);
+
+	if (pd_uinfo->using_sd) {
+		crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo,
+					  pd->pd_ctl_len.bf.pkt_len,
+					  dst);
+	} else {
+		__dma_sync_page(sg_page(dst), dst->offset, dst->length,
+				DMA_FROM_DEVICE);
+	}
+
+	if (pd_uinfo->sa_va->sa_command_0.bf.dir == DIR_OUTBOUND) {
+		/* append icv at the end */
+		size_t cp_len = crypto_aead_authsize(
+			crypto_aead_reqtfm(aead_req));
+		u32 icv[cp_len];
+
+		crypto4xx_memcpy_from_le32(icv, pd_uinfo->sr_va->save_digest,
+					   cp_len);
+
+		scatterwalk_map_and_copy(icv, dst, aead_req->cryptlen,
+					 cp_len, 1);
+	}
+
+	crypto4xx_ret_sg_desc(dev, pd_uinfo);
+
+	if (pd->pd_ctl.bf.status & 0xff) {
+		if (pd->pd_ctl.bf.status & 0x1) {
+			/* authentication error */
+			err = -EBADMSG;
+		} else {
+			if (!__ratelimit(&dev->aead_ratelimit)) {
+				if (pd->pd_ctl.bf.status & 2)
+					pr_err("pad fail error\n");
+				if (pd->pd_ctl.bf.status & 4)
+					pr_err("seqnum fail\n");
+				if (pd->pd_ctl.bf.status & 8)
+					pr_err("error _notify\n");
+				pr_err("aead return err status = 0x%02x\n",
+				       pd->pd_ctl.bf.status & 0xff);
+				pr_err("pd pad_ctl = 0x%08x\n",
+				       pd->pd_ctl.bf.pd_pad_ctl);
+			}
+			err = -EINVAL;
+		}
+	}
+
+	if (pd_uinfo->state & PD_ENTRY_BUSY)
+		aead_request_complete(aead_req, -EINPROGRESS);
+
+	aead_request_complete(aead_req, err);
 }
 
-static u32 crypto4xx_pd_done(struct crypto4xx_device *dev, u32 idx)
+static void crypto4xx_pd_done(struct crypto4xx_device *dev, u32 idx)
 {
 	struct ce_pd *pd = &dev->pdr[idx];
 	struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[idx];
 
-	if (crypto_tfm_alg_type(pd_uinfo->async_req->tfm) ==
-			CRYPTO_ALG_TYPE_ABLKCIPHER)
-		return crypto4xx_ablkcipher_done(dev, pd_uinfo, pd);
-	else
-		return crypto4xx_ahash_done(dev, pd_uinfo);
+	switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) {
+	case CRYPTO_ALG_TYPE_ABLKCIPHER:
+		crypto4xx_ablkcipher_done(dev, pd_uinfo, pd);
+		break;
+	case CRYPTO_ALG_TYPE_AEAD:
+		crypto4xx_aead_done(dev, pd_uinfo, pd);
+		break;
+	case CRYPTO_ALG_TYPE_AHASH:
+		crypto4xx_ahash_done(dev, pd_uinfo);
+		break;
+	}
 }
 
 static void crypto4xx_stop_all(struct crypto4xx_core_device *core_dev)
@@ -612,8 +680,10 @@ int crypto4xx_build_pd(struct crypto_asy
 		       const unsigned int datalen,
 		       const __le32 *iv, const u32 iv_len,
 		       const struct dynamic_sa_ctl *req_sa,
-		       const unsigned int sa_len)
+		       const unsigned int sa_len,
+		       const unsigned int assoclen)
 {
+	struct scatterlist _dst[2];
 	struct crypto4xx_device *dev = ctx->dev;
 	struct dynamic_sa_ctl *sa;
 	struct ce_gd *gd;
@@ -627,18 +697,25 @@ int crypto4xx_build_pd(struct crypto_asy
 	unsigned int nbytes = datalen;
 	size_t offset_to_sr_ptr;
 	u32 gd_idx = 0;
+	int tmp;
 	bool is_busy;
 
-	/* figure how many gd is needed */
-	num_gd = sg_nents_for_len(src, datalen);
-	if ((int)num_gd < 0) {
+	/* figure how many gd are needed */
+	tmp = sg_nents_for_len(src, assoclen + datalen);
+	if (tmp < 0) {
 		dev_err(dev->core_dev->device, "Invalid number of src SG.\n");
-		return -EINVAL;
+		return tmp;
 	}
-	if (num_gd == 1)
-		num_gd = 0;
+	if (tmp == 1)
+		tmp = 0;
+	num_gd = tmp;
 
-	/* figure how many sd is needed */
+	if (assoclen) {
+		nbytes += assoclen;
+		dst = scatterwalk_ffwd(_dst, dst, assoclen);
+	}
+
+	/* figure how many sd are needed */
 	if (sg_is_last(dst)) {
 		num_sd = 0;
 	} else {
@@ -724,6 +801,7 @@ int crypto4xx_build_pd(struct crypto_asy
 	sa = pd_uinfo->sa_va;
 	memcpy(sa, req_sa, sa_len * 4);
 
+	sa->sa_command_1.bf.hash_crypto_offset = (assoclen >> 2);
 	offset_to_sr_ptr = get_dynamic_sa_offset_state_ptr_field(sa);
 	*(u32 *)((unsigned long)sa + offset_to_sr_ptr) = pd_uinfo->sr_pa;
 
@@ -830,7 +908,7 @@ int crypto4xx_build_pd(struct crypto_asy
 		((crypto_tfm_alg_type(req->tfm) == CRYPTO_ALG_TYPE_AHASH) |
 		 (crypto_tfm_alg_type(req->tfm) == CRYPTO_ALG_TYPE_AEAD) ?
 			PD_CTL_HASH_FINAL : 0);
-	pd->pd_ctl_len.w = 0x00400000 | datalen;
+	pd->pd_ctl_len.w = 0x00400000 | (assoclen + datalen);
 	pd_uinfo->state = PD_ENTRY_INUSE | (is_busy ? PD_ENTRY_BUSY : 0);
 
 	wmb();
@@ -843,40 +921,68 @@ int crypto4xx_build_pd(struct crypto_asy
 /**
  * Algorithm Registration Functions
  */
-static int crypto4xx_alg_init(struct crypto_tfm *tfm)
+static void crypto4xx_ctx_init(struct crypto4xx_alg *amcc_alg,
+			       struct crypto4xx_ctx *ctx)
 {
-	struct crypto_alg *alg = tfm->__crt_alg;
-	struct crypto4xx_alg *amcc_alg = crypto_alg_to_crypto4xx_alg(alg);
-	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
-
 	ctx->dev = amcc_alg->dev;
 	ctx->sa_in = NULL;
 	ctx->sa_out = NULL;
 	ctx->sa_len = 0;
+}
 
-	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
-	default:
-		tfm->crt_ablkcipher.reqsize = sizeof(struct crypto4xx_ctx);
-		break;
-	case CRYPTO_ALG_TYPE_AHASH:
-		crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
-					 sizeof(struct crypto4xx_ctx));
-		break;
-	}
+static int crypto4xx_ablk_init(struct crypto_tfm *tfm)
+{
+	struct crypto_alg *alg = tfm->__crt_alg;
+	struct crypto4xx_alg *amcc_alg;
+	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
 
+	amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.cipher);
+	crypto4xx_ctx_init(amcc_alg, ctx);
+	tfm->crt_ablkcipher.reqsize = sizeof(struct crypto4xx_ctx);
 	return 0;
 }
 
-static void crypto4xx_alg_exit(struct crypto_tfm *tfm)
+static void crypto4xx_common_exit(struct crypto4xx_ctx *ctx)
 {
-	struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
-
 	crypto4xx_free_sa(ctx);
 }
 
-int crypto4xx_register_alg(struct crypto4xx_device *sec_dev,
-			   struct crypto4xx_alg_common *crypto_alg,
-			   int array_size)
+static void crypto4xx_ablk_exit(struct crypto_tfm *tfm)
+{
+	crypto4xx_common_exit(crypto_tfm_ctx(tfm));
+}
+
+static int crypto4xx_aead_init(struct crypto_aead *tfm)
+{
+	struct aead_alg *alg = crypto_aead_alg(tfm);
+	struct crypto4xx_ctx *ctx = crypto_aead_ctx(tfm);
+	struct crypto4xx_alg *amcc_alg;
+
+	ctx->sw_cipher.aead = crypto_alloc_aead(alg->base.cra_name, 0,
+						CRYPTO_ALG_NEED_FALLBACK |
+						CRYPTO_ALG_ASYNC);
+	if (IS_ERR(ctx->sw_cipher.aead))
+		return PTR_ERR(ctx->sw_cipher.aead);
+
+	amcc_alg = container_of(alg, struct crypto4xx_alg, alg.u.aead);
+	crypto4xx_ctx_init(amcc_alg, ctx);
+	crypto_aead_set_reqsize(tfm, sizeof(struct aead_request) +
+				max(sizeof(struct crypto4xx_ctx), 32 +
+				crypto_aead_reqsize(ctx->sw_cipher.aead)));
+	return 0;
+}
+
+static void crypto4xx_aead_exit(struct crypto_aead *tfm)
+{
+	struct crypto4xx_ctx *ctx = crypto_aead_ctx(tfm);
+
+	crypto4xx_common_exit(ctx);
+	crypto_free_aead(ctx->sw_cipher.aead);
+}
+
+static int crypto4xx_register_alg(struct crypto4xx_device *sec_dev,
+				  struct crypto4xx_alg_common *crypto_alg,
+				  int array_size)
 {
 	struct crypto4xx_alg *alg;
 	int i;
@@ -891,6 +997,10 @@ int crypto4xx_register_alg(struct crypto
 		alg->dev = sec_dev;
 
 		switch (alg->alg.type) {
+		case CRYPTO_ALG_TYPE_AEAD:
+			rc = crypto_register_aead(&alg->alg.u.aead);
+			break;
+
 		case CRYPTO_ALG_TYPE_AHASH:
 			rc = crypto_register_ahash(&alg->alg.u.hash);
 			break;
@@ -920,6 +1030,10 @@ static void crypto4xx_unregister_alg(str
 			crypto_unregister_ahash(&alg->alg.u.hash);
 			break;
 
+		case CRYPTO_ALG_TYPE_AEAD:
+			crypto_unregister_aead(&alg->alg.u.aead);
+			break;
+
 		default:
 			crypto_unregister_alg(&alg->alg.u.cipher);
 		}
@@ -973,7 +1087,7 @@ static irqreturn_t crypto4xx_ce_interrup
 /**
  * Supported Crypto Algorithms
  */
-struct crypto4xx_alg_common crypto4xx_alg[] = {
+static struct crypto4xx_alg_common crypto4xx_alg[] = {
 	/* Crypto AES modes */
 	{ .type = CRYPTO_ALG_TYPE_ABLKCIPHER, .u.cipher = {
 		.cra_name = "cbc(aes)",
@@ -985,8 +1099,8 @@ struct crypto4xx_alg_common crypto4xx_al
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct crypto4xx_ctx),
 		.cra_type = &crypto_ablkcipher_type,
-		.cra_init = crypto4xx_alg_init,
-		.cra_exit = crypto4xx_alg_exit,
+		.cra_init = crypto4xx_ablk_init,
+		.cra_exit = crypto4xx_ablk_exit,
 		.cra_module = THIS_MODULE,
 		.cra_u = {
 			.ablkcipher = {
@@ -1009,8 +1123,8 @@ struct crypto4xx_alg_common crypto4xx_al
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct crypto4xx_ctx),
 		.cra_type = &crypto_ablkcipher_type,
-		.cra_init = crypto4xx_alg_init,
-		.cra_exit = crypto4xx_alg_exit,
+		.cra_init = crypto4xx_ablk_init,
+		.cra_exit = crypto4xx_ablk_exit,
 		.cra_module = THIS_MODULE,
 		.cra_u = {
 			.ablkcipher = {
@@ -1033,8 +1147,8 @@ struct crypto4xx_alg_common crypto4xx_al
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct crypto4xx_ctx),
 		.cra_type = &crypto_ablkcipher_type,
-		.cra_init = crypto4xx_alg_init,
-		.cra_exit = crypto4xx_alg_exit,
+		.cra_init = crypto4xx_ablk_init,
+		.cra_exit = crypto4xx_ablk_exit,
 		.cra_module = THIS_MODULE,
 		.cra_u = {
 			.ablkcipher = {
@@ -1059,8 +1173,8 @@ struct crypto4xx_alg_common crypto4xx_al
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct crypto4xx_ctx),
 		.cra_type = &crypto_ablkcipher_type,
-		.cra_init = crypto4xx_alg_init,
-		.cra_exit = crypto4xx_alg_exit,
+		.cra_init = crypto4xx_ablk_init,
+		.cra_exit = crypto4xx_ablk_exit,
 		.cra_module = THIS_MODULE,
 		.cra_u = {
 			.ablkcipher = {
@@ -1082,8 +1196,8 @@ struct crypto4xx_alg_common crypto4xx_al
 		.cra_blocksize = AES_BLOCK_SIZE,
 		.cra_ctxsize = sizeof(struct crypto4xx_ctx),
 		.cra_type = &crypto_ablkcipher_type,
-		.cra_init = crypto4xx_alg_init,
-		.cra_exit = crypto4xx_alg_exit,
+		.cra_init = crypto4xx_ablk_init,
+		.cra_exit = crypto4xx_ablk_exit,
 		.cra_module = THIS_MODULE,
 		.cra_u = {
 			.ablkcipher = {
@@ -1149,6 +1263,7 @@ static int crypto4xx_probe(struct platfo
 	core_dev->device = dev;
 	spin_lock_init(&core_dev->lock);
 	INIT_LIST_HEAD(&core_dev->dev->alg_list);
+	ratelimit_default_init(&core_dev->dev->aead_ratelimit);
 	rc = crypto4xx_build_pdr(core_dev->dev);
 	if (rc)
 		goto err_build_pdr;
--- a/drivers/crypto/amcc/crypto4xx_core.h
+++ b/drivers/crypto/amcc/crypto4xx_core.h
@@ -22,7 +22,9 @@
 #ifndef __CRYPTO4XX_CORE_H__
 #define __CRYPTO4XX_CORE_H__
 
+#include <linux/ratelimit.h>
 #include <crypto/internal/hash.h>
+#include <crypto/internal/aead.h>
 #include "crypto4xx_reg_def.h"
 #include "crypto4xx_sa.h"
 
@@ -106,6 +108,7 @@ struct crypto4xx_device {
 	struct pd_uinfo *pdr_uinfo;
 	struct list_head alg_list;	/* List of algorithm supported
					by this device */
+	struct ratelimit_state aead_ratelimit;
 };
 
 struct crypto4xx_core_device {
@@ -125,6 +128,9 @@ struct crypto4xx_ctx {
 	struct dynamic_sa_ctl *sa_out;
 	__le32 iv_nonce;
 	u32 sa_len;
+	union {
+		struct crypto_aead *aead;
+	} sw_cipher;
 };
 
 struct crypto4xx_alg_common {
@@ -132,6 +138,7 @@ struct crypto4xx_alg_common {
 	union {
 		struct crypto_alg cipher;
 		struct ahash_alg hash;
+		struct aead_alg aead;
 	} u;
 };
 
@@ -141,18 +148,6 @@ struct crypto4xx_alg {
 	struct crypto4xx_device *dev;
 };
 
-static inline struct crypto4xx_alg *crypto_alg_to_crypto4xx_alg(
-	struct crypto_alg *x)
-{
-	switch (x->cra_flags & CRYPTO_ALG_TYPE_MASK) {
-	case CRYPTO_ALG_TYPE_AHASH:
-		return container_of(__crypto_ahash_alg(x),
-			struct crypto4xx_alg, alg.u.hash);
-	}
-
-	return container_of(x, struct crypto4xx_alg, alg.u.cipher);
-}
-
 int crypto4xx_alloc_sa(struct crypto4xx_ctx *ctx, u32 size);
 void crypto4xx_free_sa(struct crypto4xx_ctx *ctx);
 void crypto4xx_free_ctx(struct crypto4xx_ctx *ctx);
@@ -163,7 +158,8 @@ int crypto4xx_build_pd(struct crypto_asy
 		       const unsigned int datalen,
 		       const __le32 *iv, const u32 iv_len,
 		       const struct dynamic_sa_ctl *sa,
-		       const unsigned int sa_len);
+		       const unsigned int sa_len,
+		       const unsigned int assoclen);
 int crypto4xx_setkey_aes_cbc(struct crypto_ablkcipher *cipher,
 			     const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_aes_cfb(struct crypto_ablkcipher *cipher,
--- a/drivers/crypto/amcc/crypto4xx_sa.h
+++ b/drivers/crypto/amcc/crypto4xx_sa.h
@@ -55,6 +55,8 @@ union dynamic_sa_contents {
 #define SA_OP_GROUP_BASIC			0
 #define SA_OPCODE_ENCRYPT			0
 #define SA_OPCODE_DECRYPT			0
+#define SA_OPCODE_ENCRYPT_HASH			1
+#define SA_OPCODE_HASH_DECRYPT			1
 #define SA_OPCODE_HASH				3
 #define SA_CIPHER_ALG_DES			0
 #define SA_CIPHER_ALG_3DES			1
@@ -65,6 +67,8 @@ union dynamic_sa_contents {
 
 #define SA_HASH_ALG_MD5				0
 #define SA_HASH_ALG_SHA1			1
+#define SA_HASH_ALG_GHASH			12
+#define SA_HASH_ALG_CBC_MAC			14
 #define SA_HASH_ALG_NULL			15
 #define SA_HASH_ALG_SHA1_DIGEST_SIZE		20
 
@@ -234,6 +238,36 @@ struct dynamic_sa_aes256 {
 #define SA_AES_CONTENTS		0x3e000002
 
 /**
+ * Security Association (SA) for AES128 CCM
+ */
+struct dynamic_sa_aes128_ccm {
+	struct dynamic_sa_ctl ctrl;
+	__le32 key[4];
+	__le32 iv[4];
+	u32 state_ptr;
+	u32 reserved;
+} __packed;
+#define SA_AES128_CCM_LEN	(sizeof(struct dynamic_sa_aes128_ccm)/4)
+#define SA_AES128_CCM_CONTENTS	0x3e000042
+#define SA_AES_CCM_CONTENTS	0x3e000002
+
+/**
+ * Security Association (SA) for AES128_GCM
+ */
+struct dynamic_sa_aes128_gcm {
+	struct dynamic_sa_ctl ctrl;
+	__le32 key[4];
+	__le32 inner_digest[4];
+	__le32 iv[4];
+	u32 state_ptr;
+	u32 reserved;
+} __packed;
+
+#define SA_AES128_GCM_LEN	(sizeof(struct dynamic_sa_aes128_gcm)/4)
+#define SA_AES128_GCM_CONTENTS	0x3e000442
+#define SA_AES_GCM_CONTENTS	0x3e000402
+
+/**
  * Security Association (SA) for HASH160: HMAC-SHA1
  */
 struct dynamic_sa_hash160 {
@@ -274,4 +308,11 @@ static inline __le32 *get_dynamic_sa_key
 	return (__le32 *) ((unsigned long)cts + sizeof(struct dynamic_sa_ctl));
 }
 
+static inline __le32 *get_dynamic_sa_inner_digest(struct dynamic_sa_ctl *cts)
+{
+	return (__le32 *) ((unsigned long)cts +
+		sizeof(struct dynamic_sa_ctl) +
+		cts->sa_contents.bf.key_size * 4);
+}
+
 #endif