1 From ba8e92b322a3763880fdc4d19e9c7085f5504be7 Mon Sep 17 00:00:00 2001
2 From: Biwen Li <biwen.li@nxp.com>
3 Date: Tue, 23 Apr 2019 17:41:43 +0800
4 Subject: [PATCH] sec: support layerscape
6 Content-Type: text/plain; charset=UTF-8
7 Content-Transfer-Encoding: 8bit
9 This is an integrated patch of sec for layerscape
11 Signed-off-by: Alex Porosanu <alexandru.porosanu@nxp.com>
12 Signed-off-by: Arnd Bergmann <arnd@arndb.de>
13 Signed-off-by: Biwen Li <biwen.li@nxp.com>
14 Signed-off-by: Carmen Iorga <carmen.iorga@nxp.com>
15 Signed-off-by: Cristian Stoica <cristian.stoica@nxp.com>
16 Signed-off-by: Guanhua Gao <guanhua.gao@nxp.com>
17 Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
18 Signed-off-by: Horia Geantă <horia.geanta@nxp.com>
19 Signed-off-by: Ioana Ciornei <ioana.ciornei@nxp.com>
20 Signed-off-by: Radu Alexe <radu.alexe@nxp.com>
21 Signed-off-by: Tudor Ambarus <tudor-dan.ambarus@nxp.com>
22 Signed-off-by: Yangbo Lu <yangbo.lu@nxp.com>
23 Signed-off-by: Zhao Qiang <qiang.zhao@nxp.com>
27 crypto/chacha20poly1305.c | 2 -
28 crypto/tcrypt.c | 27 +-
29 crypto/testmgr.c | 244 ++
30 crypto/testmgr.h | 219 ++
31 crypto/tls.c | 607 ++++
32 drivers/crypto/Makefile | 2 +-
33 drivers/crypto/caam/Kconfig | 85 +-
34 drivers/crypto/caam/Makefile | 26 +-
35 drivers/crypto/caam/caamalg.c | 468 +++-
36 drivers/crypto/caam/caamalg_desc.c | 903 +++++-
37 drivers/crypto/caam/caamalg_desc.h | 52 +-
38 drivers/crypto/caam/caamalg_qi.c | 1060 ++++++-
39 drivers/crypto/caam/caamalg_qi2.c | 5843 +++++++++++++++++++++++++++++++++++
40 drivers/crypto/caam/caamalg_qi2.h | 276 ++
41 drivers/crypto/caam/caamhash.c | 192 +-
42 drivers/crypto/caam/caamhash_desc.c | 108 +
43 drivers/crypto/caam/caamhash_desc.h | 49 +
44 drivers/crypto/caam/caampkc.c | 52 +-
45 drivers/crypto/caam/caamrng.c | 52 +-
46 drivers/crypto/caam/compat.h | 4 +
47 drivers/crypto/caam/ctrl.c | 194 +-
48 drivers/crypto/caam/desc.h | 89 +-
49 drivers/crypto/caam/desc_constr.h | 59 +-
50 drivers/crypto/caam/dpseci.c | 865 ++++++
51 drivers/crypto/caam/dpseci.h | 433 +++
52 drivers/crypto/caam/dpseci_cmd.h | 287 ++
53 drivers/crypto/caam/error.c | 81 +-
54 drivers/crypto/caam/error.h | 6 +-
55 drivers/crypto/caam/intern.h | 102 +-
56 drivers/crypto/caam/jr.c | 84 +
57 drivers/crypto/caam/jr.h | 2 +
58 drivers/crypto/caam/key_gen.c | 30 -
59 drivers/crypto/caam/key_gen.h | 30 +
60 drivers/crypto/caam/qi.c | 134 +-
61 drivers/crypto/caam/qi.h | 2 +-
62 drivers/crypto/caam/regs.h | 76 +-
63 drivers/crypto/caam/sg_sw_qm.h | 46 +-
64 drivers/crypto/talitos.c | 8 +
65 include/crypto/chacha20.h | 1 +
66 41 files changed, 12088 insertions(+), 733 deletions(-)
67 create mode 100644 crypto/tls.c
68 create mode 100644 drivers/crypto/caam/caamalg_qi2.c
69 create mode 100644 drivers/crypto/caam/caamalg_qi2.h
70 create mode 100644 drivers/crypto/caam/caamhash_desc.c
71 create mode 100644 drivers/crypto/caam/caamhash_desc.h
72 create mode 100644 drivers/crypto/caam/dpseci.c
73 create mode 100644 drivers/crypto/caam/dpseci.h
74 create mode 100644 drivers/crypto/caam/dpseci_cmd.h
78 @@ -312,6 +312,26 @@ config CRYPTO_ECHAINIV
79 a sequence number xored with a salt. This is the default
83 + tristate "TLS support"
85 + select CRYPTO_BLKCIPHER
86 + select CRYPTO_MANAGER
89 + select CRYPTO_AUTHENC
91 + Support for TLS 1.0 record encryption and decryption
93 + This module adds support for encryption/decryption of TLS 1.0 frames
94 + using blockcipher algorithms. The name of the resulting algorithm is
95 + "tls10(hmac(<digest>),cbc(<cipher>))". By default, the generic base
96 + algorithms are used (e.g. aes-generic, sha1-generic), but hardware
97 + accelerated versions will be used automatically if available.
99 + User-space applications (OpenSSL, GnuTLS) can offload TLS 1.0
100 + operations through AF_ALG or cryptodev interfaces
102 comment "Block modes"
105 --- a/crypto/Makefile
106 +++ b/crypto/Makefile
107 @@ -118,6 +118,7 @@ obj-$(CONFIG_CRYPTO_CRC32C) += crc32c_ge
108 obj-$(CONFIG_CRYPTO_CRC32) += crc32_generic.o
109 obj-$(CONFIG_CRYPTO_CRCT10DIF) += crct10dif_common.o crct10dif_generic.o
110 obj-$(CONFIG_CRYPTO_AUTHENC) += authenc.o authencesn.o
111 +obj-$(CONFIG_CRYPTO_TLS) += tls.o
112 obj-$(CONFIG_CRYPTO_LZO) += lzo.o
113 obj-$(CONFIG_CRYPTO_LZ4) += lz4.o
114 obj-$(CONFIG_CRYPTO_LZ4HC) += lz4hc.o
115 --- a/crypto/chacha20poly1305.c
116 +++ b/crypto/chacha20poly1305.c
119 #include "internal.h"
121 -#define CHACHAPOLY_IV_SIZE 12
123 struct chachapoly_instance_ctx {
124 struct crypto_skcipher_spawn chacha;
125 struct crypto_ahash_spawn poly;
126 --- a/crypto/tcrypt.c
127 +++ b/crypto/tcrypt.c
128 @@ -76,7 +76,7 @@ static char *check[] = {
129 "khazad", "wp512", "wp384", "wp256", "tnepres", "xeta", "fcrypt",
130 "camellia", "seed", "salsa20", "rmd128", "rmd160", "rmd256", "rmd320",
131 "lzo", "cts", "zlib", "sha3-224", "sha3-256", "sha3-384", "sha3-512",
136 struct tcrypt_result {
137 @@ -355,11 +355,13 @@ static void test_aead_speed(const char *
139 aead_request_set_ad(req, aad_size);
143 ret = test_aead_jiffies(req, enc, *b_size,
148 ret = test_aead_cycles(req, enc, *b_size);
152 pr_err("%s() failed return code=%d\n", e, ret);
153 @@ -736,12 +738,14 @@ static void test_ahash_speed_common(cons
155 ahash_request_set_crypt(req, sg, output, speed[i].plen);
159 ret = test_ahash_jiffies(req, speed[i].blen,
160 speed[i].plen, output, secs);
164 ret = test_ahash_cycles(req, speed[i].blen,
165 speed[i].plen, output);
169 pr_err("hashing failed ret=%d\n", ret);
170 @@ -959,12 +963,14 @@ static void test_skcipher_speed(const ch
172 skcipher_request_set_crypt(req, sg, sg, *b_size, iv);
176 ret = test_acipher_jiffies(req, enc,
181 ret = test_acipher_cycles(req, enc,
186 pr_err("%s() failed flags=%x\n", e,
187 @@ -1336,6 +1342,10 @@ static int do_test(const char *alg, u32
188 ret += tcrypt_test("hmac(sha3-512)");
192 + ret += tcrypt_test("rsa");
196 ret += tcrypt_test("ansi_cprng");
198 @@ -1397,6 +1407,9 @@ static int do_test(const char *alg, u32
200 ret += tcrypt_test("authenc(hmac(sha512),cbc(des3_ede))");
203 + ret += tcrypt_test("tls10(hmac(sha1),cbc(aes))");
206 test_cipher_speed("ecb(aes)", ENCRYPT, sec, NULL, 0,
207 speed_template_16_24_32);
208 --- a/crypto/testmgr.c
209 +++ b/crypto/testmgr.c
210 @@ -117,6 +117,13 @@ struct drbg_test_suite {
214 +struct tls_test_suite {
216 + struct tls_testvec *vecs;
217 + unsigned int count;
221 struct akcipher_test_suite {
222 const struct akcipher_testvec *vecs;
224 @@ -140,6 +147,7 @@ struct alg_test_desc {
225 struct hash_test_suite hash;
226 struct cprng_test_suite cprng;
227 struct drbg_test_suite drbg;
228 + struct tls_test_suite tls;
229 struct akcipher_test_suite akcipher;
230 struct kpp_test_suite kpp;
232 @@ -991,6 +999,233 @@ static int test_aead(struct crypto_aead
236 +static int __test_tls(struct crypto_aead *tfm, int enc,
237 + struct tls_testvec *template, unsigned int tcount,
238 + const bool diff_dst)
240 + const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
241 + unsigned int i, k, authsize;
243 + struct aead_request *req;
244 + struct scatterlist *sg;
245 + struct scatterlist *sgout;
247 + struct tcrypt_result result;
253 + char *xbuf[XBUFSIZE];
254 + char *xoutbuf[XBUFSIZE];
255 + char *axbuf[XBUFSIZE];
258 + if (testmgr_alloc_buf(xbuf))
261 + if (diff_dst && testmgr_alloc_buf(xoutbuf))
264 + if (testmgr_alloc_buf(axbuf))
267 + iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
271 + key = kzalloc(MAX_KEYLEN, GFP_KERNEL);
275 + sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 2 : 1), GFP_KERNEL);
281 + d = diff_dst ? "-ddst" : "";
282 + e = enc ? "encryption" : "decryption";
284 + init_completion(&result.completion);
286 + req = aead_request_alloc(tfm, GFP_KERNEL);
288 + pr_err("alg: tls%s: Failed to allocate request for %s\n",
293 + aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
294 + tcrypt_complete, &result);
296 + for (i = 0; i < tcount; i++) {
301 + if (WARN_ON(template[i].ilen > PAGE_SIZE ||
302 + template[i].alen > PAGE_SIZE))
305 + memcpy(assoc, template[i].assoc, template[i].alen);
306 + memcpy(input, template[i].input, template[i].ilen);
308 + if (template[i].iv)
309 + memcpy(iv, template[i].iv, MAX_IVLEN);
311 + memset(iv, 0, MAX_IVLEN);
313 + crypto_aead_clear_flags(tfm, ~0);
315 + if (template[i].klen > MAX_KEYLEN) {
316 + pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
317 + d, i, algo, template[i].klen, MAX_KEYLEN);
321 + memcpy(key, template[i].key, template[i].klen);
323 + ret = crypto_aead_setkey(tfm, key, template[i].klen);
324 + if (!ret == template[i].fail) {
325 + pr_err("alg: tls%s: setkey failed on test %d for %s: flags=%x\n",
326 + d, i, algo, crypto_aead_get_flags(tfm));
332 + ret = crypto_aead_setauthsize(tfm, authsize);
334 + pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
335 + d, authsize, i, algo);
339 + k = !!template[i].alen;
340 + sg_init_table(sg, k + 1);
341 + sg_set_buf(&sg[0], assoc, template[i].alen);
342 + sg_set_buf(&sg[k], input, (enc ? template[i].rlen :
343 + template[i].ilen));
347 + sg_init_table(sgout, k + 1);
348 + sg_set_buf(&sgout[0], assoc, template[i].alen);
350 + output = xoutbuf[0];
351 + sg_set_buf(&sgout[k], output,
352 + (enc ? template[i].rlen : template[i].ilen));
355 + aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
356 + template[i].ilen, iv);
358 + aead_request_set_ad(req, template[i].alen);
360 + ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
364 + if (template[i].novrfy) {
365 + /* verification was supposed to fail */
366 + pr_err("alg: tls%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
368 + /* so really, we got a bad message */
375 + wait_for_completion(&result.completion);
376 + reinit_completion(&result.completion);
381 + /* verification failure was expected */
382 + if (template[i].novrfy)
386 + pr_err("alg: tls%s: %s failed on test %d for %s: ret=%d\n",
387 + d, e, i, algo, -ret);
392 + if (memcmp(q, template[i].result, template[i].rlen)) {
393 + pr_err("alg: tls%s: Test %d failed on %s for %s\n",
395 + hexdump(q, template[i].rlen);
396 + pr_err("should be:\n");
397 + hexdump(template[i].result, template[i].rlen);
404 + aead_request_free(req);
412 + testmgr_free_buf(axbuf);
415 + testmgr_free_buf(xoutbuf);
417 + testmgr_free_buf(xbuf);
422 +static int test_tls(struct crypto_aead *tfm, int enc,
423 + struct tls_testvec *template, unsigned int tcount)
426 + /* test 'dst == src' case */
427 + ret = __test_tls(tfm, enc, template, tcount, false);
430 + /* test 'dst != src' case */
431 + return __test_tls(tfm, enc, template, tcount, true);
434 +static int alg_test_tls(const struct alg_test_desc *desc, const char *driver,
435 + u32 type, u32 mask)
437 + struct crypto_aead *tfm;
440 + tfm = crypto_alloc_aead(driver, type, mask);
442 + pr_err("alg: aead: Failed to load transform for %s: %ld\n",
443 + driver, PTR_ERR(tfm));
444 + return PTR_ERR(tfm);
447 + if (desc->suite.tls.enc.vecs) {
448 + err = test_tls(tfm, ENCRYPT, desc->suite.tls.enc.vecs,
449 + desc->suite.tls.enc.count);
454 + if (!err && desc->suite.tls.dec.vecs)
455 + err = test_tls(tfm, DECRYPT, desc->suite.tls.dec.vecs,
456 + desc->suite.tls.dec.count);
459 + crypto_free_aead(tfm);
463 static int test_cipher(struct crypto_cipher *tfm, int enc,
464 const struct cipher_testvec *template,
466 @@ -3524,6 +3759,15 @@ static const struct alg_test_desc alg_te
467 .hash = __VECS(tgr192_tv_template)
470 + .alg = "tls10(hmac(sha1),cbc(aes))",
471 + .test = alg_test_tls,
474 + .enc = __VECS(tls_enc_tv_template),
475 + .dec = __VECS(tls_dec_tv_template)
480 .test = alg_test_hash,
482 --- a/crypto/testmgr.h
483 +++ b/crypto/testmgr.h
484 @@ -125,6 +125,20 @@ struct drbg_testvec {
488 +struct tls_testvec {
489 + char *key; /* wrapped keys for encryption and authentication */
490 + char *iv; /* initialization vector */
491 + char *input; /* input data */
492 + char *assoc; /* associated data: seq num, type, version, input len */
493 + char *result; /* result data */
494 + unsigned char fail; /* the test failure is expected */
495 + unsigned char novrfy; /* dec verification failure expected */
496 + unsigned char klen; /* key length */
497 + unsigned short ilen; /* input data length */
498 + unsigned short alen; /* associated data length */
499 + unsigned short rlen; /* result length */
502 struct akcipher_testvec {
503 const unsigned char *key;
504 const unsigned char *m;
505 @@ -153,6 +167,211 @@ struct kpp_testvec {
506 static const char zeroed_string[48];
509 + * TLS1.0 synthetic test vectors
511 +static struct tls_testvec tls_enc_tv_template[] = {
513 +#ifdef __LITTLE_ENDIAN
514 + .key = "\x08\x00" /* rta length */
515 + "\x01\x00" /* rta type */
517 + .key = "\x00\x08" /* rta length */
518 + "\x00\x01" /* rta type */
520 + "\x00\x00\x00\x10" /* enc key length */
521 + "authenticationkey20benckeyis16_bytes",
522 + .klen = 8 + 20 + 16,
523 + .iv = "iv0123456789abcd",
524 + .input = "Single block msg",
526 + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07"
527 + "\x00\x03\x01\x00\x10",
529 + .result = "\xd5\xac\xb\xd2\xac\xad\x3f\xb1"
530 + "\x59\x79\x1e\x91\x5f\x52\x14\x9c"
531 + "\xc0\x75\xd8\x4c\x97\x0f\x07\x73"
532 + "\xdc\x89\x47\x49\x49\xcb\x30\x6b"
533 + "\x1b\x45\x23\xa1\xd0\x51\xcf\x02"
534 + "\x2e\xa8\x5d\xa0\xfe\xca\x82\x61",
535 + .rlen = 16 + 20 + 12,
537 +#ifdef __LITTLE_ENDIAN
538 + .key = "\x08\x00" /* rta length */
539 + "\x01\x00" /* rta type */
541 + .key = "\x00\x08" /* rta length */
542 + "\x00\x01" /* rta type */
544 + "\x00\x00\x00\x10" /* enc key length */
545 + "authenticationkey20benckeyis16_bytes",
546 + .klen = 8 + 20 + 16,
547 + .iv = "iv0123456789abcd",
550 + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07"
551 + "\x00\x03\x01\x00\x00",
553 + .result = "\x58\x2a\x11\xc\x86\x8e\x4b\x67"
554 + "\x2d\x16\x26\x1a\xac\x4b\xe2\x1a"
555 + "\xe9\x6a\xcc\x4d\x6f\x79\x8a\x45"
556 + "\x1f\x4e\x27\xf2\xa7\x59\xb4\x5a",
559 +#ifdef __LITTLE_ENDIAN
560 + .key = "\x08\x00" /* rta length */
561 + "\x01\x00" /* rta type */
563 + .key = "\x00\x08" /* rta length */
564 + "\x00\x01" /* rta type */
566 + "\x00\x00\x00\x10" /* enc key length */
567 + "authenticationkey20benckeyis16_bytes",
568 + .klen = 8 + 20 + 16,
569 + .iv = "iv0123456789abcd",
570 + .input = "285 bytes plaintext285 bytes plaintext285 bytes"
571 + " plaintext285 bytes plaintext285 bytes plaintext285"
572 + " bytes plaintext285 bytes plaintext285 bytes"
573 + " plaintext285 bytes plaintext285 bytes plaintext285"
574 + " bytes plaintext285 bytes plaintext285 bytes"
575 + " plaintext285 bytes plaintext285 bytes plaintext285"
576 + " bytes plaintext285 bytes plaintext",
578 + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07"
579 + "\x00\x03\x01\x01\x1d",
581 + .result = "\x80\x23\x82\x44\x14\x2a\x1d\x94\xc\xc2\x1d\xd"
582 + "\x3a\x32\x89\x4c\x57\x30\xa8\x89\x76\x46\xcc\x90"
583 + "\x1d\x88\xb8\xa6\x1a\x58\xe\x2d\xeb\x2c\xc7\x3a"
584 + "\x52\x4e\xdb\xb3\x1e\x83\x11\xf5\x3c\xce\x6e\x94"
585 + "\xd3\x26\x6a\x9a\xd\xbd\xc7\x98\xb9\xb3\x3a\x51"
586 + "\x1e\x4\x84\x8a\x8f\x54\x9a\x51\x69\x9c\xce\x31"
587 + "\x8d\x5d\x8b\xee\x5f\x70\xc\xc9\xb8\x50\x54\xf8"
588 + "\xb2\x4a\x7a\xcd\xeb\x7a\x82\x81\xc6\x41\xc8\x50"
589 + "\x91\x8d\xc8\xed\xcd\x40\x8f\x55\xd1\xec\xc9\xac"
590 + "\x15\x18\xf9\x20\xa0\xed\x18\xa1\xe3\x56\xe3\x14"
591 + "\xe5\xe8\x66\x63\x20\xed\xe4\x62\x9d\xa3\xa4\x1d"
592 + "\x81\x89\x18\xf2\x36\xae\xc8\x8a\x2b\xbc\xc3\xb8"
593 + "\x80\xf\x97\x21\x36\x39\x8\x84\x23\x18\x9e\x9c"
594 + "\x72\x32\x75\x2d\x2e\xf9\x60\xb\xe8\xcc\xd9\x74"
595 + "\x4\x1b\x8e\x99\xc1\x94\xee\xd0\xac\x4e\xfc\x7e"
596 + "\xf1\x96\xb3\xe7\x14\xb8\xf2\xc\x25\x97\x82\x6b"
597 + "\xbd\x0\x65\xab\x5c\xe3\x16\xfb\x68\xef\xea\x9d"
598 + "\xff\x44\x1d\x2a\x44\xf5\xc8\x56\x77\xb7\xbf\x13"
599 + "\xc8\x54\xdb\x92\xfe\x16\x4c\xbe\x18\xe9\xb\x8d"
600 + "\xb\xd4\x43\x58\x43\xaa\xf4\x3\x80\x97\x62\xd5"
601 + "\xdf\x3c\x28\xaa\xee\x48\x4b\x55\x41\x1b\x31\x2"
602 + "\xbe\xa0\x1c\xbd\xb7\x22\x2a\xe5\x53\x72\x73\x20"
603 + "\x44\x4f\xe6\x1\x2b\x34\x33\x11\x7d\xfb\x10\xc1"
604 + "\x66\x7c\xa6\xf4\x48\x36\x5e\x2\xda\x41\x4b\x3e"
605 + "\xe7\x80\x17\x17\xce\xf1\x3e\x6a\x8e\x26\xf3\xb7"
606 + "\x2b\x85\xd\x31\x8d\xba\x6c\x22\xb4\x28\x55\x7e"
607 + "\x2a\x9e\x26\xf1\x3d\x21\xac\x65",
608 + .rlen = 285 + 20 + 15,
612 +static struct tls_testvec tls_dec_tv_template[] = {
614 +#ifdef __LITTLE_ENDIAN
615 + .key = "\x08\x00" /* rta length */
616 + "\x01\x00" /* rta type */
618 + .key = "\x00\x08" /* rta length */
619 + "\x00\x01" /* rta type */
621 + "\x00\x00\x00\x10" /* enc key length */
622 + "authenticationkey20benckeyis16_bytes",
623 + .klen = 8 + 20 + 16,
624 + .iv = "iv0123456789abcd",
625 + .input = "\xd5\xac\xb\xd2\xac\xad\x3f\xb1"
626 + "\x59\x79\x1e\x91\x5f\x52\x14\x9c"
627 + "\xc0\x75\xd8\x4c\x97\x0f\x07\x73"
628 + "\xdc\x89\x47\x49\x49\xcb\x30\x6b"
629 + "\x1b\x45\x23\xa1\xd0\x51\xcf\x02"
630 + "\x2e\xa8\x5d\xa0\xfe\xca\x82\x61",
631 + .ilen = 16 + 20 + 12,
632 + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07"
633 + "\x00\x03\x01\x00\x30",
635 + .result = "Single block msg",
638 +#ifdef __LITTLE_ENDIAN
639 + .key = "\x08\x00" /* rta length */
640 + "\x01\x00" /* rta type */
642 + .key = "\x00\x08" /* rta length */
643 + "\x00\x01" /* rta type */
645 + "\x00\x00\x00\x10" /* enc key length */
646 + "authenticationkey20benckeyis16_bytes",
647 + .klen = 8 + 20 + 16,
648 + .iv = "iv0123456789abcd",
649 + .input = "\x58\x2a\x11\xc\x86\x8e\x4b\x67"
650 + "\x2d\x16\x26\x1a\xac\x4b\xe2\x1a"
651 + "\xe9\x6a\xcc\x4d\x6f\x79\x8a\x45"
652 + "\x1f\x4e\x27\xf2\xa7\x59\xb4\x5a",
654 + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07"
655 + "\x00\x03\x01\x00\x20",
660 +#ifdef __LITTLE_ENDIAN
661 + .key = "\x08\x00" /* rta length */
662 + "\x01\x00" /* rta type */
664 + .key = "\x00\x08" /* rta length */
665 + "\x00\x01" /* rta type */
667 + "\x00\x00\x00\x10" /* enc key length */
668 + "authenticationkey20benckeyis16_bytes",
669 + .klen = 8 + 20 + 16,
670 + .iv = "iv0123456789abcd",
671 + .input = "\x80\x23\x82\x44\x14\x2a\x1d\x94\xc\xc2\x1d\xd"
672 + "\x3a\x32\x89\x4c\x57\x30\xa8\x89\x76\x46\xcc\x90"
673 + "\x1d\x88\xb8\xa6\x1a\x58\xe\x2d\xeb\x2c\xc7\x3a"
674 + "\x52\x4e\xdb\xb3\x1e\x83\x11\xf5\x3c\xce\x6e\x94"
675 + "\xd3\x26\x6a\x9a\xd\xbd\xc7\x98\xb9\xb3\x3a\x51"
676 + "\x1e\x4\x84\x8a\x8f\x54\x9a\x51\x69\x9c\xce\x31"
677 + "\x8d\x5d\x8b\xee\x5f\x70\xc\xc9\xb8\x50\x54\xf8"
678 + "\xb2\x4a\x7a\xcd\xeb\x7a\x82\x81\xc6\x41\xc8\x50"
679 + "\x91\x8d\xc8\xed\xcd\x40\x8f\x55\xd1\xec\xc9\xac"
680 + "\x15\x18\xf9\x20\xa0\xed\x18\xa1\xe3\x56\xe3\x14"
681 + "\xe5\xe8\x66\x63\x20\xed\xe4\x62\x9d\xa3\xa4\x1d"
682 + "\x81\x89\x18\xf2\x36\xae\xc8\x8a\x2b\xbc\xc3\xb8"
683 + "\x80\xf\x97\x21\x36\x39\x8\x84\x23\x18\x9e\x9c"
684 + "\x72\x32\x75\x2d\x2e\xf9\x60\xb\xe8\xcc\xd9\x74"
685 + "\x4\x1b\x8e\x99\xc1\x94\xee\xd0\xac\x4e\xfc\x7e"
686 + "\xf1\x96\xb3\xe7\x14\xb8\xf2\xc\x25\x97\x82\x6b"
687 + "\xbd\x0\x65\xab\x5c\xe3\x16\xfb\x68\xef\xea\x9d"
688 + "\xff\x44\x1d\x2a\x44\xf5\xc8\x56\x77\xb7\xbf\x13"
689 + "\xc8\x54\xdb\x92\xfe\x16\x4c\xbe\x18\xe9\xb\x8d"
690 + "\xb\xd4\x43\x58\x43\xaa\xf4\x3\x80\x97\x62\xd5"
691 + "\xdf\x3c\x28\xaa\xee\x48\x4b\x55\x41\x1b\x31\x2"
692 + "\xbe\xa0\x1c\xbd\xb7\x22\x2a\xe5\x53\x72\x73\x20"
693 + "\x44\x4f\xe6\x1\x2b\x34\x33\x11\x7d\xfb\x10\xc1"
694 + "\x66\x7c\xa6\xf4\x48\x36\x5e\x2\xda\x41\x4b\x3e"
695 + "\xe7\x80\x17\x17\xce\xf1\x3e\x6a\x8e\x26\xf3\xb7"
696 + "\x2b\x85\xd\x31\x8d\xba\x6c\x22\xb4\x28\x55\x7e"
697 + "\x2a\x9e\x26\xf1\x3d\x21\xac\x65",
699 + .ilen = 285 + 20 + 15,
700 + .assoc = "\x00\x01\x02\x03\x04\x05\x06\x07"
701 + "\x00\x03\x01\x01\x40",
703 + .result = "285 bytes plaintext285 bytes plaintext285 bytes"
704 + " plaintext285 bytes plaintext285 bytes plaintext285"
705 + " bytes plaintext285 bytes plaintext285 bytes"
706 + " plaintext285 bytes plaintext285 bytes plaintext285"
707 + " bytes plaintext285 bytes plaintext285 bytes"
708 + " plaintext285 bytes plaintext285 bytes plaintext",
714 * RSA test vectors. Borrowed from openSSL.
716 static const struct akcipher_testvec rsa_tv_template[] = {
721 + * Copyright 2013 Freescale Semiconductor, Inc.
722 + * Copyright 2017 NXP Semiconductor, Inc.
724 + * This program is free software; you can redistribute it and/or modify it
725 + * under the terms of the GNU General Public License as published by the Free
726 + * Software Foundation; either version 2 of the License, or (at your option)
727 + * any later version.
731 +#include <crypto/internal/aead.h>
732 +#include <crypto/internal/hash.h>
733 +#include <crypto/internal/skcipher.h>
734 +#include <crypto/authenc.h>
735 +#include <crypto/null.h>
736 +#include <crypto/scatterwalk.h>
737 +#include <linux/err.h>
738 +#include <linux/init.h>
739 +#include <linux/module.h>
740 +#include <linux/rtnetlink.h>
742 +struct tls_instance_ctx {
743 + struct crypto_ahash_spawn auth;
744 + struct crypto_skcipher_spawn enc;
747 +struct crypto_tls_ctx {
748 + unsigned int reqoff;
749 + struct crypto_ahash *auth;
750 + struct crypto_skcipher *enc;
751 + struct crypto_skcipher *null;
754 +struct tls_request_ctx {
756 + * cryptlen holds the payload length in the case of encryption or
757 + * payload_len + icv_len + padding_len in case of decryption
759 + unsigned int cryptlen;
760 + /* working space for partial results */
761 + struct scatterlist tmp[2];
762 + struct scatterlist cipher[2];
763 + struct scatterlist dst[2];
768 + struct completion completion;
772 +static void tls_async_op_done(struct crypto_async_request *req, int err)
774 + struct async_op *areq = req->data;
776 + if (err == -EINPROGRESS)
780 + complete(&areq->completion);
783 +static int crypto_tls_setkey(struct crypto_aead *tls, const u8 *key,
784 + unsigned int keylen)
786 + struct crypto_tls_ctx *ctx = crypto_aead_ctx(tls);
787 + struct crypto_ahash *auth = ctx->auth;
788 + struct crypto_skcipher *enc = ctx->enc;
789 + struct crypto_authenc_keys keys;
792 + if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
795 + crypto_ahash_clear_flags(auth, CRYPTO_TFM_REQ_MASK);
796 + crypto_ahash_set_flags(auth, crypto_aead_get_flags(tls) &
797 + CRYPTO_TFM_REQ_MASK);
798 + err = crypto_ahash_setkey(auth, keys.authkey, keys.authkeylen);
799 + crypto_aead_set_flags(tls, crypto_ahash_get_flags(auth) &
800 + CRYPTO_TFM_RES_MASK);
805 + crypto_skcipher_clear_flags(enc, CRYPTO_TFM_REQ_MASK);
806 + crypto_skcipher_set_flags(enc, crypto_aead_get_flags(tls) &
807 + CRYPTO_TFM_REQ_MASK);
808 + err = crypto_skcipher_setkey(enc, keys.enckey, keys.enckeylen);
809 + crypto_aead_set_flags(tls, crypto_skcipher_get_flags(enc) &
810 + CRYPTO_TFM_RES_MASK);
816 + crypto_aead_set_flags(tls, CRYPTO_TFM_RES_BAD_KEY_LEN);
821 + * crypto_tls_genicv - Calculate hmac digest for a TLS record
822 + * @hash: (output) buffer to save the digest into
823 + * @src: (input) scatterlist with the assoc and payload data
824 + * @srclen: (input) size of the source buffer (assoclen + cryptlen)
825 + * @req: (input) aead request
827 +static int crypto_tls_genicv(u8 *hash, struct scatterlist *src,
828 + unsigned int srclen, struct aead_request *req)
830 + struct crypto_aead *tls = crypto_aead_reqtfm(req);
831 + struct crypto_tls_ctx *ctx = crypto_aead_ctx(tls);
832 + struct tls_request_ctx *treq_ctx = aead_request_ctx(req);
833 + struct async_op ahash_op;
834 + struct ahash_request *ahreq = (void *)(treq_ctx->tail + ctx->reqoff);
835 + unsigned int flags = CRYPTO_TFM_REQ_MAY_SLEEP;
836 + int err = -EBADMSG;
838 + /* Bail out if the request assoc len is 0 */
839 + if (!req->assoclen)
842 + init_completion(&ahash_op.completion);
844 + /* the hash transform to be executed comes from the original request */
845 + ahash_request_set_tfm(ahreq, ctx->auth);
846 + /* prepare the hash request with input data and result pointer */
847 + ahash_request_set_crypt(ahreq, src, hash, srclen);
848 + /* set the notifier for when the async hash function returns */
849 + ahash_request_set_callback(ahreq, aead_request_flags(req) & flags,
850 + tls_async_op_done, &ahash_op);
852 + /* Calculate the digest on the given data. The result is put in hash */
853 + err = crypto_ahash_digest(ahreq);
854 + if (err == -EINPROGRESS) {
855 + err = wait_for_completion_interruptible(&ahash_op.completion);
857 + err = ahash_op.err;
864 + * crypto_tls_gen_padicv - Calculate and pad hmac digest for a TLS record
865 + * @hash: (output) buffer to save the digest and padding into
866 + * @phashlen: (output) the size of digest + padding
867 + * @req: (input) aead request
869 +static int crypto_tls_gen_padicv(u8 *hash, unsigned int *phashlen,
870 + struct aead_request *req)
872 + struct crypto_aead *tls = crypto_aead_reqtfm(req);
873 + unsigned int hash_size = crypto_aead_authsize(tls);
874 + unsigned int block_size = crypto_aead_blocksize(tls);
875 + unsigned int srclen = req->cryptlen + hash_size;
876 + unsigned int icvlen = req->cryptlen + req->assoclen;
877 + unsigned int padlen;
880 + err = crypto_tls_genicv(hash, req->src, icvlen, req);
884 + /* add padding after digest */
885 + padlen = block_size - (srclen % block_size);
886 + memset(hash + hash_size, padlen - 1, padlen);
888 + *phashlen = hash_size + padlen;
893 +static int crypto_tls_copy_data(struct aead_request *req,
894 + struct scatterlist *src,
895 + struct scatterlist *dst,
898 + struct crypto_aead *tls = crypto_aead_reqtfm(req);
899 + struct crypto_tls_ctx *ctx = crypto_aead_ctx(tls);
900 + SKCIPHER_REQUEST_ON_STACK(skreq, ctx->null);
902 + skcipher_request_set_tfm(skreq, ctx->null);
903 + skcipher_request_set_callback(skreq, aead_request_flags(req),
905 + skcipher_request_set_crypt(skreq, src, dst, len, NULL);
907 + return crypto_skcipher_encrypt(skreq);
910 +static int crypto_tls_encrypt(struct aead_request *req)
912 + struct crypto_aead *tls = crypto_aead_reqtfm(req);
913 + struct crypto_tls_ctx *ctx = crypto_aead_ctx(tls);
914 + struct tls_request_ctx *treq_ctx = aead_request_ctx(req);
915 + struct skcipher_request *skreq;
916 + struct scatterlist *cipher = treq_ctx->cipher;
917 + struct scatterlist *tmp = treq_ctx->tmp;
918 + struct scatterlist *sg, *src, *dst;
919 + unsigned int cryptlen, phashlen;
920 + u8 *hash = treq_ctx->tail;
924 + * The hash result is saved at the beginning of the tls request ctx
925 + * and is aligned as required by the hash transform. Enough space was
926 + * allocated in crypto_tls_init_tfm to accommodate the difference. The
927 + * requests themselves start later at treq_ctx->tail + ctx->reqoff so
928 + * the result is not overwritten by the second (cipher) request.
930 + hash = (u8 *)ALIGN((unsigned long)hash +
931 + crypto_ahash_alignmask(ctx->auth),
932 + crypto_ahash_alignmask(ctx->auth) + 1);
935 + * STEP 1: create ICV together with necessary padding
937 + err = crypto_tls_gen_padicv(hash, &phashlen, req);
942 + * STEP 2: Hash and padding are combined with the payload
943 + * depending on the form it arrives. Scatter tables must have at least
944 + * one page of data before chaining with another table and can't have
945 + * an empty data page. The following code addresses these requirements.
947 + * If the payload is empty, only the hash is encrypted, otherwise the
948 + * payload scatterlist is merged with the hash. A special merging case
949 + * is when the payload has only one page of data. In that case the
950 + * payload page is moved to another scatterlist and prepared there for
953 + if (req->cryptlen) {
954 + src = scatterwalk_ffwd(tmp, req->src, req->assoclen);
956 + sg_init_table(cipher, 2);
957 + sg_set_buf(cipher + 1, hash, phashlen);
959 + if (sg_is_last(src)) {
960 + sg_set_page(cipher, sg_page(src), req->cryptlen,
964 + unsigned int rem_len = req->cryptlen;
966 + for (sg = src; rem_len > sg->length; sg = sg_next(sg))
967 + rem_len -= min(rem_len, sg->length);
969 + sg_set_page(cipher, sg_page(sg), rem_len, sg->offset);
970 + sg_chain(sg, 1, cipher);
973 + sg_init_one(cipher, hash, phashlen);
978 + * If src != dst copy the associated data from source to destination.
979 + * In both cases fast-forward past the associated data in the dest.
981 + if (req->src != req->dst) {
982 + err = crypto_tls_copy_data(req, req->src, req->dst,
987 + dst = scatterwalk_ffwd(treq_ctx->dst, req->dst, req->assoclen);
990 + * STEP 3: encrypt the frame and return the result
992 + cryptlen = req->cryptlen + phashlen;
995 + * The hash and the cipher are applied at different times and their
996 + * requests can use the same memory space without interference
998 + skreq = (void *)(treq_ctx->tail + ctx->reqoff);
999 + skcipher_request_set_tfm(skreq, ctx->enc);
1000 + skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);
1001 + skcipher_request_set_callback(skreq, aead_request_flags(req),
1002 + req->base.complete, req->base.data);
1004 + * Apply the cipher transform. The result will be in req->dst when the
1005 + * asynchronous call terminates
1007 + err = crypto_skcipher_encrypt(skreq);
1012 +static int crypto_tls_decrypt(struct aead_request *req)
1014 + struct crypto_aead *tls = crypto_aead_reqtfm(req);
1015 + struct crypto_tls_ctx *ctx = crypto_aead_ctx(tls);
1016 + struct tls_request_ctx *treq_ctx = aead_request_ctx(req);
1017 + unsigned int cryptlen = req->cryptlen;
1018 + unsigned int hash_size = crypto_aead_authsize(tls);
1019 + unsigned int block_size = crypto_aead_blocksize(tls);
1020 + struct skcipher_request *skreq = (void *)(treq_ctx->tail + ctx->reqoff);
1021 + struct scatterlist *tmp = treq_ctx->tmp;
1022 + struct scatterlist *src, *dst;
1024 + u8 padding[255]; /* padding can be 0-255 bytes */
1027 + u8 *ihash, *hash = treq_ctx->tail;
1030 + int err = -EINVAL;
1032 + struct async_op ciph_op;
1035 + * Rule out bad packets. The input packet length must be at least one
1036 + * byte more than the hash_size
1038 + if (cryptlen <= hash_size || cryptlen % block_size)
1042 + * Step 1 - Decrypt the source. Fast-forward past the associated data
1043 + * to the encrypted data. The result will be overwritten in place so
1044 + * that the decrypted data will be adjacent to the associated data. The
1045 + * last step (computing the hash) will have its input data already
1046 + * prepared and ready to be accessed at req->src.
1048 + src = scatterwalk_ffwd(tmp, req->src, req->assoclen);
1051 + init_completion(&ciph_op.completion);
1052 + skcipher_request_set_tfm(skreq, ctx->enc);
1053 + skcipher_request_set_callback(skreq, aead_request_flags(req),
1054 + tls_async_op_done, &ciph_op);
1055 + skcipher_request_set_crypt(skreq, src, dst, cryptlen, req->iv);
1056 + err = crypto_skcipher_decrypt(skreq);
1057 + if (err == -EINPROGRESS) {
1058 + err = wait_for_completion_interruptible(&ciph_op.completion);
1060 + err = ciph_op.err;
1066 + * Step 2 - Verify padding
1067 + * Retrieve the last byte of the payload; this is the padding size.
1070 + scatterwalk_map_and_copy(&pad_size, dst, cryptlen, 1, 0);
1072 + /* RFC recommendation for invalid padding size. */
1073 + if (cryptlen < pad_size + hash_size) {
1075 + paderr = -EBADMSG;
1077 + cryptlen -= pad_size;
1078 + scatterwalk_map_and_copy(padding, dst, cryptlen, pad_size, 0);
1080 + /* Padding content must be equal with pad_size. We verify it all */
1081 + for (i = 0; i < pad_size; i++)
1082 + if (padding[i] != pad_size)
1083 + paderr = -EBADMSG;
1086 + * Step 3 - Verify hash
1087 + * Align the digest result as required by the hash transform. Enough
1088 + * space was allocated in crypto_tls_init_tfm
1090 + hash = (u8 *)ALIGN((unsigned long)hash +
1091 + crypto_ahash_alignmask(ctx->auth),
1092 + crypto_ahash_alignmask(ctx->auth) + 1);
1094 + * Two bytes at the end of the associated data make the length field.
1095 + * It must be updated with the length of the cleartext message before
1096 + * the hash is calculated.
1098 + len_field = sg_virt(req->src) + req->assoclen - 2;
1099 + cryptlen -= hash_size;
1100 + *len_field = htons(cryptlen);
1102 + /* This is the hash from the decrypted packet. Save it for later */
1103 + ihash = hash + hash_size;
1104 + scatterwalk_map_and_copy(ihash, dst, cryptlen, hash_size, 0);
1106 + /* Now compute and compare our ICV with the one from the packet */
1107 + err = crypto_tls_genicv(hash, req->src, cryptlen + req->assoclen, req);
1109 + err = memcmp(hash, ihash, hash_size) ? -EBADMSG : 0;
1111 + if (req->src != req->dst) {
1112 + err = crypto_tls_copy_data(req, req->src, req->dst, cryptlen +
1118 + /* return the first found error */
1123 + aead_request_complete(req, err);
1127 +static int crypto_tls_init_tfm(struct crypto_aead *tfm)
1129 + struct aead_instance *inst = aead_alg_instance(tfm);
1130 + struct tls_instance_ctx *ictx = aead_instance_ctx(inst);
1131 + struct crypto_tls_ctx *ctx = crypto_aead_ctx(tfm);
1132 + struct crypto_ahash *auth;
1133 + struct crypto_skcipher *enc;
1134 + struct crypto_skcipher *null;
1137 + auth = crypto_spawn_ahash(&ictx->auth);
1139 + return PTR_ERR(auth);
1141 + enc = crypto_spawn_skcipher(&ictx->enc);
1142 + err = PTR_ERR(enc);
1144 + goto err_free_ahash;
1146 + null = crypto_get_default_null_skcipher2();
1147 + err = PTR_ERR(null);
1149 + goto err_free_skcipher;
1156 + * Allow enough space for two digests. The two digests will be compared
1157 + * during the decryption phase. One will come from the decrypted packet
1158 + * and the other will be calculated. For encryption, one digest is
1159 + * padded (up to a cipher blocksize) and chained with the payload
1161 + ctx->reqoff = ALIGN(crypto_ahash_digestsize(auth) +
1162 + crypto_ahash_alignmask(auth),
1163 + crypto_ahash_alignmask(auth) + 1) +
1164 + max(crypto_ahash_digestsize(auth),
1165 + crypto_skcipher_blocksize(enc));
1167 + crypto_aead_set_reqsize(tfm,
1168 + sizeof(struct tls_request_ctx) +
1170 + max_t(unsigned int,
1171 + crypto_ahash_reqsize(auth) +
1172 + sizeof(struct ahash_request),
1173 + crypto_skcipher_reqsize(enc) +
1174 + sizeof(struct skcipher_request)));
1179 + crypto_free_skcipher(enc);
1181 + crypto_free_ahash(auth);
1185 +static void crypto_tls_exit_tfm(struct crypto_aead *tfm)
1187 + struct crypto_tls_ctx *ctx = crypto_aead_ctx(tfm);
1189 + crypto_free_ahash(ctx->auth);
1190 + crypto_free_skcipher(ctx->enc);
1191 + crypto_put_default_null_skcipher2();
1194 +static void crypto_tls_free(struct aead_instance *inst)
1196 + struct tls_instance_ctx *ctx = aead_instance_ctx(inst);
1198 + crypto_drop_skcipher(&ctx->enc);
1199 + crypto_drop_ahash(&ctx->auth);
1203 +static int crypto_tls_create(struct crypto_template *tmpl, struct rtattr **tb)
1205 + struct crypto_attr_type *algt;
1206 + struct aead_instance *inst;
1207 + struct hash_alg_common *auth;
1208 + struct crypto_alg *auth_base;
1209 + struct skcipher_alg *enc;
1210 + struct tls_instance_ctx *ctx;
1211 + const char *enc_name;
1214 + algt = crypto_get_attr_type(tb);
1216 + return PTR_ERR(algt);
1218 + if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask)
1221 + auth = ahash_attr_alg(tb[1], CRYPTO_ALG_TYPE_HASH,
1222 + CRYPTO_ALG_TYPE_AHASH_MASK |
1223 + crypto_requires_sync(algt->type, algt->mask));
1225 + return PTR_ERR(auth);
1227 + auth_base = &auth->base;
1229 + enc_name = crypto_attr_alg_name(tb[2]);
1230 + err = PTR_ERR(enc_name);
1231 + if (IS_ERR(enc_name))
1232 + goto out_put_auth;
1234 + inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL);
1237 + goto out_put_auth;
1239 + ctx = aead_instance_ctx(inst);
1241 + err = crypto_init_ahash_spawn(&ctx->auth, auth,
1242 + aead_crypto_instance(inst));
1244 + goto err_free_inst;
1246 + crypto_set_skcipher_spawn(&ctx->enc, aead_crypto_instance(inst));
1247 + err = crypto_grab_skcipher(&ctx->enc, enc_name, 0,
1248 + crypto_requires_sync(algt->type,
1251 + goto err_drop_auth;
1253 + enc = crypto_spawn_skcipher_alg(&ctx->enc);
1255 + err = -ENAMETOOLONG;
1256 + if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
1257 + "tls10(%s,%s)", auth_base->cra_name,
1258 + enc->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
1259 + goto err_drop_enc;
1261 + if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
1262 + "tls10(%s,%s)", auth_base->cra_driver_name,
1263 + enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
1264 + goto err_drop_enc;
1266 + inst->alg.base.cra_flags = (auth_base->cra_flags |
1267 + enc->base.cra_flags) & CRYPTO_ALG_ASYNC;
1268 + inst->alg.base.cra_priority = enc->base.cra_priority * 10 +
1269 + auth_base->cra_priority;
1270 + inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
1271 + inst->alg.base.cra_alignmask = auth_base->cra_alignmask |
1272 + enc->base.cra_alignmask;
1273 + inst->alg.base.cra_ctxsize = sizeof(struct crypto_tls_ctx);
1275 + inst->alg.ivsize = crypto_skcipher_alg_ivsize(enc);
1276 + inst->alg.chunksize = crypto_skcipher_alg_chunksize(enc);
1277 + inst->alg.maxauthsize = auth->digestsize;
1279 + inst->alg.init = crypto_tls_init_tfm;
1280 + inst->alg.exit = crypto_tls_exit_tfm;
1282 + inst->alg.setkey = crypto_tls_setkey;
1283 + inst->alg.encrypt = crypto_tls_encrypt;
1284 + inst->alg.decrypt = crypto_tls_decrypt;
1286 + inst->free = crypto_tls_free;
1288 + err = aead_register_instance(tmpl, inst);
1290 + goto err_drop_enc;
1293 + crypto_mod_put(auth_base);
1297 + crypto_drop_skcipher(&ctx->enc);
1299 + crypto_drop_ahash(&ctx->auth);
1306 +static struct crypto_template crypto_tls_tmpl = {
1308 + .create = crypto_tls_create,
1309 + .module = THIS_MODULE,
1312 +static int __init crypto_tls_module_init(void)
1314 + return crypto_register_template(&crypto_tls_tmpl);
1317 +static void __exit crypto_tls_module_exit(void)
1319 + crypto_unregister_template(&crypto_tls_tmpl);
1322 +module_init(crypto_tls_module_init);
1323 +module_exit(crypto_tls_module_exit);
1325 +MODULE_LICENSE("GPL");
1326 +MODULE_DESCRIPTION("TLS 1.0 record encryption");
1327 --- a/drivers/crypto/Makefile
1328 +++ b/drivers/crypto/Makefile
1329 @@ -10,7 +10,7 @@ obj-$(CONFIG_CRYPTO_DEV_CHELSIO) += chel
1330 obj-$(CONFIG_CRYPTO_DEV_CPT) += cavium/cpt/
1331 obj-$(CONFIG_CRYPTO_DEV_NITROX) += cavium/nitrox/
1332 obj-$(CONFIG_CRYPTO_DEV_EXYNOS_RNG) += exynos-rng.o
1333 -obj-$(CONFIG_CRYPTO_DEV_FSL_CAAM) += caam/
1334 +obj-$(CONFIG_CRYPTO_DEV_FSL_CAAM_COMMON) += caam/
1335 obj-$(CONFIG_CRYPTO_DEV_GEODE) += geode-aes.o
1336 obj-$(CONFIG_CRYPTO_DEV_HIFN_795X) += hifn_795x.o
1337 obj-$(CONFIG_CRYPTO_DEV_IMGTEC_HASH) += img-hash.o
1338 --- a/drivers/crypto/caam/Kconfig
1339 +++ b/drivers/crypto/caam/Kconfig
1341 +config CRYPTO_DEV_FSL_CAAM_COMMON
1344 +config CRYPTO_DEV_FSL_CAAM_CRYPTO_API_DESC
1347 +config CRYPTO_DEV_FSL_CAAM_AHASH_API_DESC
1350 config CRYPTO_DEV_FSL_CAAM
1351 - tristate "Freescale CAAM-Multicore driver backend"
1352 + tristate "Freescale CAAM-Multicore platform driver backend"
1353 depends on FSL_SOC || ARCH_MXC || ARCH_LAYERSCAPE
1355 + select CRYPTO_DEV_FSL_CAAM_COMMON
1357 Enables the driver module for Freescale's Cryptographic Accelerator
1358 and Assurance Module (CAAM), also known as the SEC version 4 (SEC4).
1359 @@ -12,9 +22,16 @@ config CRYPTO_DEV_FSL_CAAM
1360 To compile this driver as a module, choose M here: the module
1361 will be called caam.
1363 +if CRYPTO_DEV_FSL_CAAM
1365 +config CRYPTO_DEV_FSL_CAAM_DEBUG
1366 + bool "Enable debug output in CAAM driver"
1368 + Selecting this will enable printing of various debug
1369 + information in the CAAM driver.
1371 config CRYPTO_DEV_FSL_CAAM_JR
1372 tristate "Freescale CAAM Job Ring driver backend"
1373 - depends on CRYPTO_DEV_FSL_CAAM
1376 Enables the driver module for Job Rings which are part of
1377 @@ -25,9 +42,10 @@ config CRYPTO_DEV_FSL_CAAM_JR
1378 To compile this driver as a module, choose M here: the module
1379 will be called caam_jr.
1381 +if CRYPTO_DEV_FSL_CAAM_JR
1383 config CRYPTO_DEV_FSL_CAAM_RINGSIZE
1385 - depends on CRYPTO_DEV_FSL_CAAM_JR
1389 @@ -45,7 +63,6 @@ config CRYPTO_DEV_FSL_CAAM_RINGSIZE
1391 config CRYPTO_DEV_FSL_CAAM_INTC
1392 bool "Job Ring interrupt coalescing"
1393 - depends on CRYPTO_DEV_FSL_CAAM_JR
1395 Enable the Job Ring's interrupt coalescing feature.
1397 @@ -74,9 +91,9 @@ config CRYPTO_DEV_FSL_CAAM_INTC_TIME_THL
1398 threshold. Range is 1-65535.
1400 config CRYPTO_DEV_FSL_CAAM_CRYPTO_API
1401 - tristate "Register algorithm implementations with the Crypto API"
1402 - depends on CRYPTO_DEV_FSL_CAAM_JR
1403 + bool "Register algorithm implementations with the Crypto API"
1405 + select CRYPTO_DEV_FSL_CAAM_CRYPTO_API_DESC
1407 select CRYPTO_AUTHENC
1408 select CRYPTO_BLKCIPHER
1409 @@ -85,13 +102,11 @@ config CRYPTO_DEV_FSL_CAAM_CRYPTO_API
1410 scatterlist crypto API (such as the linux native IPSec
1411 stack) to the SEC4 via job ring.
1413 - To compile this as a module, choose M here: the module
1414 - will be called caamalg.
1416 config CRYPTO_DEV_FSL_CAAM_CRYPTO_API_QI
1417 - tristate "Queue Interface as Crypto API backend"
1418 - depends on CRYPTO_DEV_FSL_CAAM_JR && FSL_DPAA && NET
1419 + bool "Queue Interface as Crypto API backend"
1420 + depends on FSL_SDK_DPA && NET
1422 + select CRYPTO_DEV_FSL_CAAM_CRYPTO_API_DESC
1423 select CRYPTO_AUTHENC
1424 select CRYPTO_BLKCIPHER
1426 @@ -102,36 +117,26 @@ config CRYPTO_DEV_FSL_CAAM_CRYPTO_API_QI
1427 assigned to the kernel should also be more than the number of
1430 - To compile this as a module, choose M here: the module
1431 - will be called caamalg_qi.
1433 config CRYPTO_DEV_FSL_CAAM_AHASH_API
1434 - tristate "Register hash algorithm implementations with Crypto API"
1435 - depends on CRYPTO_DEV_FSL_CAAM_JR
1436 + bool "Register hash algorithm implementations with Crypto API"
1438 + select CRYPTO_DEV_FSL_CAAM_AHASH_API_DESC
1441 Selecting this will offload ahash for users of the
1442 scatterlist crypto API to the SEC4 via job ring.
1444 - To compile this as a module, choose M here: the module
1445 - will be called caamhash.
1447 config CRYPTO_DEV_FSL_CAAM_PKC_API
1448 - tristate "Register public key cryptography implementations with Crypto API"
1449 - depends on CRYPTO_DEV_FSL_CAAM_JR
1450 + bool "Register public key cryptography implementations with Crypto API"
1454 Selecting this will allow SEC Public key support for RSA.
1455 Supported cryptographic primitives: encryption, decryption,
1456 signature and verification.
1457 - To compile this as a module, choose M here: the module
1458 - will be called caam_pkc.
1460 config CRYPTO_DEV_FSL_CAAM_RNG_API
1461 - tristate "Register caam device for hwrng API"
1462 - depends on CRYPTO_DEV_FSL_CAAM_JR
1463 + bool "Register caam device for hwrng API"
1467 @@ -139,16 +144,24 @@ config CRYPTO_DEV_FSL_CAAM_RNG_API
1468 Selecting this will register the SEC4 hardware rng to
1469 + the hw_random API for supplying the kernel entropy pool.
1471 - To compile this as a module, choose M here: the module
1472 - will be called caamrng.
1473 +endif # CRYPTO_DEV_FSL_CAAM_JR
1475 -config CRYPTO_DEV_FSL_CAAM_DEBUG
1476 - bool "Enable debug output in CAAM driver"
1477 - depends on CRYPTO_DEV_FSL_CAAM
1479 - Selecting this will enable printing of various debug
1480 - information in the CAAM driver.
1481 +endif # CRYPTO_DEV_FSL_CAAM
1483 -config CRYPTO_DEV_FSL_CAAM_CRYPTO_API_DESC
1484 - def_tristate (CRYPTO_DEV_FSL_CAAM_CRYPTO_API || \
1485 - CRYPTO_DEV_FSL_CAAM_CRYPTO_API_QI)
1486 +config CRYPTO_DEV_FSL_DPAA2_CAAM
1487 + tristate "QorIQ DPAA2 CAAM (DPSECI) driver"
1488 + depends on FSL_MC_DPIO
1489 + select CRYPTO_DEV_FSL_CAAM_COMMON
1490 + select CRYPTO_DEV_FSL_CAAM_CRYPTO_API_DESC
1491 + select CRYPTO_DEV_FSL_CAAM_AHASH_API_DESC
1492 + select CRYPTO_BLKCIPHER
1493 + select CRYPTO_AUTHENC
1494 + select CRYPTO_AEAD
1495 + select CRYPTO_HASH
1497 + CAAM driver for QorIQ Data Path Acceleration Architecture 2.
1498 + It handles DPSECI DPAA2 objects that sit on the Management Complex
1501 + To compile this as a module, choose M here: the module
1502 + will be called dpaa2_caam.
1503 --- a/drivers/crypto/caam/Makefile
1504 +++ b/drivers/crypto/caam/Makefile
1505 @@ -6,19 +6,27 @@ ifeq ($(CONFIG_CRYPTO_DEV_FSL_CAAM_DEBUG
1506 ccflags-y := -DDEBUG
1509 +ccflags-y += -DVERSION=\"\"
1511 +obj-$(CONFIG_CRYPTO_DEV_FSL_CAAM_COMMON) += error.o
1512 obj-$(CONFIG_CRYPTO_DEV_FSL_CAAM) += caam.o
1513 obj-$(CONFIG_CRYPTO_DEV_FSL_CAAM_JR) += caam_jr.o
1514 -obj-$(CONFIG_CRYPTO_DEV_FSL_CAAM_CRYPTO_API) += caamalg.o
1515 -obj-$(CONFIG_CRYPTO_DEV_FSL_CAAM_CRYPTO_API_QI) += caamalg_qi.o
1516 obj-$(CONFIG_CRYPTO_DEV_FSL_CAAM_CRYPTO_API_DESC) += caamalg_desc.o
1517 -obj-$(CONFIG_CRYPTO_DEV_FSL_CAAM_AHASH_API) += caamhash.o
1518 -obj-$(CONFIG_CRYPTO_DEV_FSL_CAAM_RNG_API) += caamrng.o
1519 -obj-$(CONFIG_CRYPTO_DEV_FSL_CAAM_PKC_API) += caam_pkc.o
1521 -caam-objs := ctrl.o
1522 -caam_jr-objs := jr.o key_gen.o error.o
1523 -caam_pkc-y := caampkc.o pkc_desc.o
1524 +obj-$(CONFIG_CRYPTO_DEV_FSL_CAAM_AHASH_API_DESC) += caamhash_desc.o
1527 +caam_jr-y := jr.o key_gen.o
1528 +caam_jr-$(CONFIG_CRYPTO_DEV_FSL_CAAM_CRYPTO_API) += caamalg.o
1529 +caam_jr-$(CONFIG_CRYPTO_DEV_FSL_CAAM_CRYPTO_API_QI) += caamalg_qi.o
1530 +caam_jr-$(CONFIG_CRYPTO_DEV_FSL_CAAM_AHASH_API) += caamhash.o
1531 +caam_jr-$(CONFIG_CRYPTO_DEV_FSL_CAAM_RNG_API) += caamrng.o
1532 +caam_jr-$(CONFIG_CRYPTO_DEV_FSL_CAAM_PKC_API) += caampkc.o pkc_desc.o
1534 +caam-$(CONFIG_CRYPTO_DEV_FSL_CAAM_CRYPTO_API_QI) += qi.o
1535 ifneq ($(CONFIG_CRYPTO_DEV_FSL_CAAM_CRYPTO_API_QI),)
1536 ccflags-y += -DCONFIG_CAAM_QI
1540 +obj-$(CONFIG_CRYPTO_DEV_FSL_DPAA2_CAAM) += dpaa2_caam.o
1542 +dpaa2_caam-y := caamalg_qi2.o dpseci.o
1543 --- a/drivers/crypto/caam/caamalg.c
1544 +++ b/drivers/crypto/caam/caamalg.c
1546 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
1549 +#define CHACHAPOLY_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)
1551 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
1552 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
1554 @@ -108,6 +110,7 @@ struct caam_ctx {
1555 dma_addr_t sh_desc_dec_dma;
1556 dma_addr_t sh_desc_givenc_dma;
1558 + enum dma_data_direction dir;
1559 struct device *jrdev;
1560 struct alginfo adata;
1561 struct alginfo cdata;
1562 @@ -118,6 +121,7 @@ static int aead_null_set_sh_desc(struct
1564 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1565 struct device *jrdev = ctx->jrdev;
1566 + struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
1568 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
1569 ctx->adata.keylen_pad;
1570 @@ -136,9 +140,10 @@ static int aead_null_set_sh_desc(struct
1572 /* aead_encrypt shared descriptor */
1573 desc = ctx->sh_desc_enc;
1574 - cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize);
1575 + cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
1577 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
1578 - desc_bytes(desc), DMA_TO_DEVICE);
1579 + desc_bytes(desc), ctx->dir);
1582 * Job Descriptor and Shared Descriptors
1583 @@ -154,9 +159,10 @@ static int aead_null_set_sh_desc(struct
1585 /* aead_decrypt shared descriptor */
1586 desc = ctx->sh_desc_dec;
1587 - cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize);
1588 + cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
1590 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
1591 - desc_bytes(desc), DMA_TO_DEVICE);
1592 + desc_bytes(desc), ctx->dir);
1596 @@ -168,6 +174,7 @@ static int aead_set_sh_desc(struct crypt
1597 unsigned int ivsize = crypto_aead_ivsize(aead);
1598 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1599 struct device *jrdev = ctx->jrdev;
1600 + struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
1601 u32 ctx1_iv_off = 0;
1602 u32 *desc, *nonce = NULL;
1604 @@ -234,9 +241,9 @@ static int aead_set_sh_desc(struct crypt
1605 desc = ctx->sh_desc_enc;
1606 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
1607 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
1609 + false, ctrlpriv->era);
1610 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
1611 - desc_bytes(desc), DMA_TO_DEVICE);
1612 + desc_bytes(desc), ctx->dir);
1616 @@ -266,9 +273,9 @@ skip_enc:
1617 desc = ctx->sh_desc_dec;
1618 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
1619 ctx->authsize, alg->caam.geniv, is_rfc3686,
1620 - nonce, ctx1_iv_off, false);
1621 + nonce, ctx1_iv_off, false, ctrlpriv->era);
1622 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
1623 - desc_bytes(desc), DMA_TO_DEVICE);
1624 + desc_bytes(desc), ctx->dir);
1626 if (!alg->caam.geniv)
1628 @@ -300,9 +307,9 @@ skip_enc:
1629 desc = ctx->sh_desc_enc;
1630 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
1631 ctx->authsize, is_rfc3686, nonce,
1632 - ctx1_iv_off, false);
1633 + ctx1_iv_off, false, ctrlpriv->era);
1634 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
1635 - desc_bytes(desc), DMA_TO_DEVICE);
1636 + desc_bytes(desc), ctx->dir);
1640 @@ -323,6 +330,7 @@ static int gcm_set_sh_desc(struct crypto
1642 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1643 struct device *jrdev = ctx->jrdev;
1644 + unsigned int ivsize = crypto_aead_ivsize(aead);
1646 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
1648 @@ -344,9 +352,9 @@ static int gcm_set_sh_desc(struct crypto
1651 desc = ctx->sh_desc_enc;
1652 - cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
1653 + cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
1654 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
1655 - desc_bytes(desc), DMA_TO_DEVICE);
1656 + desc_bytes(desc), ctx->dir);
1659 * Job Descriptor and Shared Descriptors
1660 @@ -361,9 +369,9 @@ static int gcm_set_sh_desc(struct crypto
1663 desc = ctx->sh_desc_dec;
1664 - cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
1665 + cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
1666 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
1667 - desc_bytes(desc), DMA_TO_DEVICE);
1668 + desc_bytes(desc), ctx->dir);
1672 @@ -382,6 +390,7 @@ static int rfc4106_set_sh_desc(struct cr
1674 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1675 struct device *jrdev = ctx->jrdev;
1676 + unsigned int ivsize = crypto_aead_ivsize(aead);
1678 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
1680 @@ -403,9 +412,10 @@ static int rfc4106_set_sh_desc(struct cr
1683 desc = ctx->sh_desc_enc;
1684 - cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
1685 + cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
1687 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
1688 - desc_bytes(desc), DMA_TO_DEVICE);
1689 + desc_bytes(desc), ctx->dir);
1692 * Job Descriptor and Shared Descriptors
1693 @@ -420,9 +430,10 @@ static int rfc4106_set_sh_desc(struct cr
1696 desc = ctx->sh_desc_dec;
1697 - cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
1698 + cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
1700 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
1701 - desc_bytes(desc), DMA_TO_DEVICE);
1702 + desc_bytes(desc), ctx->dir);
1706 @@ -442,6 +453,7 @@ static int rfc4543_set_sh_desc(struct cr
1708 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1709 struct device *jrdev = ctx->jrdev;
1710 + unsigned int ivsize = crypto_aead_ivsize(aead);
1712 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
1714 @@ -463,9 +475,10 @@ static int rfc4543_set_sh_desc(struct cr
1717 desc = ctx->sh_desc_enc;
1718 - cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
1719 + cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
1721 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
1722 - desc_bytes(desc), DMA_TO_DEVICE);
1723 + desc_bytes(desc), ctx->dir);
1726 * Job Descriptor and Shared Descriptors
1727 @@ -480,9 +493,10 @@ static int rfc4543_set_sh_desc(struct cr
1730 desc = ctx->sh_desc_dec;
1731 - cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
1732 + cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
1734 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
1735 - desc_bytes(desc), DMA_TO_DEVICE);
1736 + desc_bytes(desc), ctx->dir);
1740 @@ -498,11 +512,67 @@ static int rfc4543_setauthsize(struct cr
1744 +static int chachapoly_set_sh_desc(struct crypto_aead *aead)
1746 + struct caam_ctx *ctx = crypto_aead_ctx(aead);
1747 + struct device *jrdev = ctx->jrdev;
1748 + unsigned int ivsize = crypto_aead_ivsize(aead);
1751 + if (!ctx->cdata.keylen || !ctx->authsize)
1754 + desc = ctx->sh_desc_enc;
1755 + cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
1756 + ctx->authsize, true, false);
1757 + dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
1758 + desc_bytes(desc), ctx->dir);
1760 + desc = ctx->sh_desc_dec;
1761 + cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
1762 + ctx->authsize, false, false);
1763 + dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
1764 + desc_bytes(desc), ctx->dir);
1769 +static int chachapoly_setauthsize(struct crypto_aead *aead,
1770 + unsigned int authsize)
1772 + struct caam_ctx *ctx = crypto_aead_ctx(aead);
1774 + if (authsize != POLY1305_DIGEST_SIZE)
1777 + ctx->authsize = authsize;
1778 + return chachapoly_set_sh_desc(aead);
1781 +static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
1782 + unsigned int keylen)
1784 + struct caam_ctx *ctx = crypto_aead_ctx(aead);
1785 + unsigned int ivsize = crypto_aead_ivsize(aead);
1786 + unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;
1788 + if (keylen != CHACHA20_KEY_SIZE + saltlen) {
1789 + crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
1793 + ctx->cdata.key_virt = key;
1794 + ctx->cdata.keylen = keylen - saltlen;
1796 + return chachapoly_set_sh_desc(aead);
1799 static int aead_setkey(struct crypto_aead *aead,
1800 const u8 *key, unsigned int keylen)
1802 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1803 struct device *jrdev = ctx->jrdev;
1804 + struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
1805 struct crypto_authenc_keys keys;
1808 @@ -517,6 +587,27 @@ static int aead_setkey(struct crypto_aea
1809 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1813 + * If DKP is supported, use it in the shared descriptor to generate
1816 + if (ctrlpriv->era >= 6) {
1817 + ctx->adata.keylen = keys.authkeylen;
1818 + ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
1819 + OP_ALG_ALGSEL_MASK);
1821 + if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
1824 + memcpy(ctx->key, keys.authkey, keys.authkeylen);
1825 + memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
1827 + dma_sync_single_for_device(jrdev, ctx->key_dma,
1828 + ctx->adata.keylen_pad +
1829 + keys.enckeylen, ctx->dir);
1830 + goto skip_split_key;
1833 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
1834 keys.authkeylen, CAAM_MAX_KEY_SIZE -
1836 @@ -527,12 +618,14 @@ static int aead_setkey(struct crypto_aea
1837 /* postpend encryption key to auth split key */
1838 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
1839 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
1840 - keys.enckeylen, DMA_TO_DEVICE);
1841 + keys.enckeylen, ctx->dir);
1843 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
1844 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
1845 ctx->adata.keylen_pad + keys.enckeylen, 1);
1849 ctx->cdata.keylen = keys.enckeylen;
1850 return aead_set_sh_desc(aead);
1852 @@ -552,7 +645,7 @@ static int gcm_setkey(struct crypto_aead
1855 memcpy(ctx->key, key, keylen);
1856 - dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
1857 + dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
1858 ctx->cdata.keylen = keylen;
1860 return gcm_set_sh_desc(aead);
1861 @@ -580,7 +673,7 @@ static int rfc4106_setkey(struct crypto_
1863 ctx->cdata.keylen = keylen - 4;
1864 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
1867 return rfc4106_set_sh_desc(aead);
1870 @@ -606,7 +699,7 @@ static int rfc4543_setkey(struct crypto_
1872 ctx->cdata.keylen = keylen - 4;
1873 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
1876 return rfc4543_set_sh_desc(aead);
1879 @@ -658,21 +751,21 @@ static int ablkcipher_setkey(struct cryp
1880 cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
1882 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
1883 - desc_bytes(desc), DMA_TO_DEVICE);
1884 + desc_bytes(desc), ctx->dir);
1886 /* ablkcipher_decrypt shared descriptor */
1887 desc = ctx->sh_desc_dec;
1888 cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
1890 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
1891 - desc_bytes(desc), DMA_TO_DEVICE);
1892 + desc_bytes(desc), ctx->dir);
1894 /* ablkcipher_givencrypt shared descriptor */
1895 desc = ctx->sh_desc_givenc;
1896 cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
1898 dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
1899 - desc_bytes(desc), DMA_TO_DEVICE);
1900 + desc_bytes(desc), ctx->dir);
1904 @@ -701,13 +794,13 @@ static int xts_ablkcipher_setkey(struct
1905 desc = ctx->sh_desc_enc;
1906 cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
1907 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
1908 - desc_bytes(desc), DMA_TO_DEVICE);
1909 + desc_bytes(desc), ctx->dir);
1911 /* xts_ablkcipher_decrypt shared descriptor */
1912 desc = ctx->sh_desc_dec;
1913 cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
1914 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
1915 - desc_bytes(desc), DMA_TO_DEVICE);
1916 + desc_bytes(desc), ctx->dir);
1920 @@ -987,9 +1080,6 @@ static void init_aead_job(struct aead_re
1921 append_seq_out_ptr(desc, dst_dma,
1922 req->assoclen + req->cryptlen - authsize,
1925 - /* REG3 = assoclen */
1926 - append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1929 static void init_gcm_job(struct aead_request *req,
1930 @@ -1004,6 +1094,7 @@ static void init_gcm_job(struct aead_req
1933 init_aead_job(req, edesc, all_contig, encrypt);
1934 + append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1936 /* BUG This should not be specific to generic GCM. */
1938 @@ -1021,6 +1112,40 @@ static void init_gcm_job(struct aead_req
1939 /* End of blank commands */
1942 +static void init_chachapoly_job(struct aead_request *req,
1943 + struct aead_edesc *edesc, bool all_contig,
1946 + struct crypto_aead *aead = crypto_aead_reqtfm(req);
1947 + unsigned int ivsize = crypto_aead_ivsize(aead);
1948 + unsigned int assoclen = req->assoclen;
1949 + u32 *desc = edesc->hw_desc;
1950 + u32 ctx_iv_off = 4;
1952 + init_aead_job(req, edesc, all_contig, encrypt);
1954 + if (ivsize != CHACHAPOLY_IV_SIZE) {
1955 + /* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
1959 + * The associated data comes already with the IV but we need
1960 + * to skip it when we authenticate or encrypt...
1962 + assoclen -= ivsize;
1965 + append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);
1968 + * For IPsec load the IV further in the same register.
1969 + * For RFC7539 simply load the 12 bytes nonce in a single operation
1971 + append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
1972 + LDST_SRCDST_BYTE_CONTEXT |
1973 + ctx_iv_off << LDST_OFFSET_SHIFT);
1976 static void init_authenc_job(struct aead_request *req,
1977 struct aead_edesc *edesc,
1978 bool all_contig, bool encrypt)
1979 @@ -1030,6 +1155,7 @@ static void init_authenc_job(struct aead
1980 struct caam_aead_alg, aead);
1981 unsigned int ivsize = crypto_aead_ivsize(aead);
1982 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1983 + struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
1984 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
1985 OP_ALG_AAI_CTR_MOD128);
1986 const bool is_rfc3686 = alg->caam.rfc3686;
1987 @@ -1053,6 +1179,15 @@ static void init_authenc_job(struct aead
1989 init_aead_job(req, edesc, all_contig, encrypt);
1992 + * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
1993 + * having DPOVRD as destination.
1995 + if (ctrlpriv->era < 3)
1996 + append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1998 + append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
2000 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
2001 append_load_as_imm(desc, req->iv, ivsize,
2003 @@ -1225,8 +1360,16 @@ static struct aead_edesc *aead_edesc_all
2008 + * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
2009 + * the end of the table by allocating more S/G entries.
2011 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
2012 - sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
2013 + if (mapped_dst_nents > 1)
2014 + sec4_sg_len += ALIGN(mapped_dst_nents, 4);
2016 + sec4_sg_len = ALIGN(sec4_sg_len, 4);
2018 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
2020 /* allocate space for base edesc and hw desc commands, link tables */
2021 @@ -1307,6 +1450,72 @@ static int gcm_encrypt(struct aead_reque
2025 +static int chachapoly_encrypt(struct aead_request *req)
2027 + struct aead_edesc *edesc;
2028 + struct crypto_aead *aead = crypto_aead_reqtfm(req);
2029 + struct caam_ctx *ctx = crypto_aead_ctx(aead);
2030 + struct device *jrdev = ctx->jrdev;
2035 + edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
2037 + if (IS_ERR(edesc))
2038 + return PTR_ERR(edesc);
2040 + desc = edesc->hw_desc;
2042 + init_chachapoly_job(req, edesc, all_contig, true);
2043 + print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
2044 + DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
2047 + ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
2049 + ret = -EINPROGRESS;
2051 + aead_unmap(jrdev, edesc, req);
2058 +static int chachapoly_decrypt(struct aead_request *req)
2060 + struct aead_edesc *edesc;
2061 + struct crypto_aead *aead = crypto_aead_reqtfm(req);
2062 + struct caam_ctx *ctx = crypto_aead_ctx(aead);
2063 + struct device *jrdev = ctx->jrdev;
2068 + edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
2070 + if (IS_ERR(edesc))
2071 + return PTR_ERR(edesc);
2073 + desc = edesc->hw_desc;
2075 + init_chachapoly_job(req, edesc, all_contig, false);
2076 + print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
2077 + DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
2080 + ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
2082 + ret = -EINPROGRESS;
2084 + aead_unmap(jrdev, edesc, req);
2091 static int ipsec_gcm_encrypt(struct aead_request *req)
2093 if (req->assoclen < 8)
2094 @@ -1494,7 +1703,25 @@ static struct ablkcipher_edesc *ablkciph
2096 sec4_sg_ents = 1 + mapped_src_nents;
2097 dst_sg_idx = sec4_sg_ents;
2098 - sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
2101 + * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
2102 + * the end of the table by allocating more S/G entries. Logic:
2103 + * if (src != dst && output S/G)
2104 + * pad output S/G, if needed
2105 + * else if (src == dst && S/G)
2106 + * overlapping S/Gs; pad one of them
2107 + * else if (input S/G) ...
2108 + * pad input S/G, if needed
2110 + if (mapped_dst_nents > 1)
2111 + sec4_sg_ents += ALIGN(mapped_dst_nents, 4);
2112 + else if ((req->src == req->dst) && (mapped_src_nents > 1))
2113 + sec4_sg_ents = max(ALIGN(sec4_sg_ents, 4),
2114 + 1 + ALIGN(mapped_src_nents, 4));
2116 + sec4_sg_ents = ALIGN(sec4_sg_ents, 4);
2118 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
2121 @@ -3196,6 +3423,50 @@ static struct caam_aead_alg driver_aeads
2128 + .cra_name = "rfc7539(chacha20,poly1305)",
2129 + .cra_driver_name = "rfc7539-chacha20-poly1305-"
2131 + .cra_blocksize = 1,
2133 + .setkey = chachapoly_setkey,
2134 + .setauthsize = chachapoly_setauthsize,
2135 + .encrypt = chachapoly_encrypt,
2136 + .decrypt = chachapoly_decrypt,
2137 + .ivsize = CHACHAPOLY_IV_SIZE,
2138 + .maxauthsize = POLY1305_DIGEST_SIZE,
2141 + .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
2143 + .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
2150 + .cra_name = "rfc7539esp(chacha20,poly1305)",
2151 + .cra_driver_name = "rfc7539esp-chacha20-"
2153 + .cra_blocksize = 1,
2155 + .setkey = chachapoly_setkey,
2156 + .setauthsize = chachapoly_setauthsize,
2157 + .encrypt = chachapoly_encrypt,
2158 + .decrypt = chachapoly_decrypt,
2160 + .maxauthsize = POLY1305_DIGEST_SIZE,
2163 + .class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
2165 + .class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
2171 struct caam_crypto_alg {
2172 @@ -3204,9 +3475,11 @@ struct caam_crypto_alg {
2173 struct caam_alg_entry caam;
2176 -static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
2177 +static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
2180 dma_addr_t dma_addr;
2181 + struct caam_drv_private *priv;
2183 ctx->jrdev = caam_jr_alloc();
2184 if (IS_ERR(ctx->jrdev)) {
2185 @@ -3214,10 +3487,16 @@ static int caam_init_common(struct caam_
2186 return PTR_ERR(ctx->jrdev);
2189 + priv = dev_get_drvdata(ctx->jrdev->parent);
2190 + if (priv->era >= 6 && uses_dkp)
2191 + ctx->dir = DMA_BIDIRECTIONAL;
2193 + ctx->dir = DMA_TO_DEVICE;
2195 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
2196 offsetof(struct caam_ctx,
2198 - DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
2199 + ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
2200 if (dma_mapping_error(ctx->jrdev, dma_addr)) {
2201 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
2202 caam_jr_free(ctx->jrdev);
2203 @@ -3245,7 +3524,7 @@ static int caam_cra_init(struct crypto_t
2204 container_of(alg, struct caam_crypto_alg, crypto_alg);
2205 struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
2207 - return caam_init_common(ctx, &caam_alg->caam);
2208 + return caam_init_common(ctx, &caam_alg->caam, false);
2211 static int caam_aead_init(struct crypto_aead *tfm)
2212 @@ -3255,14 +3534,15 @@ static int caam_aead_init(struct crypto_
2213 container_of(alg, struct caam_aead_alg, aead);
2214 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
2216 - return caam_init_common(ctx, &caam_alg->caam);
2217 + return caam_init_common(ctx, &caam_alg->caam,
2218 + alg->setkey == aead_setkey);
2221 static void caam_exit_common(struct caam_ctx *ctx)
2223 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
2224 offsetof(struct caam_ctx, sh_desc_enc_dma),
2225 - DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
2226 + ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
2227 caam_jr_free(ctx->jrdev);
2230 @@ -3276,7 +3556,7 @@ static void caam_aead_exit(struct crypto
2231 caam_exit_common(crypto_aead_ctx(tfm));
2234 -static void __exit caam_algapi_exit(void)
2235 +void caam_algapi_exit(void)
2238 struct caam_crypto_alg *t_alg, *n;
2239 @@ -3355,56 +3635,52 @@ static void caam_aead_alg_init(struct ca
2240 alg->exit = caam_aead_exit;
2243 -static int __init caam_algapi_init(void)
2244 +int caam_algapi_init(struct device *ctrldev)
2246 - struct device_node *dev_node;
2247 - struct platform_device *pdev;
2248 - struct device *ctrldev;
2249 - struct caam_drv_private *priv;
2250 + struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
2252 - u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
2253 + u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
2254 unsigned int md_limit = SHA512_DIGEST_SIZE;
2255 bool registered = false;
2257 - dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
2259 - dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
2264 - pdev = of_find_device_by_node(dev_node);
2266 - of_node_put(dev_node);
2270 - ctrldev = &pdev->dev;
2271 - priv = dev_get_drvdata(ctrldev);
2272 - of_node_put(dev_node);
2275 - * If priv is NULL, it's probably because the caam driver wasn't
2276 - * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
2282 INIT_LIST_HEAD(&alg_list);
2285 * Register crypto algorithms the device supports.
2286 * First, detect presence and attributes of DES, AES, and MD blocks.
2288 - cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
2289 - cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
2290 - des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
2291 - aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
2292 - md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
2293 + if (priv->era < 10) {
2294 + u32 cha_vid, cha_inst;
2296 + cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
2297 + aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
2298 + md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
2300 + cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
2301 + des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
2302 + CHA_ID_LS_DES_SHIFT;
2303 + aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
2304 + md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
2310 + aesa = rd_reg32(&priv->ctrl->vreg.aesa);
2311 + mdha = rd_reg32(&priv->ctrl->vreg.mdha);
2313 + aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
2314 + md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
2316 + des_inst = rd_reg32(&priv->ctrl->vreg.desa) & CHA_VER_NUM_MASK;
2317 + aes_inst = aesa & CHA_VER_NUM_MASK;
2318 + md_inst = mdha & CHA_VER_NUM_MASK;
2319 + ccha_inst = rd_reg32(&priv->ctrl->vreg.ccha) & CHA_VER_NUM_MASK;
2320 + ptha_inst = rd_reg32(&priv->ctrl->vreg.ptha) & CHA_VER_NUM_MASK;
2323 /* If MD is present, limit digest size based on LP256 */
2324 - if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
2325 + if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
2326 md_limit = SHA256_DIGEST_SIZE;
2328 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
2329 @@ -3426,10 +3702,10 @@ static int __init caam_algapi_init(void)
2330 * Check support for AES modes not available
2333 - if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
2334 - if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
2337 + if (aes_vid == CHA_VER_VID_AES_LP &&
2338 + (alg->class1_alg_type & OP_ALG_AAI_MASK) ==
2342 t_alg = caam_alg_alloc(alg);
2343 if (IS_ERR(t_alg)) {
2344 @@ -3468,21 +3744,28 @@ static int __init caam_algapi_init(void)
2345 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
2348 + /* Skip CHACHA20 algorithms if not supported by device */
2349 + if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
2352 + /* Skip POLY1305 algorithms if not supported by device */
2353 + if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
2357 * Check support for AES algorithms not available
2360 - if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
2361 - if (alg_aai == OP_ALG_AAI_GCM)
2363 + if (aes_vid == CHA_VER_VID_AES_LP && alg_aai == OP_ALG_AAI_GCM)
2367 * Skip algorithms requiring message digests
2368 * if MD or MD size is not supported by device.
2371 - (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
2373 + if ((c2_alg_sel & ~OP_ALG_ALGSEL_SUBMASK) == 0x40 &&
2374 + (!md_inst || t_alg->aead.maxauthsize > md_limit))
2377 caam_aead_alg_init(t_alg);
2379 @@ -3502,10 +3785,3 @@ static int __init caam_algapi_init(void)
2384 -module_init(caam_algapi_init);
2385 -module_exit(caam_algapi_exit);
2387 -MODULE_LICENSE("GPL");
2388 -MODULE_DESCRIPTION("FSL CAAM support for crypto API");
2389 -MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");
2390 --- a/drivers/crypto/caam/caamalg_desc.c
2391 +++ b/drivers/crypto/caam/caamalg_desc.c
2392 @@ -45,16 +45,16 @@ static inline void append_dec_op1(u32 *d
2393 * cnstr_shdsc_aead_null_encap - IPSec ESP encapsulation shared descriptor
2394 * (non-protocol) with no (null) encryption.
2395 * @desc: pointer to buffer used for descriptor construction
2396 - * @adata: pointer to authentication transform definitions. Note that since a
2397 - * split key is to be used, the size of the split key itself is
2398 - * specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
2399 - * SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
2400 + * @adata: pointer to authentication transform definitions.
2401 + * A split key is required for SEC Era < 6; the size of the split key
2402 + * is specified in this case. Valid algorithm values - one of
2403 + * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
2404 + * with OP_ALG_AAI_HMAC_PRECOMP.
2405 * @icvsize: integrity check value (ICV) size (truncated or full)
2407 - * Note: Requires an MDHA split key.
2410 void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
2411 - unsigned int icvsize)
2412 + unsigned int icvsize, int era)
2414 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
2416 @@ -63,13 +63,18 @@ void cnstr_shdsc_aead_null_encap(u32 * c
2417 /* Skip if already shared */
2418 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
2420 - if (adata->key_inline)
2421 - append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
2422 - adata->keylen, CLASS_2 | KEY_DEST_MDHA_SPLIT |
2425 - append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
2426 - KEY_DEST_MDHA_SPLIT | KEY_ENC);
2428 + if (adata->key_inline)
2429 + append_key_as_imm(desc, adata->key_virt,
2430 + adata->keylen_pad, adata->keylen,
2431 + CLASS_2 | KEY_DEST_MDHA_SPLIT |
2434 + append_key(desc, adata->key_dma, adata->keylen,
2435 + CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
2437 + append_proto_dkp(desc, adata);
2439 set_jump_tgt_here(desc, key_jump_cmd);
2441 /* assoclen + cryptlen = seqinlen */
2442 @@ -121,16 +126,16 @@ EXPORT_SYMBOL(cnstr_shdsc_aead_null_enca
2443 * cnstr_shdsc_aead_null_decap - IPSec ESP decapsulation shared descriptor
2444 * (non-protocol) with no (null) decryption.
2445 * @desc: pointer to buffer used for descriptor construction
2446 - * @adata: pointer to authentication transform definitions. Note that since a
2447 - * split key is to be used, the size of the split key itself is
2448 - * specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
2449 - * SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
2450 + * @adata: pointer to authentication transform definitions.
2451 + * A split key is required for SEC Era < 6; the size of the split key
2452 + * is specified in this case. Valid algorithm values - one of
2453 + * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
2454 + * with OP_ALG_AAI_HMAC_PRECOMP.
2455 * @icvsize: integrity check value (ICV) size (truncated or full)
2457 - * Note: Requires an MDHA split key.
2460 void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
2461 - unsigned int icvsize)
2462 + unsigned int icvsize, int era)
2464 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd, *jump_cmd;
2466 @@ -139,13 +144,18 @@ void cnstr_shdsc_aead_null_decap(u32 * c
2467 /* Skip if already shared */
2468 key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
2470 - if (adata->key_inline)
2471 - append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
2472 - adata->keylen, CLASS_2 |
2473 - KEY_DEST_MDHA_SPLIT | KEY_ENC);
2475 - append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
2476 - KEY_DEST_MDHA_SPLIT | KEY_ENC);
2478 + if (adata->key_inline)
2479 + append_key_as_imm(desc, adata->key_virt,
2480 + adata->keylen_pad, adata->keylen,
2481 + CLASS_2 | KEY_DEST_MDHA_SPLIT |
2484 + append_key(desc, adata->key_dma, adata->keylen,
2485 + CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
2487 + append_proto_dkp(desc, adata);
2489 set_jump_tgt_here(desc, key_jump_cmd);
2491 /* Class 2 operation */
2492 @@ -204,7 +214,7 @@ EXPORT_SYMBOL(cnstr_shdsc_aead_null_deca
2493 static void init_sh_desc_key_aead(u32 * const desc,
2494 struct alginfo * const cdata,
2495 struct alginfo * const adata,
2496 - const bool is_rfc3686, u32 *nonce)
2497 + const bool is_rfc3686, u32 *nonce, int era)
2500 unsigned int enckeylen = cdata->keylen;
2501 @@ -224,13 +234,18 @@ static void init_sh_desc_key_aead(u32 *
2503 enckeylen -= CTR_RFC3686_NONCE_SIZE;
2505 - if (adata->key_inline)
2506 - append_key_as_imm(desc, adata->key_virt, adata->keylen_pad,
2507 - adata->keylen, CLASS_2 |
2508 - KEY_DEST_MDHA_SPLIT | KEY_ENC);
2510 - append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
2511 - KEY_DEST_MDHA_SPLIT | KEY_ENC);
2513 + if (adata->key_inline)
2514 + append_key_as_imm(desc, adata->key_virt,
2515 + adata->keylen_pad, adata->keylen,
2516 + CLASS_2 | KEY_DEST_MDHA_SPLIT |
2519 + append_key(desc, adata->key_dma, adata->keylen,
2520 + CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
2522 + append_proto_dkp(desc, adata);
2525 if (cdata->key_inline)
2526 append_key_as_imm(desc, cdata->key_virt, enckeylen,
2527 @@ -261,26 +276,27 @@ static void init_sh_desc_key_aead(u32 *
2528 * @cdata: pointer to block cipher transform definitions
2529 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
2530 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
2531 - * @adata: pointer to authentication transform definitions. Note that since a
2532 - * split key is to be used, the size of the split key itself is
2533 - * specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
2534 - * SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
2535 + * @adata: pointer to authentication transform definitions.
2536 + * A split key is required for SEC Era < 6; the size of the split key
2537 + * is specified in this case. Valid algorithm values - one of
2538 + * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
2539 + * with OP_ALG_AAI_HMAC_PRECOMP.
2540 * @ivsize: initialization vector size
2541 * @icvsize: integrity check value (ICV) size (truncated or full)
2542 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
2543 * @nonce: pointer to rfc3686 nonce
2544 * @ctx1_iv_off: IV offset in CONTEXT1 register
2545 * @is_qi: true when called from caam/qi
2547 - * Note: Requires an MDHA split key.
2550 void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
2551 struct alginfo *adata, unsigned int ivsize,
2552 unsigned int icvsize, const bool is_rfc3686,
2553 - u32 *nonce, const u32 ctx1_iv_off, const bool is_qi)
2554 + u32 *nonce, const u32 ctx1_iv_off, const bool is_qi,
2557 /* Note: Context registers are saved. */
2558 - init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);
2559 + init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
2561 /* Class 2 operation */
2562 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
2563 @@ -306,8 +322,13 @@ void cnstr_shdsc_aead_encap(u32 * const
2566 /* Read and write assoclen bytes */
2567 - append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
2568 - append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
2569 + if (is_qi || era < 3) {
2570 + append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
2571 + append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
2573 + append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
2574 + append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
2577 /* Skip assoc data */
2578 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
2579 @@ -350,27 +371,27 @@ EXPORT_SYMBOL(cnstr_shdsc_aead_encap);
2580 * @cdata: pointer to block cipher transform definitions
2581 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
2582 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
2583 - * @adata: pointer to authentication transform definitions. Note that since a
2584 - * split key is to be used, the size of the split key itself is
2585 - * specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
2586 - * SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
2587 + * @adata: pointer to authentication transform definitions.
2588 + * A split key is required for SEC Era < 6; the size of the split key
2589 + * is specified in this case. Valid algorithm values - one of
2590 + * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
2591 + * with OP_ALG_AAI_HMAC_PRECOMP.
2592 * @ivsize: initialization vector size
2593 * @icvsize: integrity check value (ICV) size (truncated or full)
2594 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
2595 * @nonce: pointer to rfc3686 nonce
2596 * @ctx1_iv_off: IV offset in CONTEXT1 register
2597 * @is_qi: true when called from caam/qi
2599 - * Note: Requires an MDHA split key.
2602 void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
2603 struct alginfo *adata, unsigned int ivsize,
2604 unsigned int icvsize, const bool geniv,
2605 const bool is_rfc3686, u32 *nonce,
2606 - const u32 ctx1_iv_off, const bool is_qi)
2607 + const u32 ctx1_iv_off, const bool is_qi, int era)
2609 /* Note: Context registers are saved. */
2610 - init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);
2611 + init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
2613 /* Class 2 operation */
2614 append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
2615 @@ -397,11 +418,23 @@ void cnstr_shdsc_aead_decap(u32 * const
2618 /* Read and write assoclen bytes */
2619 - append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
2621 - append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM, ivsize);
2623 - append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
2624 + if (is_qi || era < 3) {
2625 + append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
2627 + append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM,
2630 + append_math_add(desc, VARSEQOUTLEN, ZERO, REG3,
2633 + append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
2635 + append_math_add_imm_u32(desc, VARSEQOUTLEN, DPOVRD, IMM,
2638 + append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD,
2642 /* Skip assoc data */
2643 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
2644 @@ -456,29 +489,29 @@ EXPORT_SYMBOL(cnstr_shdsc_aead_decap);
2645 * @cdata: pointer to block cipher transform definitions
2646 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
2647 * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
2648 - * @adata: pointer to authentication transform definitions. Note that since a
2649 - * split key is to be used, the size of the split key itself is
2650 - * specified. Valid algorithm values - one of OP_ALG_ALGSEL_{MD5, SHA1,
2651 - * SHA224, SHA256, SHA384, SHA512} ANDed with OP_ALG_AAI_HMAC_PRECOMP.
2652 + * @adata: pointer to authentication transform definitions.
2653 + * A split key is required for SEC Era < 6; the size of the split key
2654 + * is specified in this case. Valid algorithm values - one of
2655 + * OP_ALG_ALGSEL_{MD5, SHA1, SHA224, SHA256, SHA384, SHA512} ANDed
2656 + * with OP_ALG_AAI_HMAC_PRECOMP.
2657 * @ivsize: initialization vector size
2658 * @icvsize: integrity check value (ICV) size (truncated or full)
2659 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
2660 * @nonce: pointer to rfc3686 nonce
2661 * @ctx1_iv_off: IV offset in CONTEXT1 register
2662 * @is_qi: true when called from caam/qi
2664 - * Note: Requires an MDHA split key.
2667 void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
2668 struct alginfo *adata, unsigned int ivsize,
2669 unsigned int icvsize, const bool is_rfc3686,
2670 u32 *nonce, const u32 ctx1_iv_off,
2672 + const bool is_qi, int era)
2676 /* Note: Context registers are saved. */
2677 - init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce);
2678 + init_sh_desc_key_aead(desc, cdata, adata, is_rfc3686, nonce, era);
2682 @@ -528,8 +561,13 @@ copy_iv:
2685 /* Read and write assoclen bytes */
2686 - append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
2687 - append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
2688 + if (is_qi || era < 3) {
2689 + append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
2690 + append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
2692 + append_math_add(desc, VARSEQINLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
2693 + append_math_add(desc, VARSEQOUTLEN, ZERO, DPOVRD, CAAM_CMD_SZ);
2696 /* Skip assoc data */
2697 append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
2698 @@ -583,14 +621,431 @@ copy_iv:
2699 EXPORT_SYMBOL(cnstr_shdsc_aead_givencap);
2702 + * cnstr_shdsc_tls_encap - tls encapsulation shared descriptor
2703 + * @desc: pointer to buffer used for descriptor construction
2704 + * @cdata: pointer to block cipher transform definitions
2705 + * Valid algorithm values - one of OP_ALG_ALGSEL_AES ANDed
2706 + * with OP_ALG_AAI_CBC
2707 + * @adata: pointer to authentication transform definitions.
2708 + * A split key is required for SEC Era < 6; the size of the split key
2709 + * is specified in this case. Valid algorithm values OP_ALG_ALGSEL_SHA1
2710 + * ANDed with OP_ALG_AAI_HMAC_PRECOMP.
2711 + * @assoclen: associated data length
2712 + * @ivsize: initialization vector size
2713 + * @authsize: authentication data size
2714 + * @blocksize: block cipher size
2717 +void cnstr_shdsc_tls_encap(u32 * const desc, struct alginfo *cdata,
2718 + struct alginfo *adata, unsigned int assoclen,
2719 + unsigned int ivsize, unsigned int authsize,
2720 + unsigned int blocksize, int era)
2722 + u32 *key_jump_cmd, *zero_payload_jump_cmd;
2723 + u32 genpad, idx_ld_datasz, idx_ld_pad, stidx;
2726 + * Compute the index (in bytes) for the LOAD with destination of
2727 + * Class 1 Data Size Register and for the LOAD that generates padding
2729 + if (adata->key_inline) {
2730 + idx_ld_datasz = DESC_TLS10_ENC_LEN + adata->keylen_pad +
2731 + cdata->keylen - 4 * CAAM_CMD_SZ;
2732 + idx_ld_pad = DESC_TLS10_ENC_LEN + adata->keylen_pad +
2733 + cdata->keylen - 2 * CAAM_CMD_SZ;
2735 + idx_ld_datasz = DESC_TLS10_ENC_LEN + 2 * CAAM_PTR_SZ -
2737 + idx_ld_pad = DESC_TLS10_ENC_LEN + 2 * CAAM_PTR_SZ -
2741 + stidx = 1 << HDR_START_IDX_SHIFT;
2742 + init_sh_desc(desc, HDR_SHARE_SERIAL | stidx);
2744 + /* skip key loading if they are loaded due to sharing */
2745 + key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
2749 + if (adata->key_inline)
2750 + append_key_as_imm(desc, adata->key_virt,
2751 + adata->keylen_pad, adata->keylen,
2752 + CLASS_2 | KEY_DEST_MDHA_SPLIT |
2755 + append_key(desc, adata->key_dma, adata->keylen,
2756 + CLASS_2 | KEY_DEST_MDHA_SPLIT | KEY_ENC);
2758 + append_proto_dkp(desc, adata);
2761 + if (cdata->key_inline)
2762 + append_key_as_imm(desc, cdata->key_virt, cdata->keylen,
2763 + cdata->keylen, CLASS_1 | KEY_DEST_CLASS_REG);
2765 + append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
2766 + KEY_DEST_CLASS_REG);
2768 + set_jump_tgt_here(desc, key_jump_cmd);
2770 + /* class 2 operation */
2771 + append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
2773 + /* class 1 operation */
2774 + append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
2777 + /* payloadlen = input data length - (assoclen + ivlen) */
2778 + append_math_sub_imm_u32(desc, REG0, SEQINLEN, IMM, assoclen + ivsize);
2780 + /* math1 = payloadlen + icvlen */
2781 + append_math_add_imm_u32(desc, REG1, REG0, IMM, authsize);
2783 + /* padlen = block_size - math1 % block_size */
2784 + append_math_and_imm_u32(desc, REG3, REG1, IMM, blocksize - 1);
2785 + append_math_sub_imm_u32(desc, REG2, IMM, REG3, blocksize);
2787 + /* cryptlen = payloadlen + icvlen + padlen */
2788 + append_math_add(desc, VARSEQOUTLEN, REG1, REG2, 4);
2791 + * update immediate data with the padding length value
2792 + * for the LOAD in the class 1 data size register.
2794 + append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH2 |
2795 + (idx_ld_datasz << MOVE_OFFSET_SHIFT) | 7);
2796 + append_move(desc, MOVE_WAITCOMP | MOVE_SRC_MATH2 | MOVE_DEST_DESCBUF |
2797 + (idx_ld_datasz << MOVE_OFFSET_SHIFT) | 8);
2799 + /* overwrite PL field for the padding iNFO FIFO entry */
2800 + append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH2 |
2801 + (idx_ld_pad << MOVE_OFFSET_SHIFT) | 7);
2802 + append_move(desc, MOVE_WAITCOMP | MOVE_SRC_MATH2 | MOVE_DEST_DESCBUF |
2803 + (idx_ld_pad << MOVE_OFFSET_SHIFT) | 8);
2805 + /* store encrypted payload, icv and padding */
2806 + append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);
2808 + /* if payload length is zero, jump to zero-payload commands */
2809 + append_math_add(desc, VARSEQINLEN, ZERO, REG0, 4);
2810 + zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
2811 + JUMP_COND_MATH_Z);
2813 + /* load iv in context1 */
2814 + append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_WORD_CLASS_CTX |
2815 + LDST_CLASS_1_CCB | ivsize);
2817 + /* read assoc for authentication */
2818 + append_seq_fifo_load(desc, assoclen, FIFOLD_CLASS_CLASS2 |
2820 + /* insnoop payload */
2821 + append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLD_TYPE_MSG |
2822 + FIFOLD_TYPE_LAST2 | FIFOLDST_VLF);
2824 + /* jump the zero-payload commands */
2825 + append_jump(desc, JUMP_TEST_ALL | 3);
2827 + /* zero-payload commands */
2828 + set_jump_tgt_here(desc, zero_payload_jump_cmd);
2830 + /* load iv in context1 */
2831 + append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_WORD_CLASS_CTX |
2832 + LDST_CLASS_1_CCB | ivsize);
2834 + /* assoc data is the only data for authentication */
2835 + append_seq_fifo_load(desc, assoclen, FIFOLD_CLASS_CLASS2 |
2836 + FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST2);
2838 + /* send icv to encryption */
2839 + append_move(desc, MOVE_SRC_CLASS2CTX | MOVE_DEST_CLASS1INFIFO |
2842 + /* update class 1 data size register with padding length */
2843 + append_load_imm_u32(desc, 0, LDST_CLASS_1_CCB |
2844 + LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
2846 + /* generate padding and send it to encryption */
2847 + genpad = NFIFOENTRY_DEST_CLASS1 | NFIFOENTRY_LC1 | NFIFOENTRY_FC1 |
2848 + NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_PTYPE_N;
2849 + append_load_imm_u32(desc, genpad, LDST_CLASS_IND_CCB |
2850 + LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
2853 + print_hex_dump(KERN_ERR, "tls enc shdesc@" __stringify(__LINE__) ": ",
2854 + DUMP_PREFIX_ADDRESS, 16, 4, desc,
2855 + desc_bytes(desc), 1);
2858 +EXPORT_SYMBOL(cnstr_shdsc_tls_encap);
2861 + * cnstr_shdsc_tls_decap - tls decapsulation shared descriptor
2862 + * @desc: pointer to buffer used for descriptor construction
2863 + * @cdata: pointer to block cipher transform definitions
2864 + * Valid algorithm values - one of OP_ALG_ALGSEL_AES ANDed
2865 + * with OP_ALG_AAI_CBC
2866 + * @adata: pointer to authentication transform definitions.
2867 + * A split key is required for SEC Era < 6; the size of the split key
2868 + * is specified in this case. Valid algorithm values OP_ALG_ALGSEL_SHA1
2869 + * ANDed with OP_ALG_AAI_HMAC_PRECOMP.
2870 + * @assoclen: associated data length
2871 + * @ivsize: initialization vector size
2872 + * @authsize: authentication data size
2873 + * @blocksize: block cipher size
2876 +void cnstr_shdsc_tls_decap(u32 * const desc, struct alginfo *cdata,
2877 + struct alginfo *adata, unsigned int assoclen,
2878 + unsigned int ivsize, unsigned int authsize,
2879 + unsigned int blocksize, int era)
2881 + u32 stidx, jumpback;
2882 + u32 *key_jump_cmd, *zero_payload_jump_cmd, *skip_zero_jump_cmd;
2884 + * Pointer Size bool determines the size of address pointers.
2885 + * false - Pointers fit in one 32-bit word.
2886 + * true - Pointers fit in two 32-bit words.
2888 + static const bool ps = (CAAM_PTR_SZ != CAAM_CMD_SZ);
2890 + stidx = 1 << HDR_START_IDX_SHIFT;
2891 + init_sh_desc(desc, HDR_SHARE_SERIAL | stidx);
2893 + /* skip key loading if they are loaded due to sharing */
2894 + key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
2898 + append_key(desc, adata->key_dma, adata->keylen, CLASS_2 |
2899 + KEY_DEST_MDHA_SPLIT | KEY_ENC);
2901 + append_proto_dkp(desc, adata);
2903 + append_key(desc, cdata->key_dma, cdata->keylen, CLASS_1 |
2904 + KEY_DEST_CLASS_REG);
2906 + set_jump_tgt_here(desc, key_jump_cmd);
2908 + /* class 2 operation */
2909 + append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
2910 + OP_ALG_DECRYPT | OP_ALG_ICV_ON);
2911 + /* class 1 operation */
2912 + append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
2915 + /* VSIL = input data length - 2 * block_size */
2916 + append_math_sub_imm_u32(desc, VARSEQINLEN, SEQINLEN, IMM, 2 *
2920 + * payloadlen + icvlen + padlen = input data length - (assoclen +
2923 + append_math_sub_imm_u32(desc, REG3, SEQINLEN, IMM, assoclen + ivsize);
2925 + /* skip data to the last but one cipher block */
2926 + append_seq_fifo_load(desc, 0, FIFOLD_CLASS_SKIP | LDST_VLF);
2928 + /* load iv for the last cipher block */
2929 + append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_WORD_CLASS_CTX |
2930 + LDST_CLASS_1_CCB | ivsize);
2932 + /* read last cipher block */
2933 + append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG |
2934 + FIFOLD_TYPE_LAST1 | blocksize);
2936 + /* move decrypted block into math0 and math1 */
2937 + append_move(desc, MOVE_WAITCOMP | MOVE_SRC_OUTFIFO | MOVE_DEST_MATH0 |
2940 + /* reset AES CHA */
2941 + append_load_imm_u32(desc, CCTRL_RESET_CHA_AESA, LDST_CLASS_IND_CCB |
2942 + LDST_SRCDST_WORD_CHACTRL | LDST_IMM);
2944 + /* rewind input sequence */
2945 + append_seq_in_ptr_intlen(desc, 0, 65535, SQIN_RTO);
2947 + /* key1 is in decryption form */
2948 + append_operation(desc, cdata->algtype | OP_ALG_AAI_DK |
2949 + OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
2951 + /* load iv in context1 */
2952 + append_cmd(desc, CMD_SEQ_LOAD | LDST_CLASS_1_CCB |
2953 + LDST_SRCDST_WORD_CLASS_CTX | ivsize);
2955 + /* read sequence number */
2956 + append_seq_fifo_load(desc, 8, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG);
2957 + /* load Type, Version and Len fields in math0 */
2958 + append_cmd(desc, CMD_SEQ_LOAD | LDST_CLASS_DECO |
2959 + LDST_SRCDST_WORD_DECO_MATH0 | (3 << LDST_OFFSET_SHIFT) | 5);
2961 + /* compute (padlen - 1) */
2962 + append_math_and_imm_u64(desc, REG1, REG1, IMM, 255);
2964 + /* math2 = icvlen + (padlen - 1) + 1 */
2965 + append_math_add_imm_u32(desc, REG2, REG1, IMM, authsize + 1);
2967 + append_jump(desc, JUMP_TEST_ALL | JUMP_COND_CALM | 1);
2969 + /* VSOL = payloadlen + icvlen + padlen */
2970 + append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, 4);
2972 + if (caam_little_end)
2973 + append_moveb(desc, MOVE_WAITCOMP |
2974 + MOVE_SRC_MATH0 | MOVE_DEST_MATH0 | 8);
2976 + /* update Len field */
2977 + append_math_sub(desc, REG0, REG0, REG2, 8);
2979 + /* store decrypted payload, icv and padding */
2980 + append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);
2982 + /* VSIL = (payloadlen + icvlen + padlen) - (icvlen + padlen)*/
2983 + append_math_sub(desc, VARSEQINLEN, REG3, REG2, 4);
2985 + zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
2986 + JUMP_COND_MATH_Z);
2988 + /* send Type, Version and Len(pre ICV) fields to authentication */
2989 + append_move(desc, MOVE_WAITCOMP |
2990 + MOVE_SRC_MATH0 | MOVE_DEST_CLASS2INFIFO |
2991 + (3 << MOVE_OFFSET_SHIFT) | 5);
2993 + /* outsnooping payload */
2994 + append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
2995 + FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LAST2 |
2997 + skip_zero_jump_cmd = append_jump(desc, JUMP_TEST_ALL | 2);
2999 + set_jump_tgt_here(desc, zero_payload_jump_cmd);
3000 + /* send Type, Version and Len(pre ICV) fields to authentication */
3001 + append_move(desc, MOVE_WAITCOMP | MOVE_AUX_LS |
3002 + MOVE_SRC_MATH0 | MOVE_DEST_CLASS2INFIFO |
3003 + (3 << MOVE_OFFSET_SHIFT) | 5);
3005 + set_jump_tgt_here(desc, skip_zero_jump_cmd);
3006 + append_math_add(desc, VARSEQINLEN, ZERO, REG2, 4);
3008 + /* load icvlen and padlen */
3009 + append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG |
3010 + FIFOLD_TYPE_LAST1 | FIFOLDST_VLF);
3012 + /* VSIL = (payloadlen + icvlen + padlen) - icvlen + padlen */
3013 + append_math_sub(desc, VARSEQINLEN, REG3, REG2, 4);
3016 + * Start a new input sequence using the SEQ OUT PTR command options,
3017 + * pointer and length used when the current output sequence was defined.
3021 + * Move the lower 32 bits of Shared Descriptor address, the
3022 + * SEQ OUT PTR command, Output Pointer (2 words) and
3023 + * Output Length into math registers.
3025 + if (caam_little_end)
3026 + append_move(desc, MOVE_WAITCOMP | MOVE_SRC_DESCBUF |
3028 + (55 * 4 << MOVE_OFFSET_SHIFT) | 20);
3030 + append_move(desc, MOVE_WAITCOMP | MOVE_SRC_DESCBUF |
3032 + (54 * 4 << MOVE_OFFSET_SHIFT) | 20);
3034 + /* Transform SEQ OUT PTR command in SEQ IN PTR command */
3035 + append_math_and_imm_u32(desc, REG0, REG0, IMM,
3036 + ~(CMD_SEQ_IN_PTR ^ CMD_SEQ_OUT_PTR));
3037 + /* Append a JUMP command after the copied fields */
3038 + jumpback = CMD_JUMP | (char)-9;
3039 + append_load_imm_u32(desc, jumpback, LDST_CLASS_DECO | LDST_IMM |
3040 + LDST_SRCDST_WORD_DECO_MATH2 |
3041 + (4 << LDST_OFFSET_SHIFT));
3042 + append_jump(desc, JUMP_TEST_ALL | JUMP_COND_CALM | 1);
3043 + /* Move the updated fields back to the Job Descriptor */
3044 + if (caam_little_end)
3045 + append_move(desc, MOVE_WAITCOMP | MOVE_SRC_MATH0 |
3046 + MOVE_DEST_DESCBUF |
3047 + (55 * 4 << MOVE_OFFSET_SHIFT) | 24);
3049 + append_move(desc, MOVE_WAITCOMP | MOVE_SRC_MATH0 |
3050 + MOVE_DEST_DESCBUF |
3051 + (54 * 4 << MOVE_OFFSET_SHIFT) | 24);
3054 + * Read the new SEQ IN PTR command, Input Pointer, Input Length
3055 + * and then jump back to the next command from the
3056 + * Shared Descriptor.
3058 + append_jump(desc, JUMP_TEST_ALL | JUMP_COND_CALM | 6);
3061 + * Move the SEQ OUT PTR command, Output Pointer (1 word) and
3062 + * Output Length into math registers.
3064 + if (caam_little_end)
3065 + append_move(desc, MOVE_WAITCOMP | MOVE_SRC_DESCBUF |
3067 + (54 * 4 << MOVE_OFFSET_SHIFT) | 12);
3069 + append_move(desc, MOVE_WAITCOMP | MOVE_SRC_DESCBUF |
3071 + (53 * 4 << MOVE_OFFSET_SHIFT) | 12);
3073 + /* Transform SEQ OUT PTR command in SEQ IN PTR command */
3074 + append_math_and_imm_u64(desc, REG0, REG0, IMM,
3075 + ~(((u64)(CMD_SEQ_IN_PTR ^
3076 + CMD_SEQ_OUT_PTR)) << 32));
3077 + /* Append a JUMP command after the copied fields */
3078 + jumpback = CMD_JUMP | (char)-7;
3079 + append_load_imm_u32(desc, jumpback, LDST_CLASS_DECO | LDST_IMM |
3080 + LDST_SRCDST_WORD_DECO_MATH1 |
3081 + (4 << LDST_OFFSET_SHIFT));
3082 + append_jump(desc, JUMP_TEST_ALL | JUMP_COND_CALM | 1);
3083 + /* Move the updated fields back to the Job Descriptor */
3084 + if (caam_little_end)
3085 + append_move(desc, MOVE_WAITCOMP | MOVE_SRC_MATH0 |
3086 + MOVE_DEST_DESCBUF |
3087 + (54 * 4 << MOVE_OFFSET_SHIFT) | 16);
3089 + append_move(desc, MOVE_WAITCOMP | MOVE_SRC_MATH0 |
3090 + MOVE_DEST_DESCBUF |
3091 + (53 * 4 << MOVE_OFFSET_SHIFT) | 16);
3094 + * Read the new SEQ IN PTR command, Input Pointer, Input Length
3095 + * and then jump back to the next command from the
3096 + * Shared Descriptor.
3098 + append_jump(desc, JUMP_TEST_ALL | JUMP_COND_CALM | 5);
3101 + /* skip payload */
3102 + append_seq_fifo_load(desc, 0, FIFOLD_CLASS_SKIP | FIFOLDST_VLF);
3104 + append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_ICV |
3105 + FIFOLD_TYPE_LAST2 | authsize);
3108 + print_hex_dump(KERN_ERR, "tls dec shdesc@" __stringify(__LINE__) ": ",
3109 + DUMP_PREFIX_ADDRESS, 16, 4, desc,
3110 + desc_bytes(desc), 1);
3113 +EXPORT_SYMBOL(cnstr_shdsc_tls_decap);
3116 * cnstr_shdsc_gcm_encap - gcm encapsulation shared descriptor
3117 * @desc: pointer to buffer used for descriptor construction
3118 * @cdata: pointer to block cipher transform definitions
3119 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
3120 + * @ivsize: initialization vector size
3121 * @icvsize: integrity check value (ICV) size (truncated or full)
3122 + * @is_qi: true when called from caam/qi
3124 void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
3125 - unsigned int icvsize)
3126 + unsigned int ivsize, unsigned int icvsize,
3129 u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1,
3130 *zero_assoc_jump_cmd2;
3131 @@ -612,11 +1067,35 @@ void cnstr_shdsc_gcm_encap(u32 * const d
3132 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
3136 + u32 *wait_load_cmd;
3138 + /* REG3 = assoclen */
3139 + append_seq_load(desc, 4, LDST_CLASS_DECO |
3140 + LDST_SRCDST_WORD_DECO_MATH3 |
3141 + (4 << LDST_OFFSET_SHIFT));
3143 + wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
3144 + JUMP_COND_CALM | JUMP_COND_NCP |
3145 + JUMP_COND_NOP | JUMP_COND_NIP |
3147 + set_jump_tgt_here(desc, wait_load_cmd);
3149 + append_math_sub_imm_u32(desc, VARSEQOUTLEN, SEQINLEN, IMM,
3152 + append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0,
3156 /* if assoclen + cryptlen is ZERO, skip to ICV write */
3157 - append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
3158 zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
3162 + append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
3163 + FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
3165 /* if assoclen is ZERO, skip reading the assoc data */
3166 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
3167 zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
3168 @@ -648,8 +1127,11 @@ void cnstr_shdsc_gcm_encap(u32 * const d
3169 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
3170 FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
3172 - /* jump the zero-payload commands */
3173 - append_jump(desc, JUMP_TEST_ALL | 2);
3174 + /* jump to ICV writing */
3176 + append_jump(desc, JUMP_TEST_ALL | 4);
3178 + append_jump(desc, JUMP_TEST_ALL | 2);
3180 /* zero-payload commands */
3181 set_jump_tgt_here(desc, zero_payload_jump_cmd);
3182 @@ -657,10 +1139,18 @@ void cnstr_shdsc_gcm_encap(u32 * const d
3183 /* read assoc data */
3184 append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
3185 FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
3187 + /* jump to ICV writing */
3188 + append_jump(desc, JUMP_TEST_ALL | 2);
3190 /* There is no input data */
3191 set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
3194 + append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
3195 + FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 |
3196 + FIFOLD_TYPE_LAST1);
3199 append_seq_store(desc, icvsize, LDST_CLASS_1_CCB |
3200 LDST_SRCDST_BYTE_CONTEXT);
3201 @@ -677,10 +1167,13 @@ EXPORT_SYMBOL(cnstr_shdsc_gcm_encap);
3202 * @desc: pointer to buffer used for descriptor construction
3203 * @cdata: pointer to block cipher transform definitions
3204 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
3205 + * @ivsize: initialization vector size
3206 * @icvsize: integrity check value (ICV) size (truncated or full)
3207 + * @is_qi: true when called from caam/qi
3209 void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
3210 - unsigned int icvsize)
3211 + unsigned int ivsize, unsigned int icvsize,
3214 u32 *key_jump_cmd, *zero_payload_jump_cmd, *zero_assoc_jump_cmd1;
3216 @@ -701,6 +1194,24 @@ void cnstr_shdsc_gcm_decap(u32 * const d
3217 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
3218 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
3221 + u32 *wait_load_cmd;
3223 + /* REG3 = assoclen */
3224 + append_seq_load(desc, 4, LDST_CLASS_DECO |
3225 + LDST_SRCDST_WORD_DECO_MATH3 |
3226 + (4 << LDST_OFFSET_SHIFT));
3228 + wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
3229 + JUMP_COND_CALM | JUMP_COND_NCP |
3230 + JUMP_COND_NOP | JUMP_COND_NIP |
3232 + set_jump_tgt_here(desc, wait_load_cmd);
3234 + append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
3235 + FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
3238 /* if assoclen is ZERO, skip reading the assoc data */
3239 append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
3240 zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
3241 @@ -753,10 +1264,13 @@ EXPORT_SYMBOL(cnstr_shdsc_gcm_decap);
3242 * @desc: pointer to buffer used for descriptor construction
3243 * @cdata: pointer to block cipher transform definitions
3244 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
3245 + * @ivsize: initialization vector size
3246 * @icvsize: integrity check value (ICV) size (truncated or full)
3247 + * @is_qi: true when called from caam/qi
3249 void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
3250 - unsigned int icvsize)
3251 + unsigned int ivsize, unsigned int icvsize,
3256 @@ -777,7 +1291,29 @@ void cnstr_shdsc_rfc4106_encap(u32 * con
3257 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
3260 - append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
3262 + u32 *wait_load_cmd;
3264 + /* REG3 = assoclen */
3265 + append_seq_load(desc, 4, LDST_CLASS_DECO |
3266 + LDST_SRCDST_WORD_DECO_MATH3 |
3267 + (4 << LDST_OFFSET_SHIFT));
3269 + wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
3270 + JUMP_COND_CALM | JUMP_COND_NCP |
3271 + JUMP_COND_NOP | JUMP_COND_NIP |
3273 + set_jump_tgt_here(desc, wait_load_cmd);
3275 + /* Read salt and IV */
3276 + append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
3277 + cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
3279 + append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
3280 + FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
3283 + append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
3284 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
3286 /* Read assoc data */
3287 @@ -785,7 +1321,7 @@ void cnstr_shdsc_rfc4106_encap(u32 * con
3288 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
3291 - append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
3292 + append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
3294 /* Will read cryptlen bytes */
3295 append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
3296 @@ -824,10 +1360,13 @@ EXPORT_SYMBOL(cnstr_shdsc_rfc4106_encap)
3297 * @desc: pointer to buffer used for descriptor construction
3298 * @cdata: pointer to block cipher transform definitions
3299 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
3300 + * @ivsize: initialization vector size
3301 * @icvsize: integrity check value (ICV) size (truncated or full)
3302 + * @is_qi: true when called from caam/qi
3304 void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
3305 - unsigned int icvsize)
3306 + unsigned int ivsize, unsigned int icvsize,
3311 @@ -849,7 +1388,29 @@ void cnstr_shdsc_rfc4106_decap(u32 * con
3312 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
3313 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
3315 - append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
3317 + u32 *wait_load_cmd;
3319 + /* REG3 = assoclen */
3320 + append_seq_load(desc, 4, LDST_CLASS_DECO |
3321 + LDST_SRCDST_WORD_DECO_MATH3 |
3322 + (4 << LDST_OFFSET_SHIFT));
3324 + wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
3325 + JUMP_COND_CALM | JUMP_COND_NCP |
3326 + JUMP_COND_NOP | JUMP_COND_NIP |
3328 + set_jump_tgt_here(desc, wait_load_cmd);
3330 + /* Read salt and IV */
3331 + append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
3332 + cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
3334 + append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
3335 + FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
3338 + append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, ivsize);
3339 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
3341 /* Read assoc data */
3342 @@ -857,7 +1418,7 @@ void cnstr_shdsc_rfc4106_decap(u32 * con
3343 FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
3346 - append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
3347 + append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_SKIP);
3349 /* Will read cryptlen bytes */
3350 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);
3351 @@ -896,10 +1457,13 @@ EXPORT_SYMBOL(cnstr_shdsc_rfc4106_decap)
3352 * @desc: pointer to buffer used for descriptor construction
3353 * @cdata: pointer to block cipher transform definitions
3354 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
3355 + * @ivsize: initialization vector size
3356 * @icvsize: integrity check value (ICV) size (truncated or full)
3357 + * @is_qi: true when called from caam/qi
3359 void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
3360 - unsigned int icvsize)
3361 + unsigned int ivsize, unsigned int icvsize,
3364 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
3366 @@ -920,6 +1484,18 @@ void cnstr_shdsc_rfc4543_encap(u32 * con
3367 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
3371 + /* assoclen is not needed, skip it */
3372 + append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
3374 + /* Read salt and IV */
3375 + append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
3376 + cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
3378 + append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
3379 + FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
3382 /* assoclen + cryptlen = seqinlen */
3383 append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
3385 @@ -931,7 +1507,7 @@ void cnstr_shdsc_rfc4543_encap(u32 * con
3386 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
3387 (0x6 << MOVE_LEN_SHIFT));
3388 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
3389 - (0x8 << MOVE_LEN_SHIFT));
3390 + (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
3392 /* Will read assoclen + cryptlen bytes */
3393 append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
3394 @@ -966,10 +1542,13 @@ EXPORT_SYMBOL(cnstr_shdsc_rfc4543_encap)
3395 * @desc: pointer to buffer used for descriptor construction
3396 * @cdata: pointer to block cipher transform definitions
3397 * Valid algorithm values - OP_ALG_ALGSEL_AES ANDed with OP_ALG_AAI_GCM.
3398 + * @ivsize: initialization vector size
3399 * @icvsize: integrity check value (ICV) size (truncated or full)
3400 + * @is_qi: true when called from caam/qi
3402 void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
3403 - unsigned int icvsize)
3404 + unsigned int ivsize, unsigned int icvsize,
3407 u32 *key_jump_cmd, *read_move_cmd, *write_move_cmd;
3409 @@ -990,6 +1569,18 @@ void cnstr_shdsc_rfc4543_decap(u32 * con
3410 append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
3411 OP_ALG_DECRYPT | OP_ALG_ICV_ON);
3414 + /* assoclen is not needed, skip it */
3415 + append_seq_fifo_load(desc, 4, FIFOLD_CLASS_SKIP);
3417 + /* Read salt and IV */
3418 + append_fifo_load_as_imm(desc, (void *)(cdata->key_virt +
3419 + cdata->keylen), 4, FIFOLD_CLASS_CLASS1 |
3421 + append_seq_fifo_load(desc, ivsize, FIFOLD_CLASS_CLASS1 |
3422 + FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1);
3425 /* assoclen + cryptlen = seqoutlen */
3426 append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);
3428 @@ -1001,7 +1592,7 @@ void cnstr_shdsc_rfc4543_decap(u32 * con
3429 read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
3430 (0x6 << MOVE_LEN_SHIFT));
3431 write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
3432 - (0x8 << MOVE_LEN_SHIFT));
3433 + (0x8 << MOVE_LEN_SHIFT) | MOVE_WAITCOMP);
3435 /* Will read assoclen + cryptlen bytes */
3436 append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
3437 @@ -1035,6 +1626,138 @@ void cnstr_shdsc_rfc4543_decap(u32 * con
3439 EXPORT_SYMBOL(cnstr_shdsc_rfc4543_decap);
3442 + * cnstr_shdsc_chachapoly - Chacha20 + Poly1305 generic AEAD (rfc7539) and
3443 + * IPsec ESP (rfc7634, a.k.a. rfc7539esp) shared
3444 + * descriptor (non-protocol).
3445 + * @desc: pointer to buffer used for descriptor construction
3446 + * @cdata: pointer to block cipher transform definitions
3447 + * Valid algorithm values - OP_ALG_ALGSEL_CHACHA20 ANDed with
3448 + * OP_ALG_AAI_AEAD.
3449 + * @adata: pointer to authentication transform definitions
3450 + * Valid algorithm values - OP_ALG_ALGSEL_POLY1305 ANDed with
3451 + * OP_ALG_AAI_AEAD.
3452 + * @ivsize: initialization vector size
3453 + * @icvsize: integrity check value (ICV) size (truncated or full)
3454 + * @encap: true if encapsulation, false if decapsulation
3455 + * @is_qi: true when called from caam/qi
3457 +void cnstr_shdsc_chachapoly(u32 * const desc, struct alginfo *cdata,
3458 + struct alginfo *adata, unsigned int ivsize,
3459 + unsigned int icvsize, const bool encap,
3462 + u32 *key_jump_cmd, *wait_cmd;
3464 + const bool is_ipsec = (ivsize != CHACHAPOLY_IV_SIZE);
3466 + /* Note: Context registers are saved. */
3467 + init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
3469 + /* skip key loading if they are loaded due to sharing */
3470 + key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
3473 + append_key_as_imm(desc, cdata->key_virt, cdata->keylen, cdata->keylen,
3474 + CLASS_1 | KEY_DEST_CLASS_REG);
3476 + /* For IPsec load the salt from keymat in the context register */
3478 + append_load_as_imm(desc, cdata->key_virt + cdata->keylen, 4,
3479 + LDST_CLASS_1_CCB | LDST_SRCDST_BYTE_CONTEXT |
3480 + 4 << LDST_OFFSET_SHIFT);
3482 + set_jump_tgt_here(desc, key_jump_cmd);
3484 + /* Class 2 and 1 operations: Poly & ChaCha */
3486 + append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
3488 + append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
3491 + append_operation(desc, adata->algtype | OP_ALG_AS_INITFINAL |
3492 + OP_ALG_DECRYPT | OP_ALG_ICV_ON);
3493 + append_operation(desc, cdata->algtype | OP_ALG_AS_INITFINAL |
3498 + u32 *wait_load_cmd;
3499 + u32 ctx1_iv_off = is_ipsec ? 8 : 4;
3501 + /* REG3 = assoclen */
3502 + append_seq_load(desc, 4, LDST_CLASS_DECO |
3503 + LDST_SRCDST_WORD_DECO_MATH3 |
3504 + 4 << LDST_OFFSET_SHIFT);
3506 + wait_load_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
3507 + JUMP_COND_CALM | JUMP_COND_NCP |
3508 + JUMP_COND_NOP | JUMP_COND_NIP |
3510 + set_jump_tgt_here(desc, wait_load_cmd);
3512 + append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
3513 + LDST_SRCDST_BYTE_CONTEXT |
3514 + ctx1_iv_off << LDST_OFFSET_SHIFT);
3518 + * MAGIC with NFIFO
3519 + * Read associated data from the input and send them to class1 and
3520 + * class2 alignment blocks. From class1 send data to output fifo and
3521 + * then write it to memory since we don't need to encrypt AD.
3523 + nfifo = NFIFOENTRY_DEST_BOTH | NFIFOENTRY_FC1 | NFIFOENTRY_FC2 |
3524 + NFIFOENTRY_DTYPE_POLY | NFIFOENTRY_BND;
3525 + append_load_imm_u32(desc, nfifo, LDST_CLASS_IND_CCB |
3526 + LDST_SRCDST_WORD_INFO_FIFO_SM | LDLEN_MATH3);
3528 + append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
3529 + append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
3530 + append_seq_fifo_load(desc, 0, FIFOLD_TYPE_NOINFOFIFO |
3531 + FIFOLD_CLASS_CLASS1 | LDST_VLF);
3532 + append_move_len(desc, MOVE_AUX_LS | MOVE_SRC_AUX_ABLK |
3533 + MOVE_DEST_OUTFIFO | MOVELEN_MRSEL_MATH3);
3534 + append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | LDST_VLF);
3536 + /* IPsec - copy IV at the output */
3538 + append_seq_fifo_store(desc, ivsize, FIFOST_TYPE_METADATA |
3541 + wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TYPE_LOCAL |
3542 + JUMP_COND_NOP | JUMP_TEST_ALL);
3543 + set_jump_tgt_here(desc, wait_cmd);
3546 + /* Read and write cryptlen bytes */
3547 + append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
3548 + append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0,
3550 + aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
3553 + append_seq_store(desc, icvsize, LDST_CLASS_2_CCB |
3554 + LDST_SRCDST_BYTE_CONTEXT);
3556 + /* Read and write cryptlen bytes */
3557 + append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
3558 + append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0,
3560 + aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
3562 + /* Load ICV for verification */
3563 + append_seq_fifo_load(desc, icvsize, FIFOLD_CLASS_CLASS2 |
3564 + FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
3567 + print_hex_dump_debug("chachapoly shdesc@" __stringify(__LINE__)": ",
3568 + DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
3571 +EXPORT_SYMBOL(cnstr_shdsc_chachapoly);
3574 * For ablkcipher encrypt and decrypt, read from req->src and
3576 @@ -1053,7 +1776,8 @@ static inline void ablkcipher_append_src
3577 * @desc: pointer to buffer used for descriptor construction
3578 * @cdata: pointer to block cipher transform definitions
3579 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
3580 - * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
3581 + * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
3582 + * - OP_ALG_ALGSEL_CHACHA20
3583 * @ivsize: initialization vector size
3584 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
3585 * @ctx1_iv_off: IV offset in CONTEXT1 register
3586 @@ -1075,7 +1799,7 @@ void cnstr_shdsc_ablkcipher_encap(u32 *
3588 /* Load nonce into CONTEXT1 reg */
3590 - u8 *nonce = cdata->key_virt + cdata->keylen;
3591 + const u8 *nonce = cdata->key_virt + cdata->keylen;
3593 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
3594 LDST_CLASS_IND_CCB |
3595 @@ -1118,7 +1842,8 @@ EXPORT_SYMBOL(cnstr_shdsc_ablkcipher_enc
3596 * @desc: pointer to buffer used for descriptor construction
3597 * @cdata: pointer to block cipher transform definitions
3598 * Valid algorithm values - one of OP_ALG_ALGSEL_{AES, DES, 3DES} ANDed
3599 - * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128.
3600 + * with OP_ALG_AAI_CBC or OP_ALG_AAI_CTR_MOD128
3601 + * - OP_ALG_ALGSEL_CHACHA20
3602 * @ivsize: initialization vector size
3603 * @is_rfc3686: true when ctr(aes) is wrapped by rfc3686 template
3604 * @ctx1_iv_off: IV offset in CONTEXT1 register
3605 @@ -1140,7 +1865,7 @@ void cnstr_shdsc_ablkcipher_decap(u32 *
3607 /* Load nonce into CONTEXT1 reg */
3609 - u8 *nonce = cdata->key_virt + cdata->keylen;
3610 + const u8 *nonce = cdata->key_virt + cdata->keylen;
3612 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
3613 LDST_CLASS_IND_CCB |
3614 @@ -1209,7 +1934,7 @@ void cnstr_shdsc_ablkcipher_givencap(u32
3616 /* Load Nonce into CONTEXT1 reg */
3618 - u8 *nonce = cdata->key_virt + cdata->keylen;
3619 + const u8 *nonce = cdata->key_virt + cdata->keylen;
3621 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
3622 LDST_CLASS_IND_CCB |
3623 --- a/drivers/crypto/caam/caamalg_desc.h
3624 +++ b/drivers/crypto/caam/caamalg_desc.h
3626 #define DESC_QI_AEAD_DEC_LEN (DESC_AEAD_DEC_LEN + 3 * CAAM_CMD_SZ)
3627 #define DESC_QI_AEAD_GIVENC_LEN (DESC_AEAD_GIVENC_LEN + 3 * CAAM_CMD_SZ)
3629 +#define DESC_TLS_BASE (4 * CAAM_CMD_SZ)
3630 +#define DESC_TLS10_ENC_LEN (DESC_TLS_BASE + 29 * CAAM_CMD_SZ)
3632 /* Note: Nonce is counted in cdata.keylen */
3633 #define DESC_AEAD_CTR_RFC3686_LEN (4 * CAAM_CMD_SZ)
3636 #define DESC_GCM_BASE (3 * CAAM_CMD_SZ)
3637 #define DESC_GCM_ENC_LEN (DESC_GCM_BASE + 16 * CAAM_CMD_SZ)
3638 #define DESC_GCM_DEC_LEN (DESC_GCM_BASE + 12 * CAAM_CMD_SZ)
3639 +#define DESC_QI_GCM_ENC_LEN (DESC_GCM_ENC_LEN + 6 * CAAM_CMD_SZ)
3640 +#define DESC_QI_GCM_DEC_LEN (DESC_GCM_DEC_LEN + 3 * CAAM_CMD_SZ)
3642 #define DESC_RFC4106_BASE (3 * CAAM_CMD_SZ)
3643 #define DESC_RFC4106_ENC_LEN (DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)
3644 #define DESC_RFC4106_DEC_LEN (DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)
3645 +#define DESC_QI_RFC4106_ENC_LEN (DESC_RFC4106_ENC_LEN + 5 * CAAM_CMD_SZ)
3646 +#define DESC_QI_RFC4106_DEC_LEN (DESC_RFC4106_DEC_LEN + 5 * CAAM_CMD_SZ)
3648 #define DESC_RFC4543_BASE (3 * CAAM_CMD_SZ)
3649 #define DESC_RFC4543_ENC_LEN (DESC_RFC4543_BASE + 11 * CAAM_CMD_SZ)
3650 #define DESC_RFC4543_DEC_LEN (DESC_RFC4543_BASE + 12 * CAAM_CMD_SZ)
3651 +#define DESC_QI_RFC4543_ENC_LEN (DESC_RFC4543_ENC_LEN + 4 * CAAM_CMD_SZ)
3652 +#define DESC_QI_RFC4543_DEC_LEN (DESC_RFC4543_DEC_LEN + 4 * CAAM_CMD_SZ)
3654 #define DESC_ABLKCIPHER_BASE (3 * CAAM_CMD_SZ)
3655 #define DESC_ABLKCIPHER_ENC_LEN (DESC_ABLKCIPHER_BASE + \
3659 void cnstr_shdsc_aead_null_encap(u32 * const desc, struct alginfo *adata,
3660 - unsigned int icvsize);
3661 + unsigned int icvsize, int era);
3663 void cnstr_shdsc_aead_null_decap(u32 * const desc, struct alginfo *adata,
3664 - unsigned int icvsize);
3665 + unsigned int icvsize, int era);
3667 void cnstr_shdsc_aead_encap(u32 * const desc, struct alginfo *cdata,
3668 struct alginfo *adata, unsigned int ivsize,
3669 unsigned int icvsize, const bool is_rfc3686,
3670 u32 *nonce, const u32 ctx1_iv_off,
3671 - const bool is_qi);
3672 + const bool is_qi, int era);
3674 void cnstr_shdsc_aead_decap(u32 * const desc, struct alginfo *cdata,
3675 struct alginfo *adata, unsigned int ivsize,
3676 unsigned int icvsize, const bool geniv,
3677 const bool is_rfc3686, u32 *nonce,
3678 - const u32 ctx1_iv_off, const bool is_qi);
3679 + const u32 ctx1_iv_off, const bool is_qi, int era);
3681 void cnstr_shdsc_aead_givencap(u32 * const desc, struct alginfo *cdata,
3682 struct alginfo *adata, unsigned int ivsize,
3683 unsigned int icvsize, const bool is_rfc3686,
3684 u32 *nonce, const u32 ctx1_iv_off,
3685 - const bool is_qi);
3686 + const bool is_qi, int era);
3688 +void cnstr_shdsc_tls_encap(u32 *const desc, struct alginfo *cdata,
3689 + struct alginfo *adata, unsigned int assoclen,
3690 + unsigned int ivsize, unsigned int authsize,
3691 + unsigned int blocksize, int era);
3693 +void cnstr_shdsc_tls_decap(u32 *const desc, struct alginfo *cdata,
3694 + struct alginfo *adata, unsigned int assoclen,
3695 + unsigned int ivsize, unsigned int authsize,
3696 + unsigned int blocksize, int era);
3698 void cnstr_shdsc_gcm_encap(u32 * const desc, struct alginfo *cdata,
3699 - unsigned int icvsize);
3700 + unsigned int ivsize, unsigned int icvsize,
3701 + const bool is_qi);
3703 void cnstr_shdsc_gcm_decap(u32 * const desc, struct alginfo *cdata,
3704 - unsigned int icvsize);
3705 + unsigned int ivsize, unsigned int icvsize,
3706 + const bool is_qi);
3708 void cnstr_shdsc_rfc4106_encap(u32 * const desc, struct alginfo *cdata,
3709 - unsigned int icvsize);
3710 + unsigned int ivsize, unsigned int icvsize,
3711 + const bool is_qi);
3713 void cnstr_shdsc_rfc4106_decap(u32 * const desc, struct alginfo *cdata,
3714 - unsigned int icvsize);
3715 + unsigned int ivsize, unsigned int icvsize,
3716 + const bool is_qi);
3718 void cnstr_shdsc_rfc4543_encap(u32 * const desc, struct alginfo *cdata,
3719 - unsigned int icvsize);
3720 + unsigned int ivsize, unsigned int icvsize,
3721 + const bool is_qi);
3723 void cnstr_shdsc_rfc4543_decap(u32 * const desc, struct alginfo *cdata,
3724 - unsigned int icvsize);
3725 + unsigned int ivsize, unsigned int icvsize,
3726 + const bool is_qi);
3728 +void cnstr_shdsc_chachapoly(u32 * const desc, struct alginfo *cdata,
3729 + struct alginfo *adata, unsigned int ivsize,
3730 + unsigned int icvsize, const bool encap,
3731 + const bool is_qi);
3733 void cnstr_shdsc_ablkcipher_encap(u32 * const desc, struct alginfo *cdata,
3734 unsigned int ivsize, const bool is_rfc3686,
3735 --- a/drivers/crypto/caam/caamalg_qi.c
3736 +++ b/drivers/crypto/caam/caamalg_qi.c
3745 #include "desc_constr.h"
3746 @@ -53,6 +53,7 @@ struct caam_ctx {
3747 u32 sh_desc_givenc[DESC_MAX_USED_LEN];
3748 u8 key[CAAM_MAX_KEY_SIZE];
3750 + enum dma_data_direction dir;
3751 struct alginfo adata;
3752 struct alginfo cdata;
3753 unsigned int authsize;
3754 @@ -74,6 +75,7 @@ static int aead_set_sh_desc(struct crypt
3755 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
3756 OP_ALG_AAI_CTR_MOD128);
3757 const bool is_rfc3686 = alg->caam.rfc3686;
3758 + struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
3760 if (!ctx->cdata.keylen || !ctx->authsize)
3762 @@ -124,7 +126,7 @@ static int aead_set_sh_desc(struct crypt
3764 cnstr_shdsc_aead_encap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
3765 ivsize, ctx->authsize, is_rfc3686, nonce,
3766 - ctx1_iv_off, true);
3767 + ctx1_iv_off, true, ctrlpriv->era);
3770 /* aead_decrypt shared descriptor */
3771 @@ -149,7 +151,8 @@ skip_enc:
3773 cnstr_shdsc_aead_decap(ctx->sh_desc_dec, &ctx->cdata, &ctx->adata,
3774 ivsize, ctx->authsize, alg->caam.geniv,
3775 - is_rfc3686, nonce, ctx1_iv_off, true);
3776 + is_rfc3686, nonce, ctx1_iv_off, true,
3779 if (!alg->caam.geniv)
3781 @@ -176,7 +179,7 @@ skip_enc:
3783 cnstr_shdsc_aead_givencap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
3784 ivsize, ctx->authsize, is_rfc3686, nonce,
3785 - ctx1_iv_off, true);
3786 + ctx1_iv_off, true, ctrlpriv->era);
3790 @@ -197,6 +200,7 @@ static int aead_setkey(struct crypto_aea
3792 struct caam_ctx *ctx = crypto_aead_ctx(aead);
3793 struct device *jrdev = ctx->jrdev;
3794 + struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
3795 struct crypto_authenc_keys keys;
3798 @@ -211,6 +215,27 @@ static int aead_setkey(struct crypto_aea
3799 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
3803 + * If DKP is supported, use it in the shared descriptor to generate
3806 + if (ctrlpriv->era >= 6) {
3807 + ctx->adata.keylen = keys.authkeylen;
3808 + ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
3809 + OP_ALG_ALGSEL_MASK);
3811 + if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
3814 + memcpy(ctx->key, keys.authkey, keys.authkeylen);
3815 + memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
3817 + dma_sync_single_for_device(jrdev->parent, ctx->key_dma,
3818 + ctx->adata.keylen_pad +
3819 + keys.enckeylen, ctx->dir);
3820 + goto skip_split_key;
3823 ret = gen_split_key(jrdev, ctx->key, &ctx->adata, keys.authkey,
3824 keys.authkeylen, CAAM_MAX_KEY_SIZE -
3826 @@ -220,13 +245,14 @@ static int aead_setkey(struct crypto_aea
3827 /* postpend encryption key to auth split key */
3828 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
3829 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
3830 - keys.enckeylen, DMA_TO_DEVICE);
3831 + keys.enckeylen, ctx->dir);
3833 print_hex_dump(KERN_ERR, "ctx.key@" __stringify(__LINE__)": ",
3834 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
3835 ctx->adata.keylen_pad + keys.enckeylen, 1);
3839 ctx->cdata.keylen = keys.enckeylen;
3841 ret = aead_set_sh_desc(aead);
3842 @@ -258,55 +284,139 @@ badkey:
3846 -static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
3847 - const u8 *key, unsigned int keylen)
3848 +static int tls_set_sh_desc(struct crypto_aead *tls)
3850 - struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
3851 - struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
3852 - const char *alg_name = crypto_tfm_alg_name(tfm);
3853 + struct caam_ctx *ctx = crypto_aead_ctx(tls);
3854 + unsigned int ivsize = crypto_aead_ivsize(tls);
3855 + unsigned int blocksize = crypto_aead_blocksize(tls);
3856 + unsigned int assoclen = 13; /* always 13 bytes for TLS */
3857 + unsigned int data_len[2];
3859 + struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
3861 + if (!ctx->cdata.keylen || !ctx->authsize)
3865 + * TLS 1.0 encrypt shared descriptor
3866 + * Job Descriptor and Shared Descriptor
3867 + * must fit into the 64-word Descriptor h/w Buffer
3869 + data_len[0] = ctx->adata.keylen_pad;
3870 + data_len[1] = ctx->cdata.keylen;
3872 + if (desc_inline_query(DESC_TLS10_ENC_LEN, DESC_JOB_IO_LEN, data_len,
3873 + &inl_mask, ARRAY_SIZE(data_len)) < 0)
3877 + ctx->adata.key_virt = ctx->key;
3879 + ctx->adata.key_dma = ctx->key_dma;
3882 + ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
3884 + ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
3886 + ctx->adata.key_inline = !!(inl_mask & 1);
3887 + ctx->cdata.key_inline = !!(inl_mask & 2);
3889 + cnstr_shdsc_tls_encap(ctx->sh_desc_enc, &ctx->cdata, &ctx->adata,
3890 + assoclen, ivsize, ctx->authsize, blocksize,
3894 + * TLS 1.0 decrypt shared descriptor
3895 + * Keys do not fit inline, regardless of algorithms used
3897 + ctx->adata.key_inline = false;
3898 + ctx->adata.key_dma = ctx->key_dma;
3899 + ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
3901 + cnstr_shdsc_tls_decap(ctx->sh_desc_dec, &ctx->cdata, &ctx->adata,
3902 + assoclen, ivsize, ctx->authsize, blocksize,
3908 +static int tls_setauthsize(struct crypto_aead *tls, unsigned int authsize)
3910 + struct caam_ctx *ctx = crypto_aead_ctx(tls);
3912 + ctx->authsize = authsize;
3913 + tls_set_sh_desc(tls);
3918 +static int tls_setkey(struct crypto_aead *tls, const u8 *key,
3919 + unsigned int keylen)
3921 + struct caam_ctx *ctx = crypto_aead_ctx(tls);
3922 struct device *jrdev = ctx->jrdev;
3923 - unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
3924 - u32 ctx1_iv_off = 0;
3925 - const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
3926 - OP_ALG_AAI_CTR_MOD128);
3927 - const bool is_rfc3686 = (ctr_mode && strstr(alg_name, "rfc3686"));
3928 + struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
3929 + struct crypto_authenc_keys keys;
3932 - memcpy(ctx->key, key, keylen);
3933 + if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
3937 + dev_err(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
3938 + keys.authkeylen + keys.enckeylen, keys.enckeylen,
3940 print_hex_dump(KERN_ERR, "key in @" __stringify(__LINE__)": ",
3941 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
3944 - * AES-CTR needs to load IV in CONTEXT1 reg
3945 - * at an offset of 128bits (16bytes)
3946 - * CONTEXT1[255:128] = IV
3952 - * RFC3686 specific:
3953 - * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
3954 - * | *key = {KEY, NONCE}
3955 + * If DKP is supported, use it in the shared descriptor to generate
3959 - ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
3960 - keylen -= CTR_RFC3686_NONCE_SIZE;
3961 + if (ctrlpriv->era >= 6) {
3962 + ctx->adata.keylen = keys.authkeylen;
3963 + ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
3964 + OP_ALG_ALGSEL_MASK);
3966 + if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
3969 + memcpy(ctx->key, keys.authkey, keys.authkeylen);
3970 + memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
3972 + dma_sync_single_for_device(jrdev, ctx->key_dma,
3973 + ctx->adata.keylen_pad +
3974 + keys.enckeylen, ctx->dir);
3975 + goto skip_split_key;