kernel: 5.4: import wireguard backport
[openwrt/openwrt.git] target/linux/generic/backport-5.4/080-wireguard-0016-crypto-x86-poly1305-depend-on-generic-library-not-ge.patch
From 0000000000000000000000000000000000000000 Mon Sep 17 00:00:00 2001
From: Ard Biesheuvel <ardb@kernel.org>
Date: Fri, 8 Nov 2019 13:22:22 +0100
Subject: [PATCH] crypto: x86/poly1305 - depend on generic library not generic
 shash

commit 1b2c6a5120489d41c8ea3b8dacd0b4586289b158 upstream.

Remove the dependency on the generic Poly1305 driver. Instead, depend
on the generic library so that we only reuse code without pulling in
the generic skcipher implementation as well.

While at it, remove the logic that prefers the non-SIMD path for short
inputs - this is no longer necessary after recent FPU handling changes
on x86.

Since this removes the last remaining user of the routines exported
by the generic shash driver, unexport them and make them static.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Signed-off-by: Jason A. Donenfeld <Jason@zx2c4.com>
---
 arch/x86/crypto/poly1305_glue.c    | 66 +++++++++++++++++++++++++-----
 crypto/Kconfig                     |  2 +-
 crypto/poly1305_generic.c          | 11 ++---
 include/crypto/internal/poly1305.h |  9 ----
 4 files changed, 60 insertions(+), 28 deletions(-)

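As a rough illustration of the shape this change gives the update path, the
sketch below models the new per-call dispatch: full blocks go through the
SIMD routine only when SIMD is usable at that moment, with the FPU section
opened and closed around each call, and fall back to the scalar core
otherwise. It is ordinary userspace C, not kernel code; simd_usable(),
fpu_begin()/fpu_end() and the two *_blocks() helpers are stand-ins for
crypto_simd_usable(), kernel_fpu_begin()/kernel_fpu_end(),
poly1305_simd_blocks() and poly1305_scalar_blocks(), and the partial-block
buffering of the real poly1305_simd_update() is reduced to stashing the tail.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

#define BLOCK_SIZE 16	/* POLY1305_BLOCK_SIZE */

struct ctx {
	uint8_t buf[BLOCK_SIZE];	/* buffered partial trailing block */
	size_t buflen;
};

/* Stand-ins for the kernel helpers named in the patch. */
static bool simd_usable(void) { return true; }	/* crypto_simd_usable() */
static void fpu_begin(void) { }			/* kernel_fpu_begin() */
static void fpu_end(void) { }			/* kernel_fpu_end() */

/* Consume whole blocks, return how many tail bytes are left over. */
static size_t simd_blocks(struct ctx *c, const uint8_t *src, size_t len)
{
	(void)c; (void)src;
	return len % BLOCK_SIZE;
}

static size_t scalar_blocks(struct ctx *c, const uint8_t *src, size_t len)
{
	(void)c; (void)src;
	return len % BLOCK_SIZE;
}

/*
 * Mirrors the reworked flow: decide SIMD vs. scalar per call and wrap only
 * the SIMD work in the FPU section, instead of skipping SIMD for inputs of
 * 288 bytes or less.  (The real function also merges a previously buffered
 * partial block first; that part is omitted here.)
 */
static void update(struct ctx *c, const uint8_t *src, size_t len)
{
	if (len >= BLOCK_SIZE) {
		size_t rem;

		if (simd_usable()) {
			fpu_begin();
			rem = simd_blocks(c, src, len);
			fpu_end();
		} else {
			rem = scalar_blocks(c, src, len);
		}
		src += len - rem;
		len = rem;
	}
	if (len) {			/* stash the tail for the next call */
		memcpy(c->buf, src, len);
		c->buflen = len;
	}
}

int main(void)
{
	struct ctx c = { .buflen = 0 };
	uint8_t msg[40] = { 0 };

	update(&c, msg, sizeof(msg));
	printf("buffered tail: %zu bytes\n", c.buflen);	/* 40 % 16 = 8 */
	return 0;
}
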
--- a/arch/x86/crypto/poly1305_glue.c
+++ b/arch/x86/crypto/poly1305_glue.c
@@ -34,6 +34,24 @@ static void poly1305_simd_mult(u32 *a, c
 	poly1305_block_sse2(a, m, b, 1);
 }
 
+static unsigned int poly1305_scalar_blocks(struct poly1305_desc_ctx *dctx,
+					   const u8 *src, unsigned int srclen)
+{
+	unsigned int datalen;
+
+	if (unlikely(!dctx->sset)) {
+		datalen = crypto_poly1305_setdesckey(dctx, src, srclen);
+		src += srclen - datalen;
+		srclen = datalen;
+	}
+	if (srclen >= POLY1305_BLOCK_SIZE) {
+		poly1305_core_blocks(&dctx->h, dctx->r, src,
+				     srclen / POLY1305_BLOCK_SIZE, 1);
+		srclen %= POLY1305_BLOCK_SIZE;
+	}
+	return srclen;
+}
+
 static unsigned int poly1305_simd_blocks(struct poly1305_desc_ctx *dctx,
 					 const u8 *src, unsigned int srclen)
 {
@@ -91,12 +109,6 @@ static int poly1305_simd_update(struct s
 	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
 	unsigned int bytes;
 
-	/* kernel_fpu_begin/end is costly, use fallback for small updates */
-	if (srclen <= 288 || !crypto_simd_usable())
-		return crypto_poly1305_update(desc, src, srclen);
-
-	kernel_fpu_begin();
-
 	if (unlikely(dctx->buflen)) {
 		bytes = min(srclen, POLY1305_BLOCK_SIZE - dctx->buflen);
 		memcpy(dctx->buf + dctx->buflen, src, bytes);
@@ -105,25 +117,57 @@ static int poly1305_simd_update(struct s
 		dctx->buflen += bytes;
 
 		if (dctx->buflen == POLY1305_BLOCK_SIZE) {
-			poly1305_simd_blocks(dctx, dctx->buf,
-					     POLY1305_BLOCK_SIZE);
+			if (likely(crypto_simd_usable())) {
+				kernel_fpu_begin();
+				poly1305_simd_blocks(dctx, dctx->buf,
+						     POLY1305_BLOCK_SIZE);
+				kernel_fpu_end();
+			} else {
+				poly1305_scalar_blocks(dctx, dctx->buf,
+						       POLY1305_BLOCK_SIZE);
+			}
 			dctx->buflen = 0;
 		}
 	}
 
 	if (likely(srclen >= POLY1305_BLOCK_SIZE)) {
-		bytes = poly1305_simd_blocks(dctx, src, srclen);
+		if (likely(crypto_simd_usable())) {
+			kernel_fpu_begin();
+			bytes = poly1305_simd_blocks(dctx, src, srclen);
+			kernel_fpu_end();
+		} else {
+			bytes = poly1305_scalar_blocks(dctx, src, srclen);
+		}
 		src += srclen - bytes;
 		srclen = bytes;
 	}
 
-	kernel_fpu_end();
-
 	if (unlikely(srclen)) {
 		dctx->buflen = srclen;
 		memcpy(dctx->buf, src, srclen);
 	}
+}
+
+static int crypto_poly1305_init(struct shash_desc *desc)
+{
+	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
+
+	poly1305_core_init(&dctx->h);
+	dctx->buflen = 0;
+	dctx->rset = 0;
+	dctx->sset = false;
+
+	return 0;
+}
+
+static int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
+{
+	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
+
+	if (unlikely(!dctx->sset))
+		return -ENOKEY;
 
+	poly1305_final_generic(dctx, dst);
 	return 0;
 }
 
--- a/crypto/Kconfig
+++ b/crypto/Kconfig
@@ -697,7 +697,7 @@ config CRYPTO_POLY1305
 config CRYPTO_POLY1305_X86_64
 	tristate "Poly1305 authenticator algorithm (x86_64/SSE2/AVX2)"
 	depends on X86 && 64BIT
-	select CRYPTO_POLY1305
+	select CRYPTO_LIB_POLY1305_GENERIC
 	help
 	  Poly1305 authenticator algorithm, RFC7539.
 
--- a/crypto/poly1305_generic.c
+++ b/crypto/poly1305_generic.c
@@ -19,7 +19,7 @@
 #include <linux/module.h>
 #include <asm/unaligned.h>
 
-int crypto_poly1305_init(struct shash_desc *desc)
+static int crypto_poly1305_init(struct shash_desc *desc)
 {
 	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
 
@@ -30,7 +30,6 @@ int crypto_poly1305_init(struct shash_de
 
 	return 0;
 }
-EXPORT_SYMBOL_GPL(crypto_poly1305_init);
 
 static void poly1305_blocks(struct poly1305_desc_ctx *dctx, const u8 *src,
			     unsigned int srclen)
@@ -47,8 +46,8 @@ static void poly1305_blocks(struct poly1
 			     srclen / POLY1305_BLOCK_SIZE, 1);
 }
 
-int crypto_poly1305_update(struct shash_desc *desc,
-			   const u8 *src, unsigned int srclen)
+static int crypto_poly1305_update(struct shash_desc *desc,
+				  const u8 *src, unsigned int srclen)
 {
 	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
 	unsigned int bytes;
@@ -80,9 +79,8 @@ int crypto_poly1305_update(struct shash_
 
 	return 0;
 }
-EXPORT_SYMBOL_GPL(crypto_poly1305_update);
 
-int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
+static int crypto_poly1305_final(struct shash_desc *desc, u8 *dst)
 {
 	struct poly1305_desc_ctx *dctx = shash_desc_ctx(desc);
 
@@ -92,7 +90,6 @@ int crypto_poly1305_final(struct shash_d
 	poly1305_final_generic(dctx, dst);
 	return 0;
 }
-EXPORT_SYMBOL_GPL(crypto_poly1305_final);
 
 static struct shash_alg poly1305_alg = {
 	.digestsize	= POLY1305_DIGEST_SIZE,
--- a/include/crypto/internal/poly1305.h
+++ b/include/crypto/internal/poly1305.h
@@ -10,8 +10,6 @@
 #include <linux/types.h>
 #include <crypto/poly1305.h>
 
-struct shash_desc;
-
 /*
  * Poly1305 core functions. These implement the ε-almost-∆-universal hash
  * function underlying the Poly1305 MAC, i.e. they don't add an encrypted nonce
@@ -28,13 +26,6 @@ void poly1305_core_blocks(struct poly130
 			  unsigned int nblocks, u32 hibit);
 void poly1305_core_emit(const struct poly1305_state *state, void *dst);
 
-/* Crypto API helper functions for the Poly1305 MAC */
-int crypto_poly1305_init(struct shash_desc *desc);
-
-int crypto_poly1305_update(struct shash_desc *desc,
-			   const u8 *src, unsigned int srclen);
-int crypto_poly1305_final(struct shash_desc *desc, u8 *dst);
-
 /*
  * Poly1305 requires a unique key for each tag, which implies that we can't set
  * it on the tfm that gets accessed by multiple users simultaneously. Instead we