3d445f537bf248a904f8a83ad1aed9bfbe2eb50a
[openwrt/staging/chunkeey.git] / target / linux / apm821xx / patches-4.14 / 020-0012-crypto-crypto4xx-pointer-arithmetic-overhaul.patch
1 From 9e0a0b3a192af20193f074ed2ad9dd85a2e48d00 Mon Sep 17 00:00:00 2001
2 From: Christian Lamparter <chunkeey@googlemail.com>
3 Date: Fri, 25 Aug 2017 15:47:25 +0200
4 Subject: [PATCH 12/25] crypto: crypto4xx - pointer arithmetic overhaul
5
6 This patch improves the readability of various functions
7 by replacing various void * pointer declarations with
8 their respective struct * types. This makes it possible
9 to use the eye-friendly array-indexing syntax.
10
11 Signed-off-by: Christian Lamparter <chunkeey@googlemail.com>
12 Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
13 ---
14 drivers/crypto/amcc/crypto4xx_alg.c | 26 ++++++++--------
15 drivers/crypto/amcc/crypto4xx_core.c | 60 +++++++++++++++---------------------
16 drivers/crypto/amcc/crypto4xx_core.h | 41 +++++++++++++-----------
17 3 files changed, 59 insertions(+), 68 deletions(-)
18
19 --- a/drivers/crypto/amcc/crypto4xx_alg.c
20 +++ b/drivers/crypto/amcc/crypto4xx_alg.c
21 @@ -134,7 +134,7 @@ static int crypto4xx_setkey_aes(struct c
22 }
23 }
24 /* Setup SA */
25 - sa = (struct dynamic_sa_ctl *) ctx->sa_in;
26 + sa = ctx->sa_in;
27
28 set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_CBC ?
29 SA_SAVE_IV : SA_NOT_SAVE_IV),
30 @@ -160,7 +160,7 @@ static int crypto4xx_setkey_aes(struct c
31 ctx->offset_to_sr_ptr = get_dynamic_sa_offset_state_ptr_field(sa);
32
33 memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
34 - sa = (struct dynamic_sa_ctl *) ctx->sa_out;
35 + sa = ctx->sa_out;
36 sa->sa_command_0.bf.dir = DIR_OUTBOUND;
37
38 return 0;
39 @@ -249,8 +249,7 @@ static int crypto4xx_hash_alg_init(struc
40 struct crypto_alg *alg = tfm->__crt_alg;
41 struct crypto4xx_alg *my_alg = crypto_alg_to_crypto4xx_alg(alg);
42 struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
43 - struct dynamic_sa_ctl *sa;
44 - struct dynamic_sa_hash160 *sa_in;
45 + struct dynamic_sa_hash160 *sa;
46 int rc;
47
48 ctx->dev = my_alg->dev;
49 @@ -274,25 +273,24 @@ static int crypto4xx_hash_alg_init(struc
50
51 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
52 sizeof(struct crypto4xx_ctx));
53 - sa = (struct dynamic_sa_ctl *) ctx->sa_in;
54 - set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
55 + sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
56 + set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
57 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
58 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
59 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
60 SA_OPCODE_HASH, DIR_INBOUND);
61 - set_dynamic_sa_command_1(sa, 0, SA_HASH_MODE_HASH,
62 + set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
63 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
64 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
65 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
66 SA_NOT_COPY_HDR);
67 ctx->direction = DIR_INBOUND;
68 - sa->sa_contents.w = SA_HASH160_CONTENTS;
69 - sa_in = (struct dynamic_sa_hash160 *) ctx->sa_in;
70 /* Need to zero hash digest in SA */
71 - memset(sa_in->inner_digest, 0, sizeof(sa_in->inner_digest));
72 - memset(sa_in->outer_digest, 0, sizeof(sa_in->outer_digest));
73 - sa_in->state_ptr = ctx->state_record_dma_addr;
74 - ctx->offset_to_sr_ptr = get_dynamic_sa_offset_state_ptr_field(sa);
75 + memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
76 + memset(sa->outer_digest, 0, sizeof(sa->outer_digest));
77 + sa->state_ptr = ctx->state_record_dma_addr;
78 + ctx->offset_to_sr_ptr =
79 + get_dynamic_sa_offset_state_ptr_field(&sa->ctrl);
80
81 return 0;
82 }
83 @@ -303,7 +301,7 @@ int crypto4xx_hash_init(struct ahash_req
84 int ds;
85 struct dynamic_sa_ctl *sa;
86
87 - sa = (struct dynamic_sa_ctl *) ctx->sa_in;
88 + sa = ctx->sa_in;
89 ds = crypto_ahash_digestsize(
90 __crypto_ahash_cast(req->base.tfm));
91 sa->sa_command_0.bf.digest_len = ds >> 2;
92 --- a/drivers/crypto/amcc/crypto4xx_core.c
93 +++ b/drivers/crypto/amcc/crypto4xx_core.c
94 @@ -211,7 +211,7 @@ static u32 crypto4xx_build_pdr(struct cr
95 }
96 memset(dev->pdr, 0, sizeof(struct ce_pd) * PPC4XX_NUM_PD);
97 dev->shadow_sa_pool = dma_alloc_coherent(dev->core_dev->device,
98 - 256 * PPC4XX_NUM_PD,
99 + sizeof(union shadow_sa_buf) * PPC4XX_NUM_PD,
100 &dev->shadow_sa_pool_pa,
101 GFP_ATOMIC);
102 if (!dev->shadow_sa_pool)
103 @@ -223,16 +223,14 @@ static u32 crypto4xx_build_pdr(struct cr
104 if (!dev->shadow_sr_pool)
105 return -ENOMEM;
106 for (i = 0; i < PPC4XX_NUM_PD; i++) {
107 - pd_uinfo = (struct pd_uinfo *) (dev->pdr_uinfo +
108 - sizeof(struct pd_uinfo) * i);
109 + pd_uinfo = &dev->pdr_uinfo[i];
110
111 /* alloc 256 bytes which is enough for any kind of dynamic sa */
112 - pd_uinfo->sa_va = dev->shadow_sa_pool + 256 * i;
113 + pd_uinfo->sa_va = &dev->shadow_sa_pool[i].sa;
114 pd_uinfo->sa_pa = dev->shadow_sa_pool_pa + 256 * i;
115
116 /* alloc state record */
117 - pd_uinfo->sr_va = dev->shadow_sr_pool +
118 - sizeof(struct sa_state_record) * i;
119 + pd_uinfo->sr_va = &dev->shadow_sr_pool[i];
120 pd_uinfo->sr_pa = dev->shadow_sr_pool_pa +
121 sizeof(struct sa_state_record) * i;
122 }
123 @@ -248,8 +246,9 @@ static void crypto4xx_destroy_pdr(struct
124 dev->pdr, dev->pdr_pa);
125
126 if (dev->shadow_sa_pool)
127 - dma_free_coherent(dev->core_dev->device, 256 * PPC4XX_NUM_PD,
128 - dev->shadow_sa_pool, dev->shadow_sa_pool_pa);
129 + dma_free_coherent(dev->core_dev->device,
130 + sizeof(union shadow_sa_buf) * PPC4XX_NUM_PD,
131 + dev->shadow_sa_pool, dev->shadow_sa_pool_pa);
132
133 if (dev->shadow_sr_pool)
134 dma_free_coherent(dev->core_dev->device,
135 @@ -277,11 +276,9 @@ static u32 crypto4xx_get_pd_from_pdr_nol
136
137 static u32 crypto4xx_put_pd_to_pdr(struct crypto4xx_device *dev, u32 idx)
138 {
139 - struct pd_uinfo *pd_uinfo;
140 + struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[idx];
141 unsigned long flags;
142
143 - pd_uinfo = (struct pd_uinfo *)(dev->pdr_uinfo +
144 - sizeof(struct pd_uinfo) * idx);
145 spin_lock_irqsave(&dev->core_dev->lock, flags);
146 if (dev->pdr_tail != PPC4XX_LAST_PD)
147 dev->pdr_tail++;
148 @@ -298,7 +295,7 @@ static struct ce_pd *crypto4xx_get_pdp(s
149 {
150 *pd_dma = dev->pdr_pa + sizeof(struct ce_pd) * idx;
151
152 - return dev->pdr + sizeof(struct ce_pd) * idx;
153 + return &dev->pdr[idx];
154 }
155
156 /**
157 @@ -376,7 +373,7 @@ static inline struct ce_gd *crypto4xx_ge
158 {
159 *gd_dma = dev->gdr_pa + sizeof(struct ce_gd) * idx;
160
161 - return (struct ce_gd *) (dev->gdr + sizeof(struct ce_gd) * idx);
162 + return &dev->gdr[idx];
163 }
164
165 /**
166 @@ -387,7 +384,6 @@ static inline struct ce_gd *crypto4xx_ge
167 static u32 crypto4xx_build_sdr(struct crypto4xx_device *dev)
168 {
169 int i;
170 - struct ce_sd *sd_array;
171
172 /* alloc memory for scatter descriptor ring */
173 dev->sdr = dma_alloc_coherent(dev->core_dev->device,
174 @@ -407,10 +403,8 @@ static u32 crypto4xx_build_sdr(struct cr
175 return -ENOMEM;
176 }
177
178 - sd_array = dev->sdr;
179 -
180 for (i = 0; i < PPC4XX_NUM_SD; i++) {
181 - sd_array[i].ptr = dev->scatter_buffer_pa +
182 + dev->sdr[i].ptr = dev->scatter_buffer_pa +
183 PPC4XX_SD_BUFFER_SIZE * i;
184 }
185
186 @@ -480,7 +474,7 @@ static inline struct ce_sd *crypto4xx_ge
187 {
188 *sd_dma = dev->sdr_pa + sizeof(struct ce_sd) * idx;
189
190 - return (struct ce_sd *)(dev->sdr + sizeof(struct ce_sd) * idx);
191 + return &dev->sdr[idx];
192 }
193
194 static void crypto4xx_copy_pkt_to_dst(struct crypto4xx_device *dev,
195 @@ -529,11 +523,10 @@ static u32 crypto4xx_copy_digest_to_dst(
196 struct crypto4xx_ctx *ctx)
197 {
198 struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *) ctx->sa_in;
199 - struct sa_state_record *state_record =
200 - (struct sa_state_record *) pd_uinfo->sr_va;
201
202 if (sa->sa_command_0.bf.hash_alg == SA_HASH_ALG_SHA1) {
203 - memcpy((void *) pd_uinfo->dest_va, state_record->save_digest,
204 + memcpy((void *) pd_uinfo->dest_va,
205 + pd_uinfo->sr_va->save_digest,
206 SA_HASH_ALG_SHA1_DIGEST_SIZE);
207 }
208
209 @@ -616,11 +609,9 @@ static u32 crypto4xx_ahash_done(struct c
210
211 static u32 crypto4xx_pd_done(struct crypto4xx_device *dev, u32 idx)
212 {
213 - struct ce_pd *pd;
214 - struct pd_uinfo *pd_uinfo;
215 + struct ce_pd *pd = &dev->pdr[idx];
216 + struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[idx];
217
218 - pd = dev->pdr + sizeof(struct ce_pd)*idx;
219 - pd_uinfo = dev->pdr_uinfo + sizeof(struct pd_uinfo)*idx;
220 if (crypto_tfm_alg_type(pd_uinfo->async_req->tfm) ==
221 CRYPTO_ALG_TYPE_ABLKCIPHER)
222 return crypto4xx_ablkcipher_done(dev, pd_uinfo, pd);
223 @@ -721,7 +712,6 @@ u32 crypto4xx_build_pd(struct crypto_asy
224 unsigned long flags;
225 struct pd_uinfo *pd_uinfo = NULL;
226 unsigned int nbytes = datalen, idx;
227 - unsigned int ivlen = 0;
228 u32 gd_idx = 0;
229
230 /* figure how many gd is needed */
231 @@ -780,17 +770,15 @@ u32 crypto4xx_build_pd(struct crypto_asy
232 }
233 spin_unlock_irqrestore(&dev->core_dev->lock, flags);
234
235 - pd_uinfo = (struct pd_uinfo *)(dev->pdr_uinfo +
236 - sizeof(struct pd_uinfo) * pd_entry);
237 + pd_uinfo = &dev->pdr_uinfo[pd_entry];
238 pd = crypto4xx_get_pdp(dev, &pd_dma, pd_entry);
239 pd_uinfo->async_req = req;
240 pd_uinfo->num_gd = num_gd;
241 pd_uinfo->num_sd = num_sd;
242
243 if (iv_len || ctx->is_hash) {
244 - ivlen = iv_len;
245 pd->sa = pd_uinfo->sa_pa;
246 - sa = (struct dynamic_sa_ctl *) pd_uinfo->sa_va;
247 + sa = pd_uinfo->sa_va;
248 if (ctx->direction == DIR_INBOUND)
249 memcpy(sa, ctx->sa_in, ctx->sa_len * 4);
250 else
251 @@ -800,14 +788,15 @@ u32 crypto4xx_build_pd(struct crypto_asy
252 &pd_uinfo->sr_pa, 4);
253
254 if (iv_len)
255 - crypto4xx_memcpy_le(pd_uinfo->sr_va, iv, iv_len);
256 + crypto4xx_memcpy_le(pd_uinfo->sr_va->save_iv,
257 + iv, iv_len);
258 } else {
259 if (ctx->direction == DIR_INBOUND) {
260 pd->sa = ctx->sa_in_dma_addr;
261 - sa = (struct dynamic_sa_ctl *) ctx->sa_in;
262 + sa = ctx->sa_in;
263 } else {
264 pd->sa = ctx->sa_out_dma_addr;
265 - sa = (struct dynamic_sa_ctl *) ctx->sa_out;
266 + sa = ctx->sa_out;
267 }
268 }
269 pd->sa_len = ctx->sa_len;
270 @@ -1015,9 +1004,8 @@ static void crypto4xx_bh_tasklet_cb(unsi
271
272 while (core_dev->dev->pdr_head != core_dev->dev->pdr_tail) {
273 tail = core_dev->dev->pdr_tail;
274 - pd_uinfo = core_dev->dev->pdr_uinfo +
275 - sizeof(struct pd_uinfo)*tail;
276 - pd = core_dev->dev->pdr + sizeof(struct ce_pd) * tail;
277 + pd_uinfo = &core_dev->dev->pdr_uinfo[tail];
278 + pd = &core_dev->dev->pdr[tail];
279 if ((pd_uinfo->state == PD_ENTRY_INUSE) &&
280 pd->pd_ctl.bf.pe_done &&
281 !pd->pd_ctl.bf.host_ready) {
282 --- a/drivers/crypto/amcc/crypto4xx_core.h
283 +++ b/drivers/crypto/amcc/crypto4xx_core.h
284 @@ -23,6 +23,8 @@
285 #define __CRYPTO4XX_CORE_H__
286
287 #include <crypto/internal/hash.h>
288 +#include "crypto4xx_reg_def.h"
289 +#include "crypto4xx_sa.h"
290
291 #define MODULE_NAME "crypto4xx"
292
293 @@ -48,6 +50,13 @@
294
295 struct crypto4xx_device;
296
297 +union shadow_sa_buf {
298 + struct dynamic_sa_ctl sa;
299 +
300 + /* alloc 256 bytes which is enough for any kind of dynamic sa */
301 + u8 buf[256];
302 +} __packed;
303 +
304 struct pd_uinfo {
305 struct crypto4xx_device *dev;
306 u32 state;
307 @@ -60,9 +69,9 @@ struct pd_uinfo {
308 used by this packet */
309 u32 num_sd; /* number of scatter discriptors
310 used by this packet */
311 - void *sa_va; /* shadow sa, when using cp from ctx->sa */
312 + struct dynamic_sa_ctl *sa_va; /* shadow sa */
313 u32 sa_pa;
314 - void *sr_va; /* state record for shadow sa */
315 + struct sa_state_record *sr_va; /* state record for shadow sa */
316 u32 sr_pa;
317 struct scatterlist *dest_va;
318 struct crypto_async_request *async_req; /* base crypto request
319 @@ -75,22 +84,18 @@ struct crypto4xx_device {
320 void __iomem *ce_base;
321 void __iomem *trng_base;
322
323 - void *pdr; /* base address of packet
324 - descriptor ring */
325 - dma_addr_t pdr_pa; /* physical address used to
326 - program ce pdr_base_register */
327 - void *gdr; /* gather descriptor ring */
328 - dma_addr_t gdr_pa; /* physical address used to
329 - program ce gdr_base_register */
330 - void *sdr; /* scatter descriptor ring */
331 - dma_addr_t sdr_pa; /* physical address used to
332 - program ce sdr_base_register */
333 + struct ce_pd *pdr; /* base address of packet descriptor ring */
334 + dma_addr_t pdr_pa; /* physical address of pdr_base_register */
335 + struct ce_gd *gdr; /* gather descriptor ring */
336 + dma_addr_t gdr_pa; /* physical address of gdr_base_register */
337 + struct ce_sd *sdr; /* scatter descriptor ring */
338 + dma_addr_t sdr_pa; /* physical address of sdr_base_register */
339 void *scatter_buffer_va;
340 dma_addr_t scatter_buffer_pa;
341
342 - void *shadow_sa_pool; /* pool of memory for sa in pd_uinfo */
343 + union shadow_sa_buf *shadow_sa_pool;
344 dma_addr_t shadow_sa_pool_pa;
345 - void *shadow_sr_pool; /* pool of memory for sr in pd_uinfo */
346 + struct sa_state_record *shadow_sr_pool;
347 dma_addr_t shadow_sr_pool_pa;
348 u32 pdr_tail;
349 u32 pdr_head;
350 @@ -98,7 +103,7 @@ struct crypto4xx_device {
351 u32 gdr_head;
352 u32 sdr_tail;
353 u32 sdr_head;
354 - void *pdr_uinfo;
355 + struct pd_uinfo *pdr_uinfo;
356 struct list_head alg_list; /* List of algorithm supported
357 by this device */
358 };
359 @@ -116,11 +121,11 @@ struct crypto4xx_core_device {
360
361 struct crypto4xx_ctx {
362 struct crypto4xx_device *dev;
363 - void *sa_in;
364 + struct dynamic_sa_ctl *sa_in;
365 dma_addr_t sa_in_dma_addr;
366 - void *sa_out;
367 + struct dynamic_sa_ctl *sa_out;
368 dma_addr_t sa_out_dma_addr;
369 - void *state_record;
370 + struct sa_state_record *state_record;
371 dma_addr_t state_record_dma_addr;
372 u32 sa_len;
373 u32 offset_to_sr_ptr; /* offset to state ptr, in dynamic sa */