apm821xx: backport crypto4xx patches from 4.15
oweals/openwrt.git: target/linux/apm821xx/patches-4.14/020-0019-crypto-crypto4xx-overhaul-crypto4xx_build_pd.patch
From cd4dcd6da7a2610e0562a6e130bb68cc544a8fb1 Mon Sep 17 00:00:00 2001
From: Christian Lamparter <chunkeey@gmail.com>
Date: Wed, 4 Oct 2017 01:00:11 +0200
Subject: [PATCH 19/25] crypto: crypto4xx - overhaul crypto4xx_build_pd()

This patch overhauls and fixes the code related to crypto4xx_build_pd():

 * crypto4xx_build_pd() did not handle chained source scatterlists.
   This is fixed by replacing the buggy indexed access of &src[idx]
   with sg_next() in the gather array setup loop (see the sketch
   after this list).

 * The redundant is_hash, direction, save_iv and pd_ctl members
   of the crypto4xx_ctx struct have been removed.
    - is_hash can be derived from the crypto_async_request parameter.
    - direction is already part of the security association's
      bf.dir bitfield.
    - save_iv is unused.
    - pd_ctl always had the host_ready bit enabled anyway.
      (The hash_final case is rather pointless, since the ahash
       code has been deactivated.)

 * Make crypto4xx_build_pd()'s caller responsible for converting
   the IV to the LE32 format.

 * Change crypto4xx_ahash_update() and crypto4xx_ahash_digest() to
   initialize a temporary destination scatterlist. This allows the
   removal of an ugly cast of req->result (which is a pointer to a
   u8 array) to a scatterlist pointer.

 * Change crypto4xx_build_pd()'s return type to int. After all,
   it returns -EINPROGRESS/-EBUSY.

 * Fix crypto4xx_build_pd()'s thread-unsafe SA handling.

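A minimal sketch of the corrected gather walk referenced in the first
point above. The gather-descriptor bookkeeping is elided and
map_segment() is a hypothetical stand-in for the dma_map_page()-based
descriptor fill; only the sg_next()/min() walk mirrors the hunk below:

  #include <linux/kernel.h>
  #include <linux/scatterlist.h>

  static void walk_gather_sketch(struct scatterlist *src,
                                 unsigned int nbytes)
  {
          struct scatterlist *sg = src;

          while (nbytes) {
                  /* clamp the segment to the bytes still outstanding */
                  size_t len = min(sg->length, nbytes);

                  map_segment(sg_page(sg), sg->offset, len);
                  if (len >= nbytes)
                          break;

                  nbytes -= sg->length;
                  /* sg_next() follows chained scatterlists;
                   * the old &src[idx] indexing did not. */
                  sg = sg_next(sg);
          }
  }
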
Signed-off-by: Christian Lamparter <chunkeey@gmail.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
---
 drivers/crypto/amcc/crypto4xx_alg.c  |  87 +++++++++++-------------
 drivers/crypto/amcc/crypto4xx_core.c | 128 ++++++++++++++++-------------------
 drivers/crypto/amcc/crypto4xx_core.h |  12 ++--
 3 files changed, 103 insertions(+), 124 deletions(-)

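As context for the last bullet of the commit message, a condensed
sketch of the thread-safe SA handling from the core.c hunk below, with
the surrounding descriptor setup stripped; setup_shadow_sa() is a
hypothetical name used only for exposition:

  /* Each request copies its SA into the descriptor's private shadow
   * buffer and patches the state-record pointer into that copy, so
   * concurrent requests on one tfm no longer race on ctx->sa_in/out. */
  static void setup_shadow_sa(struct pd_uinfo *pd_uinfo,
                              const struct dynamic_sa_ctl *req_sa,
                              unsigned int sa_len)
  {
          struct dynamic_sa_ctl *sa = pd_uinfo->sa_va;
          size_t offset_to_sr_ptr;

          memcpy(sa, req_sa, sa_len * 4);
          offset_to_sr_ptr = get_dynamic_sa_offset_state_ptr_field(sa);
          *(u32 *)((unsigned long)sa + offset_to_sr_ptr) = pd_uinfo->sr_pa;
  }
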
--- a/drivers/crypto/amcc/crypto4xx_alg.c
+++ b/drivers/crypto/amcc/crypto4xx_alg.c
@@ -75,27 +75,29 @@ static void set_dynamic_sa_command_1(str
 int crypto4xx_encrypt(struct ablkcipher_request *req)
 {
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
+       unsigned int ivlen = crypto_ablkcipher_ivsize(
+               crypto_ablkcipher_reqtfm(req));
+       __le32 iv[ivlen];
 
-       ctx->direction = DIR_OUTBOUND;
-       ctx->is_hash = 0;
-       ctx->pd_ctl = 0x1;
+       if (ivlen)
+               crypto4xx_memcpy_to_le32(iv, req->info, ivlen);
 
        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
-               req->nbytes, req->info,
-               crypto_ablkcipher_ivsize(crypto_ablkcipher_reqtfm(req)));
+               req->nbytes, iv, ivlen, ctx->sa_out, ctx->sa_len);
 }
 
 int crypto4xx_decrypt(struct ablkcipher_request *req)
 {
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
+       unsigned int ivlen = crypto_ablkcipher_ivsize(
+               crypto_ablkcipher_reqtfm(req));
+       __le32 iv[ivlen];
 
-       ctx->direction = DIR_INBOUND;
-       ctx->is_hash = 0;
-       ctx->pd_ctl = 1;
+       if (ivlen)
+               crypto4xx_memcpy_to_le32(iv, req->info, ivlen);
 
        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
-               req->nbytes, req->info,
-               crypto_ablkcipher_ivsize(crypto_ablkcipher_reqtfm(req)));
+               req->nbytes, iv, ivlen, ctx->sa_in, ctx->sa_len);
 }
 
 /**
@@ -153,11 +155,6 @@ static int crypto4xx_setkey_aes(struct c
                                 key, keylen);
        sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
        sa->sa_command_1.bf.key_len = keylen >> 3;
-       ctx->is_hash = 0;
-       ctx->direction = DIR_INBOUND;
-       memcpy(sa + get_dynamic_sa_offset_state_ptr_field(sa),
-              (void *)&ctx->state_record_dma_addr, 4);
-       ctx->offset_to_sr_ptr = get_dynamic_sa_offset_state_ptr_field(sa);
 
        memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
        sa = ctx->sa_out;
@@ -206,7 +203,7 @@ int crypto4xx_setkey_rfc3686(struct cryp
        if (rc)
                return rc;
 
-       memcpy(ctx->state_record,
+       crypto4xx_memcpy_to_le32(ctx->state_record->save_iv,
                key + keylen - CTR_RFC3686_NONCE_SIZE, CTR_RFC3686_NONCE_SIZE);
 
        return 0;
@@ -215,27 +212,29 @@ int crypto4xx_setkey_rfc3686(struct cryp
 int crypto4xx_rfc3686_encrypt(struct ablkcipher_request *req)
 {
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
-       __be32 iv[AES_IV_SIZE / 4] = { *(u32 *)ctx->state_record,
-               *(u32 *) req->info, *(u32 *) (req->info + 4), cpu_to_be32(1) };
-
-       ctx->direction = DIR_OUTBOUND;
-       ctx->pd_ctl = 1;
+       __le32 iv[AES_IV_SIZE / 4] = {
+               ctx->state_record->save_iv[0],
+               cpu_to_le32p((u32 *) req->info),
+               cpu_to_le32p((u32 *) (req->info + 4)),
+               cpu_to_le32(1) };
 
        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
-                                 req->nbytes, iv, AES_IV_SIZE);
+                                 req->nbytes, iv, AES_IV_SIZE,
+                                 ctx->sa_out, ctx->sa_len);
 }
 
 int crypto4xx_rfc3686_decrypt(struct ablkcipher_request *req)
 {
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
-       __be32 iv[AES_IV_SIZE / 4] = { *(u32 *)ctx->state_record,
-               *(u32 *) req->info, *(u32 *) (req->info + 4), cpu_to_be32(1) };
-
-       ctx->direction = DIR_INBOUND;
-       ctx->pd_ctl = 1;
+       __le32 iv[AES_IV_SIZE / 4] = {
+               ctx->state_record->save_iv[0],
+               cpu_to_le32p((u32 *) req->info),
+               cpu_to_le32p((u32 *) (req->info + 4)),
+               cpu_to_le32(1) };
 
        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
-                                 req->nbytes, iv, AES_IV_SIZE);
+                                 req->nbytes, iv, AES_IV_SIZE,
+                                 ctx->sa_out, ctx->sa_len);
 }
 
 /**
@@ -253,7 +252,6 @@ static int crypto4xx_hash_alg_init(struc
        int rc;
 
        ctx->dev   = my_alg->dev;
-       ctx->is_hash = 1;
 
        /* Create SA */
        if (ctx->sa_in_dma_addr || ctx->sa_out_dma_addr)
@@ -284,13 +282,9 @@ static int crypto4xx_hash_alg_init(struc
                                 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
                                 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);
-       ctx->direction = DIR_INBOUND;
        /* Need to zero hash digest in SA */
        memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
        memset(sa->outer_digest, 0, sizeof(sa->outer_digest));
-       sa->state_ptr = ctx->state_record_dma_addr;
-       ctx->offset_to_sr_ptr =
-               get_dynamic_sa_offset_state_ptr_field(&sa->ctrl);
 
        return 0;
 }
@@ -306,23 +300,22 @@ int crypto4xx_hash_init(struct ahash_req
                        __crypto_ahash_cast(req->base.tfm));
        sa->sa_command_0.bf.digest_len = ds >> 2;
        sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;
-       ctx->is_hash = 1;
-       ctx->direction = DIR_INBOUND;
 
        return 0;
 }
 
 int crypto4xx_hash_update(struct ahash_request *req)
 {
+       struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
+       struct scatterlist dst;
+       unsigned int ds = crypto_ahash_digestsize(ahash);
+
+       sg_init_one(&dst, req->result, ds);
 
-       ctx->is_hash = 1;
-       ctx->pd_ctl = 0x11;
-       ctx->direction = DIR_INBOUND;
-
-       return crypto4xx_build_pd(&req->base, ctx, req->src,
-                                 (struct scatterlist *) req->result,
-                                 req->nbytes, NULL, 0);
+       return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
+                                 req->nbytes, NULL, 0, ctx->sa_in,
+                                 ctx->sa_len);
 }
 
 int crypto4xx_hash_final(struct ahash_request *req)
@@ -332,14 +325,16 @@ int crypto4xx_hash_final(struct ahash_re
 
 int crypto4xx_hash_digest(struct ahash_request *req)
 {
+       struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
+       struct scatterlist dst;
+       unsigned int ds = crypto_ahash_digestsize(ahash);
 
-       ctx->pd_ctl = 0x11;
-       ctx->direction = DIR_INBOUND;
+       sg_init_one(&dst, req->result, ds);
 
-       return crypto4xx_build_pd(&req->base, ctx, req->src,
-                                 (struct scatterlist *) req->result,
-                                 req->nbytes, NULL, 0);
+       return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
+                                 req->nbytes, NULL, 0, ctx->sa_in,
+                                 ctx->sa_len);
 }
 
 /**
--- a/drivers/crypto/amcc/crypto4xx_core.c
+++ b/drivers/crypto/amcc/crypto4xx_core.c
@@ -194,7 +194,6 @@ void crypto4xx_free_state_record(struct
 static u32 crypto4xx_build_pdr(struct crypto4xx_device *dev)
 {
        int i;
-       struct pd_uinfo *pd_uinfo;
        dev->pdr = dma_alloc_coherent(dev->core_dev->device,
                                      sizeof(struct ce_pd) * PPC4XX_NUM_PD,
                                      &dev->pdr_pa, GFP_ATOMIC);
@@ -224,11 +223,14 @@ static u32 crypto4xx_build_pdr(struct cr
        if (!dev->shadow_sr_pool)
                return -ENOMEM;
        for (i = 0; i < PPC4XX_NUM_PD; i++) {
-               pd_uinfo = &dev->pdr_uinfo[i];
+               struct ce_pd *pd = &dev->pdr[i];
+               struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[i];
+
+               pd->sa = dev->shadow_sa_pool_pa +
+                       sizeof(union shadow_sa_buf) * i;
 
                /* alloc 256 bytes which is enough for any kind of dynamic sa */
                pd_uinfo->sa_va = &dev->shadow_sa_pool[i].sa;
-               pd_uinfo->sa_pa = dev->shadow_sa_pool_pa + 256 * i;
 
                /* alloc state record */
                pd_uinfo->sr_va = &dev->shadow_sr_pool[i];
@@ -291,14 +293,6 @@ static u32 crypto4xx_put_pd_to_pdr(struc
        return 0;
 }
 
-static struct ce_pd *crypto4xx_get_pdp(struct crypto4xx_device *dev,
-                                      dma_addr_t *pd_dma, u32 idx)
-{
-       *pd_dma = dev->pdr_pa + sizeof(struct ce_pd) * idx;
-
-       return &dev->pdr[idx];
-}
-
 /**
  * alloc memory for the gather ring
  * no need to alloc buf for the ring
@@ -520,18 +514,16 @@ static void crypto4xx_copy_pkt_to_dst(st
        }
 }
 
-static u32 crypto4xx_copy_digest_to_dst(struct pd_uinfo *pd_uinfo,
+static void crypto4xx_copy_digest_to_dst(void *dst,
+                                       struct pd_uinfo *pd_uinfo,
                                        struct crypto4xx_ctx *ctx)
 {
        struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *) ctx->sa_in;
 
        if (sa->sa_command_0.bf.hash_alg == SA_HASH_ALG_SHA1) {
-               memcpy((void *) pd_uinfo->dest_va,
-                      pd_uinfo->sr_va->save_digest,
+               memcpy(dst, pd_uinfo->sr_va->save_digest,
                       SA_HASH_ALG_SHA1_DIGEST_SIZE);
        }
-
-       return 0;
 }
 
 static void crypto4xx_ret_sg_desc(struct crypto4xx_device *dev,
@@ -591,7 +583,7 @@ static u32 crypto4xx_ahash_done(struct c
        ahash_req = ahash_request_cast(pd_uinfo->async_req);
        ctx  = crypto_tfm_ctx(ahash_req->base.tfm);
 
-       crypto4xx_copy_digest_to_dst(pd_uinfo,
+       crypto4xx_copy_digest_to_dst(ahash_req->result, pd_uinfo,
                                     crypto_tfm_ctx(ahash_req->base.tfm));
        crypto4xx_ret_sg_desc(dev, pd_uinfo);
 
@@ -651,17 +643,17 @@ static u32 get_next_sd(u32 current)
                return 0;
 }
 
-u32 crypto4xx_build_pd(struct crypto_async_request *req,
+int crypto4xx_build_pd(struct crypto_async_request *req,
                       struct crypto4xx_ctx *ctx,
                       struct scatterlist *src,
                       struct scatterlist *dst,
-                      unsigned int datalen,
-                      void *iv, u32 iv_len)
+                      const unsigned int datalen,
+                      const __le32 *iv, const u32 iv_len,
+                      const struct dynamic_sa_ctl *req_sa,
+                      const unsigned int sa_len)
 {
        struct crypto4xx_device *dev = ctx->dev;
-       dma_addr_t addr, pd_dma, sd_dma, gd_dma;
        struct dynamic_sa_ctl *sa;
-       struct scatterlist *sg;
        struct ce_gd *gd;
        struct ce_pd *pd;
        u32 num_gd, num_sd;
@@ -669,8 +661,9 @@ u32 crypto4xx_build_pd(struct crypto_asy
        u32 fst_sd = 0xffffffff;
        u32 pd_entry;
        unsigned long flags;
-       struct pd_uinfo *pd_uinfo = NULL;
-       unsigned int nbytes = datalen, idx;
+       struct pd_uinfo *pd_uinfo;
+       unsigned int nbytes = datalen;
+       size_t offset_to_sr_ptr;
        u32 gd_idx = 0;
        bool is_busy;
 
@@ -684,7 +677,7 @@ u32 crypto4xx_build_pd(struct crypto_asy
                num_gd = 0;
 
        /* figure how many sd is needed */
-       if (sg_is_last(dst) || ctx->is_hash) {
+       if (sg_is_last(dst)) {
                num_sd = 0;
        } else {
                if (datalen > PPC4XX_SD_BUFFER_SIZE) {
@@ -755,37 +748,27 @@ u32 crypto4xx_build_pd(struct crypto_asy
        }
        spin_unlock_irqrestore(&dev->core_dev->lock, flags);
 
+       pd = &dev->pdr[pd_entry];
+       pd->sa_len = sa_len;
+
        pd_uinfo = &dev->pdr_uinfo[pd_entry];
-       pd = crypto4xx_get_pdp(dev, &pd_dma, pd_entry);
        pd_uinfo->async_req = req;
        pd_uinfo->num_gd = num_gd;
        pd_uinfo->num_sd = num_sd;
 
-       if (iv_len || ctx->is_hash) {
-               pd->sa = pd_uinfo->sa_pa;
-               sa = pd_uinfo->sa_va;
-               if (ctx->direction == DIR_INBOUND)
-                       memcpy(sa, ctx->sa_in, ctx->sa_len * 4);
-               else
-                       memcpy(sa, ctx->sa_out, ctx->sa_len * 4);
+       if (iv_len)
+               memcpy(pd_uinfo->sr_va->save_iv, iv, iv_len);
 
-               memcpy((void *) sa + ctx->offset_to_sr_ptr,
-                       &pd_uinfo->sr_pa, 4);
+       sa = pd_uinfo->sa_va;
+       memcpy(sa, req_sa, sa_len * 4);
+
+       offset_to_sr_ptr = get_dynamic_sa_offset_state_ptr_field(sa);
+       *(u32 *)((unsigned long)sa + offset_to_sr_ptr) = pd_uinfo->sr_pa;
 
-               if (iv_len)
-                       crypto4xx_memcpy_to_le32(pd_uinfo->sr_va->save_iv,
-                                                iv, iv_len);
-       } else {
-               if (ctx->direction == DIR_INBOUND) {
-                       pd->sa = ctx->sa_in_dma_addr;
-                       sa = ctx->sa_in;
-               } else {
-                       pd->sa = ctx->sa_out_dma_addr;
-                       sa = ctx->sa_out;
-               }
-       }
-       pd->sa_len = ctx->sa_len;
        if (num_gd) {
+               dma_addr_t gd_dma;
+               struct scatterlist *sg;
+
                /* get first gd we are going to use */
                gd_idx = fst_gd;
                pd_uinfo->first_gd = fst_gd;
@@ -794,27 +777,30 @@ u32 crypto4xx_build_pd(struct crypto_asy
                pd->src = gd_dma;
                /* enable gather */
                sa->sa_command_0.bf.gather = 1;
-               idx = 0;
-               src = &src[0];
                /* walk the sg, and setup gather array */
+
+               sg = src;
                while (nbytes) {
-                       sg = &src[idx];
-                       addr = dma_map_page(dev->core_dev->device, sg_page(sg),
-                                   sg->offset, sg->length, DMA_TO_DEVICE);
-                       gd->ptr = addr;
-                       gd->ctl_len.len = sg->length;
+                       size_t len;
+
+                       len = min(sg->length, nbytes);
+                       gd->ptr = dma_map_page(dev->core_dev->device,
+                               sg_page(sg), sg->offset, len, DMA_TO_DEVICE);
+                       gd->ctl_len.len = len;
                        gd->ctl_len.done = 0;
                        gd->ctl_len.ready = 1;
-                       if (sg->length >= nbytes)
+                       if (len >= nbytes)
                                break;
+
                        nbytes -= sg->length;
                        gd_idx = get_next_gd(gd_idx);
                        gd = crypto4xx_get_gdp(dev, &gd_dma, gd_idx);
-                       idx++;
+                       sg = sg_next(sg);
                }
        } else {
                pd->src = (u32)dma_map_page(dev->core_dev->device, sg_page(src),
-                               src->offset, src->length, DMA_TO_DEVICE);
+                               src->offset, min(nbytes, src->length),
+                               DMA_TO_DEVICE);
                /*
                 * Disable gather in sa command
                 */
@@ -825,25 +811,24 @@ u32 crypto4xx_build_pd(struct crypto_asy
                pd_uinfo->first_gd = 0xffffffff;
                pd_uinfo->num_gd = 0;
        }
-       if (ctx->is_hash || sg_is_last(dst)) {
+       if (sg_is_last(dst)) {
                /*
                 * we know application give us dst a whole piece of memory
                 * no need to use scatter ring.
-                * In case of is_hash, the icv is always at end of src data.
                 */
                pd_uinfo->using_sd = 0;
                pd_uinfo->first_sd = 0xffffffff;
                pd_uinfo->num_sd = 0;
                pd_uinfo->dest_va = dst;
                sa->sa_command_0.bf.scatter = 0;
-               if (ctx->is_hash)
-                       pd->dest = virt_to_phys((void *)dst);
-               else
-                       pd->dest = (u32)dma_map_page(dev->core_dev->device,
-                                       sg_page(dst), dst->offset,
-                                       dst->length, DMA_TO_DEVICE);
+               pd->dest = (u32)dma_map_page(dev->core_dev->device,
+                                            sg_page(dst), dst->offset,
+                                            min(datalen, dst->length),
+                                            DMA_TO_DEVICE);
        } else {
+               dma_addr_t sd_dma;
                struct ce_sd *sd = NULL;
+
                u32 sd_idx = fst_sd;
                nbytes = datalen;
                sa->sa_command_0.bf.scatter = 1;
@@ -857,7 +842,6 @@ u32 crypto4xx_build_pd(struct crypto_asy
                sd->ctl.done = 0;
                sd->ctl.rdy = 1;
                /* sd->ptr should be setup by sd_init routine*/
-               idx = 0;
                if (nbytes >= PPC4XX_SD_BUFFER_SIZE)
                        nbytes -= PPC4XX_SD_BUFFER_SIZE;
                else
@@ -868,19 +852,23 @@ u32 crypto4xx_build_pd(struct crypto_asy
                        /* setup scatter descriptor */
                        sd->ctl.done = 0;
                        sd->ctl.rdy = 1;
-                       if (nbytes >= PPC4XX_SD_BUFFER_SIZE)
+                       if (nbytes >= PPC4XX_SD_BUFFER_SIZE) {
                                nbytes -= PPC4XX_SD_BUFFER_SIZE;
-                       else
+                       } else {
                                /*
                                 * SD entry can hold PPC4XX_SD_BUFFER_SIZE,
                                 * which is more than nbytes, so done.
                                 */
                                nbytes = 0;
+                       }
                }
        }
 
        sa->sa_command_1.bf.hash_crypto_offset = 0;
-       pd->pd_ctl.w = ctx->pd_ctl;
+       pd->pd_ctl.w = 0;
+       pd->pd_ctl.bf.hash_final =
+               (crypto_tfm_alg_type(req->tfm) == CRYPTO_ALG_TYPE_AHASH);
+       pd->pd_ctl.bf.host_ready = 1;
        pd->pd_ctl_len.w = 0x00400000 | datalen;
        pd_uinfo->state = PD_ENTRY_INUSE | (is_busy ? PD_ENTRY_BUSY : 0);
 
--- a/drivers/crypto/amcc/crypto4xx_core.h
+++ b/drivers/crypto/amcc/crypto4xx_core.h
@@ -71,7 +71,6 @@ struct pd_uinfo {
        u32 num_sd;             /* number of scatter discriptors
                                used by this packet */
        struct dynamic_sa_ctl *sa_va;   /* shadow sa */
-       u32 sa_pa;
        struct sa_state_record *sr_va;  /* state record for shadow sa */
        u32 sr_pa;
        struct scatterlist *dest_va;
@@ -129,11 +128,6 @@ struct crypto4xx_ctx {
        struct sa_state_record *state_record;
        dma_addr_t state_record_dma_addr;
        u32 sa_len;
-       u32 offset_to_sr_ptr;           /* offset to state ptr, in dynamic sa */
-       u32 direction;
-       u32 save_iv;
-       u32 pd_ctl;
-       u32 is_hash;
 };
 
 struct crypto4xx_alg_common {
@@ -170,8 +164,10 @@ int crypto4xx_build_pd(struct crypto_asy
                       struct crypto4xx_ctx *ctx,
                       struct scatterlist *src,
                       struct scatterlist *dst,
-                      unsigned int datalen,
-                      void *iv, u32 iv_len);
+                      const unsigned int datalen,
+                      const __le32 *iv, const u32 iv_len,
+                      const struct dynamic_sa_ctl *sa,
+                      const unsigned int sa_len);
 int crypto4xx_setkey_aes_cbc(struct crypto_ablkcipher *cipher,
                             const u8 *key, unsigned int keylen);
 int crypto4xx_setkey_aes_cfb(struct crypto_ablkcipher *cipher,