Linux-libre 4.14.12-gnu (librecmc/linux-libre.git)
drivers/crypto/rockchip/rk3288_crypto_ablkcipher.c
/*
 * Crypto acceleration support for Rockchip RK3288
 *
 * Copyright (c) 2015, Fuzhou Rockchip Electronics Co., Ltd
 *
 * Author: Zain Wang <zain.wang@rock-chips.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms and conditions of the GNU General Public License,
 * version 2, as published by the Free Software Foundation.
 *
 * Some ideas are from the marvell-cesa.c and s5p-sss.c drivers.
 */
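
/*
 * This file provides the ablkcipher part of the driver: ECB and CBC
 * modes of AES, DES and 3DES-EDE.  Keys and IVs are written directly
 * into the engine's registers, and payload data is moved by the
 * engine's block-cipher DMA one scatterlist entry at a time.
 */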
#include "rk3288_crypto.h"

#define RK_CRYPTO_DEC                   BIT(0)

static void rk_crypto_complete(struct crypto_async_request *base, int err)
{
        if (base->complete)
                base->complete(base, err);
}

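/*
 * Reject requests whose length is not a multiple of the algorithm's
 * alignment size, otherwise hand the request to the driver's common
 * queue.
 */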
static int rk_handle_req(struct rk_crypto_info *dev,
                         struct ablkcipher_request *req)
{
        if (!IS_ALIGNED(req->nbytes, dev->align_size))
                return -EINVAL;
        else
                return dev->enqueue(dev, &req->base);
}

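/*
 * Check that the AES key is 128, 192 or 256 bits and load it directly
 * into the engine's AES key registers.
 */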
static int rk_aes_setkey(struct crypto_ablkcipher *cipher,
                         const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
            keylen != AES_KEYSIZE_256) {
                crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }
        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_AES_KEY_0, key, keylen);
        return 0;
}

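/*
 * DES/3DES setkey: check the key length, reject weak single-DES keys
 * when the transform requests weak-key checking, and load the key into
 * the engine's TDES key registers.
 */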
static int rk_tdes_setkey(struct crypto_ablkcipher *cipher,
                          const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 tmp[DES_EXPKEY_WORDS];

        if (keylen != DES_KEY_SIZE && keylen != DES3_EDE_KEY_SIZE) {
                crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        if (keylen == DES_KEY_SIZE) {
                if (!des_ekey(tmp, key) &&
                    (tfm->crt_flags & CRYPTO_TFM_REQ_WEAK_KEY)) {
                        tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
                        return -EINVAL;
                }
        }

        ctx->keylen = keylen;
        memcpy_toio(ctx->dev->reg + RK_CRYPTO_TDES_KEY1_0, key, keylen);
        return 0;
}

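/*
 * The per-mode encrypt/decrypt entry points below only record the
 * hardware mode bits for the request in the context and then defer to
 * rk_handle_req().
 */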
static int rk_aes_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_ECB_MODE;
        return rk_handle_req(dev, req);
}

static int rk_aes_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_ECB_MODE | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_aes_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_CBC_MODE;
        return rk_handle_req(dev, req);
}

static int rk_aes_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_AES_CBC_MODE | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = 0;
        return rk_handle_req(dev, req);
}

static int rk_des_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC;
        return rk_handle_req(dev, req);
}

static int rk_des_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_CHAINMODE_CBC | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_ecb_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC;
        return rk_handle_req(dev, req);
}

static int rk_des3_ede_cbc_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct rk_crypto_info *dev = ctx->dev;

        ctx->mode = RK_CRYPTO_TDES_SELECT | RK_CRYPTO_TDES_CHAINMODE_CBC |
                    RK_CRYPTO_DEC;
        return rk_handle_req(dev, req);
}

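/*
 * Program the engine for the current request: select TDES or AES
 * control settings based on the block size, load the IV, set up byte
 * swapping and enable the block-cipher DMA interrupts.
 */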
static void rk_ablk_hw_init(struct rk_crypto_info *dev)
{
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);
        struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(req);
        struct crypto_tfm *tfm = crypto_ablkcipher_tfm(cipher);
        struct rk_cipher_ctx *ctx = crypto_ablkcipher_ctx(cipher);
        u32 ivsize, block, conf_reg = 0;

        block = crypto_tfm_alg_blocksize(tfm);
        ivsize = crypto_ablkcipher_ivsize(cipher);

        if (block == DES_BLOCK_SIZE) {
                ctx->mode |= RK_CRYPTO_TDES_FIFO_MODE |
                             RK_CRYPTO_TDES_BYTESWAP_KEY |
                             RK_CRYPTO_TDES_BYTESWAP_IV;
                CRYPTO_WRITE(dev, RK_CRYPTO_TDES_CTRL, ctx->mode);
                memcpy_toio(dev->reg + RK_CRYPTO_TDES_IV_0, req->info, ivsize);
                conf_reg = RK_CRYPTO_DESSEL;
        } else {
                ctx->mode |= RK_CRYPTO_AES_FIFO_MODE |
                             RK_CRYPTO_AES_KEY_CHANGE |
                             RK_CRYPTO_AES_BYTESWAP_KEY |
                             RK_CRYPTO_AES_BYTESWAP_IV;
                if (ctx->keylen == AES_KEYSIZE_192)
                        ctx->mode |= RK_CRYPTO_AES_192BIT_key;
                else if (ctx->keylen == AES_KEYSIZE_256)
                        ctx->mode |= RK_CRYPTO_AES_256BIT_key;
                CRYPTO_WRITE(dev, RK_CRYPTO_AES_CTRL, ctx->mode);
                memcpy_toio(dev->reg + RK_CRYPTO_AES_IV_0, req->info, ivsize);
        }
        conf_reg |= RK_CRYPTO_BYTESWAP_BTFIFO |
                    RK_CRYPTO_BYTESWAP_BRFIFO;
        CRYPTO_WRITE(dev, RK_CRYPTO_CONF, conf_reg);
        CRYPTO_WRITE(dev, RK_CRYPTO_INTENA,
                     RK_CRYPTO_BCDMA_ERR_ENA | RK_CRYPTO_BCDMA_DONE_ENA);
}

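/*
 * Write the source and destination DMA addresses and the transfer
 * length in 32-bit words, then start the block transfer.
 */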
static void crypto_dma_start(struct rk_crypto_info *dev)
{
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAS, dev->addr_in);
        CRYPTO_WRITE(dev, RK_CRYPTO_BRDMAL, dev->count / 4);
        CRYPTO_WRITE(dev, RK_CRYPTO_BTDMAS, dev->addr_out);
        CRYPTO_WRITE(dev, RK_CRYPTO_CTRL, RK_CRYPTO_BLOCK_START |
                     _SBF(RK_CRYPTO_BLOCK_START, 16));
}

static int rk_set_data_start(struct rk_crypto_info *dev)
{
        int err;

        err = dev->load_data(dev, dev->sg_src, dev->sg_dst);
        if (!err)
                crypto_dma_start(dev);
        return err;
}

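/*
 * Record the request's scatterlists and byte counts in the device
 * state, then initialise the hardware and kick off the first DMA chunk
 * under the device lock.
 */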
static int rk_ablk_start(struct rk_crypto_info *dev)
{
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);
        unsigned long flags;
        int err = 0;

        dev->left_bytes = req->nbytes;
        dev->total = req->nbytes;
        dev->sg_src = req->src;
        dev->first = req->src;
        dev->nents = sg_nents(req->src);
        dev->sg_dst = req->dst;
        dev->aligned = 1;

        spin_lock_irqsave(&dev->lock, flags);
        rk_ablk_hw_init(dev);
        err = rk_set_data_start(dev);
        spin_unlock_irqrestore(&dev->lock, flags);
        return err;
}

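/*
 * Copy the final IV back from the engine into the request so that
 * chained CBC operations continue from the correct value.
 */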
static void rk_iv_copyback(struct rk_crypto_info *dev)
{
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        u32 ivsize = crypto_ablkcipher_ivsize(tfm);

        if (ivsize == DES_BLOCK_SIZE)
                memcpy_fromio(req->info, dev->reg + RK_CRYPTO_TDES_IV_0,
                              ivsize);
        else if (ivsize == AES_BLOCK_SIZE)
                memcpy_fromio(req->info, dev->reg + RK_CRYPTO_AES_IV_0, ivsize);
}

/*
 * Handle completion of one DMA chunk: copy bounced data back for
 * unaligned requests, start the next chunk if bytes remain, otherwise
 * copy the IV back and complete the request.
 *
 * Return:
 *      0               no error, processing continues
 *      negative        an error occurred
 */
static int rk_ablk_rx(struct rk_crypto_info *dev)
{
        int err = 0;
        struct ablkcipher_request *req =
                ablkcipher_request_cast(dev->async_req);

        dev->unload_data(dev);
        if (!dev->aligned) {
                if (!sg_pcopy_from_buffer(req->dst, dev->nents,
                                          dev->addr_vir, dev->count,
                                          dev->total - dev->left_bytes -
                                          dev->count)) {
                        err = -EINVAL;
                        goto out_rx;
                }
        }
        if (dev->left_bytes) {
                if (dev->aligned) {
                        if (sg_is_last(dev->sg_src)) {
                                dev_err(dev->dev, "[%s:%d] Lack of data\n",
                                        __func__, __LINE__);
                                err = -ENOMEM;
                                goto out_rx;
                        }
                        dev->sg_src = sg_next(dev->sg_src);
                        dev->sg_dst = sg_next(dev->sg_dst);
                }
                err = rk_set_data_start(dev);
        } else {
                rk_iv_copyback(dev);
                /* the whole request has finished without any error */
                dev->complete(dev->async_req, 0);
                tasklet_schedule(&dev->queue_task);
        }
out_rx:
        return err;
}

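/*
 * Per-transform init/exit: bind the transform to the crypto device,
 * install the ablkcipher start/update/complete callbacks, allocate one
 * bounce page for unaligned data and manage the engine clock.
 */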
static int rk_ablk_cra_init(struct crypto_tfm *tfm)
{
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct crypto_alg *alg = tfm->__crt_alg;
        struct rk_crypto_tmp *algt;

        algt = container_of(alg, struct rk_crypto_tmp, alg.crypto);

        ctx->dev = algt->dev;
        ctx->dev->align_size = crypto_tfm_alg_alignmask(tfm) + 1;
        ctx->dev->start = rk_ablk_start;
        ctx->dev->update = rk_ablk_rx;
        ctx->dev->complete = rk_crypto_complete;
        ctx->dev->addr_vir = (char *)__get_free_page(GFP_KERNEL);

        return ctx->dev->addr_vir ? ctx->dev->enable_clk(ctx->dev) : -ENOMEM;
}

static void rk_ablk_cra_exit(struct crypto_tfm *tfm)
{
        struct rk_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        free_page((unsigned long)ctx->dev->addr_vir);
        ctx->dev->disable_clk(ctx->dev);
}

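/*
 * The algorithm templates below are registered with the kernel crypto
 * API by the platform driver core.
 */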
struct rk_crypto_tmp rk_ecb_aes_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "ecb(aes)",
                .cra_driver_name        = "ecb-aes-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = AES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x0f,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = rk_aes_setkey,
                        .encrypt        = rk_aes_ecb_encrypt,
                        .decrypt        = rk_aes_ecb_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_cbc_aes_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "cbc(aes)",
                .cra_driver_name        = "cbc-aes-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = AES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x0f,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = rk_aes_setkey,
                        .encrypt        = rk_aes_cbc_encrypt,
                        .decrypt        = rk_aes_cbc_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_ecb_des_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "ecb(des)",
                .cra_driver_name        = "ecb-des-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = DES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x07,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = DES_KEY_SIZE,
                        .max_keysize    = DES_KEY_SIZE,
                        .setkey         = rk_tdes_setkey,
                        .encrypt        = rk_des_ecb_encrypt,
                        .decrypt        = rk_des_ecb_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_cbc_des_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "cbc(des)",
                .cra_driver_name        = "cbc-des-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = DES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x07,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = DES_KEY_SIZE,
                        .max_keysize    = DES_KEY_SIZE,
                        .ivsize         = DES_BLOCK_SIZE,
                        .setkey         = rk_tdes_setkey,
                        .encrypt        = rk_des_cbc_encrypt,
                        .decrypt        = rk_des_cbc_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_ecb_des3_ede_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "ecb(des3_ede)",
                .cra_driver_name        = "ecb-des3-ede-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = DES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x07,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = DES3_EDE_KEY_SIZE,
                        .max_keysize    = DES3_EDE_KEY_SIZE,
                        .ivsize         = DES_BLOCK_SIZE,
                        .setkey         = rk_tdes_setkey,
                        .encrypt        = rk_des3_ede_ecb_encrypt,
                        .decrypt        = rk_des3_ede_ecb_decrypt,
                }
        }
};

struct rk_crypto_tmp rk_cbc_des3_ede_alg = {
        .type = ALG_TYPE_CIPHER,
        .alg.crypto = {
                .cra_name               = "cbc(des3_ede)",
                .cra_driver_name        = "cbc-des3-ede-rk",
                .cra_priority           = 300,
                .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                          CRYPTO_ALG_ASYNC,
                .cra_blocksize          = DES_BLOCK_SIZE,
                .cra_ctxsize            = sizeof(struct rk_cipher_ctx),
                .cra_alignmask          = 0x07,
                .cra_type               = &crypto_ablkcipher_type,
                .cra_module             = THIS_MODULE,
                .cra_init               = rk_ablk_cra_init,
                .cra_exit               = rk_ablk_cra_exit,
                .cra_u.ablkcipher       = {
                        .min_keysize    = DES3_EDE_KEY_SIZE,
                        .max_keysize    = DES3_EDE_KEY_SIZE,
                        .ivsize         = DES_BLOCK_SIZE,
                        .setkey         = rk_tdes_setkey,
                        .encrypt        = rk_des3_ede_cbc_encrypt,
                        .decrypt        = rk_des3_ede_cbc_decrypt,
                }
        }
};
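
/*
 * Usage sketch (illustrative only, not part of the driver): a kernel
 * consumer reaches these implementations through the generic
 * ablkcipher API, e.g. for "cbc(aes)".  The key, buffer, length, IV and
 * callback names below are placeholders.
 *
 *      struct crypto_ablkcipher *tfm;
 *      struct ablkcipher_request *req;
 *      struct scatterlist sg;
 *      int err;
 *
 *      tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *
 *      err = crypto_ablkcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *      req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 *      sg_init_one(&sg, buf, len);     (len must be a multiple of AES_BLOCK_SIZE)
 *      ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *                                      my_complete_cb, NULL);
 *      ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);
 *
 *      err = crypto_ablkcipher_encrypt(req);
 *      (-EINPROGRESS or -EBUSY mean the request will complete
 *       asynchronously via my_complete_cb)
 *
 *      once the request has completed:
 *      ablkcipher_request_free(req);
 *      crypto_free_ablkcipher(tfm);
 */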