/*
 * aes-ce-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <linux/module.h>

MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-ce-core.S */
asmlinkage u32 ce_aes_sub(u32 input);
asmlinkage void ce_aes_invert(void *dst, void *src);

asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                   int rounds, int blocks);
asmlinkage void ce_aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                   int rounds, int blocks);

asmlinkage void ce_aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                   int rounds, int blocks, u8 iv[]);
asmlinkage void ce_aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
                                   int rounds, int blocks, u8 iv[]);

asmlinkage void ce_aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
                                   int rounds, int blocks, u8 ctr[]);

asmlinkage void ce_aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
                                   int rounds, int blocks, u8 iv[],
                                   u8 const rk2[], int first);
asmlinkage void ce_aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
                                   int rounds, int blocks, u8 iv[],
                                   u8 const rk2[], int first);

struct aes_block {
        u8 b[AES_BLOCK_SIZE];
};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
        /*
         * # of rounds specified by AES:
         * 128 bit key          10 rounds
         * 192 bit key          12 rounds
         * 256 bit key          14 rounds
         * => n byte key        => 6 + (n/4) rounds
         */
        return 6 + ctx->key_length / 4;
}
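
/*
 * Worked example (informative): a 128-bit key has key_length == 16,
 * so num_rounds() returns 6 + 16/4 = 10 rounds, as specified by
 * FIPS-197.
 */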

static int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
                            unsigned int key_len)
{
        /*
         * The AES key schedule round constants
         */
        static u8 const rcon[] = {
                0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
        };
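        /*
         * Informative: rcon[i] is x^i in GF(2^8) reduced modulo the AES
         * polynomial x^8 + x^4 + x^3 + x + 1, which is why the sequence
         * wraps from 0x80 to 0x1b.
         */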

        u32 kwords = key_len / sizeof(u32);
        struct aes_block *key_enc, *key_dec;
        int i, j;

        if (key_len != AES_KEYSIZE_128 &&
            key_len != AES_KEYSIZE_192 &&
            key_len != AES_KEYSIZE_256)
                return -EINVAL;

        memcpy(ctx->key_enc, in_key, key_len);
        ctx->key_length = key_len;

        kernel_neon_begin();
        for (i = 0; i < sizeof(rcon); i++) {
                u32 *rki = ctx->key_enc + (i * kwords);
                u32 *rko = rki + kwords;

#ifndef CONFIG_CPU_BIG_ENDIAN
                rko[0] = ror32(ce_aes_sub(rki[kwords - 1]), 8);
                rko[0] = rko[0] ^ rki[0] ^ rcon[i];
#else
                rko[0] = rol32(ce_aes_sub(rki[kwords - 1]), 8);
                rko[0] = rko[0] ^ rki[0] ^ (rcon[i] << 24);
#endif
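                /*
                 * rko[0] now equals SubWord(RotWord(rki[kwords - 1])) ^
                 * rcon[i], the FIPS-197 key schedule core: ce_aes_sub()
                 * applies the AES S-box to each byte of the word, and
                 * ror32()/rol32() performs the byte rotation.
                 */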
                rko[1] = rko[0] ^ rki[1];
                rko[2] = rko[1] ^ rki[2];
                rko[3] = rko[2] ^ rki[3];

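                /*
                 * The early breaks below stop the expansion once exactly
                 * 4 * (num_rounds + 1) words have been generated: a final
                 * partial iteration suffices for 192- and 256-bit keys,
                 * while 128-bit keys consume all ten round constants.
                 */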
                if (key_len == AES_KEYSIZE_192) {
                        if (i >= 7)
                                break;
                        rko[4] = rko[3] ^ rki[4];
                        rko[5] = rko[4] ^ rki[5];
                } else if (key_len == AES_KEYSIZE_256) {
                        if (i >= 6)
                                break;
                        rko[4] = ce_aes_sub(rko[3]) ^ rki[4];
                        rko[5] = rko[4] ^ rki[5];
                        rko[6] = rko[5] ^ rki[6];
                        rko[7] = rko[6] ^ rki[7];
                }
        }

        /*
         * Generate the decryption keys for the Equivalent Inverse Cipher.
         * This involves reversing the order of the round keys, and applying
         * the Inverse Mix Columns transformation on all but the first and
         * the last one.
         */
        key_enc = (struct aes_block *)ctx->key_enc;
        key_dec = (struct aes_block *)ctx->key_dec;
        j = num_rounds(ctx);

        key_dec[0] = key_enc[j];
        for (i = 1, j--; j > 0; i++, j--)
                ce_aes_invert(key_dec + i, key_enc + j);
        key_dec[i] = key_enc[0];

        kernel_neon_end();
        return 0;
}

static int ce_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
                         unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = ce_aes_expandkey(ctx, in_key, key_len);
        if (!ret)
                return 0;

        tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
        return -EINVAL;
}

struct crypto_aes_xts_ctx {
        struct crypto_aes_ctx key1;
        struct crypto_aes_ctx __aligned(8) key2;
};
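
/*
 * Note: XTS takes two independent AES keys. key2 only ever encrypts the
 * sector tweak, which is why both xts_encrypt() and xts_decrypt() below
 * pass key2.key_enc to the core routines.
 */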

static int xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        struct crypto_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
        int ret;

        ret = ce_aes_expandkey(&ctx->key1, in_key, key_len / 2);
        if (!ret)
                ret = ce_aes_expandkey(&ctx->key2, &in_key[key_len / 2],
                                       key_len / 2);
        if (!ret)
                return 0;

        tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
        return -EINVAL;
}

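/*
 * All the mode handlers below follow the same pattern: walk the
 * scatterlists in block-sized chunks and hand each chunk to the NEON
 * core inside a kernel_neon_begin()/kernel_neon_end() section.
 * CRYPTO_TFM_REQ_MAY_SLEEP is cleared because kernel-mode NEON runs
 * with preemption disabled, so the walk must not sleep.
 */
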
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        unsigned int blocks;
        int err;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key_enc, num_rounds(ctx), blocks);
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        unsigned int blocks;
        int err;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key_dec, num_rounds(ctx), blocks);
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        unsigned int blocks;
        int err;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                ce_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key_enc, num_rounds(ctx), blocks,
                                   walk.iv);
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        unsigned int blocks;
        int err;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                ce_aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key_dec, num_rounds(ctx), blocks,
                                   walk.iv);
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();
        return err;
}

static int ctr_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err, blocks;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);

        kernel_neon_begin();
        while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
                ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key_enc, num_rounds(ctx), blocks,
                                   walk.iv);
                nbytes -= blocks * AES_BLOCK_SIZE;
                if (nbytes && nbytes == walk.nbytes % AES_BLOCK_SIZE)
                        break;
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        if (walk.nbytes % AES_BLOCK_SIZE) {
                u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
                u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
                u8 __aligned(8) tail[AES_BLOCK_SIZE];

                /*
                 * The minimum guaranteed alignment is 8 bytes, so if
                 * nbytes is <= 8, we need to pass blocks == -1 to tell
                 * ce_aes_ctr_encrypt() to only read half a block.
                 */
                blocks = (nbytes <= 8) ? -1 : 1;

                ce_aes_ctr_encrypt(tail, tsrc, (u8 *)ctx->key_enc,
                                   num_rounds(ctx), blocks, walk.iv);
                memcpy(tdst, tail, nbytes);
                err = blkcipher_walk_done(desc, &walk, 0);
        }
        kernel_neon_end();

        return err;
}

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        int err, first, rounds = num_rounds(&ctx->key1);
        struct blkcipher_walk walk;
        unsigned int blocks;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key1.key_enc, rounds, blocks,
                                   walk.iv, (u8 *)ctx->key2.key_enc, first);
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();

        return err;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        int err, first, rounds = num_rounds(&ctx->key1);
        struct blkcipher_walk walk;
        unsigned int blocks;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        kernel_neon_begin();
        for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
                ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
                                   (u8 *)ctx->key1.key_dec, rounds, blocks,
                                   walk.iv, (u8 *)ctx->key2.key_enc, first);
                err = blkcipher_walk_done(desc, &walk,
                                          walk.nbytes % AES_BLOCK_SIZE);
        }
        kernel_neon_end();

        return err;
}

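/*
 * The first four entries below are synchronous, internal-only
 * implementations (priority 0, CRYPTO_ALG_INTERNAL); the last four are
 * the ablk_helper async wrappers (priority 300) that external users
 * actually see. The wrappers call the internal versions directly when
 * the NEON unit is usable and defer to a cryptd worker otherwise.
 */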
static struct crypto_alg aes_algs[] = { {
        .cra_name               = "__ecb-aes-ce",
        .cra_driver_name        = "__driver-ecb-aes-ce",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ce_aes_setkey,
                .encrypt        = ecb_encrypt,
                .decrypt        = ecb_decrypt,
        },
}, {
        .cra_name               = "__cbc-aes-ce",
        .cra_driver_name        = "__driver-cbc-aes-ce",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ce_aes_setkey,
                .encrypt        = cbc_encrypt,
                .decrypt        = cbc_decrypt,
        },
}, {
        .cra_name               = "__ctr-aes-ce",
        .cra_driver_name        = "__driver-ctr-aes-ce",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ce_aes_setkey,
                .encrypt        = ctr_encrypt,
                .decrypt        = ctr_encrypt,
        },
}, {
        .cra_name               = "__xts-aes-ce",
        .cra_driver_name        = "__driver-xts-aes-ce",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_xts_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = xts_set_key,
                .encrypt        = xts_encrypt,
                .decrypt        = xts_decrypt,
        },
}, {
        .cra_name               = "ecb(aes)",
        .cra_driver_name        = "ecb-aes-ce",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
}, {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-ce",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
}, {
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "ctr-aes-ce",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
}, {
        .cra_name               = "xts(aes)",
        .cra_driver_name        = "xts-aes-ce",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 7,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        }
} };

static int __init aes_init(void)
{
        if (!(elf_hwcap2 & HWCAP2_AES))
                return -ENODEV;
        return crypto_register_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

static void __exit aes_exit(void)
{
        crypto_unregister_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

module_init(aes_init);
module_exit(aes_exit);
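
/*
 * Informative usage sketch (not part of this driver): a kernel user
 * reaches these algorithms through the generic crypto API, e.g.
 *
 *      struct crypto_ablkcipher *tfm =
 *              crypto_alloc_ablkcipher("xts(aes)", 0, 0);
 *
 * which resolves to the priority-300 "xts-aes-ce" wrapper above
 * whenever the CPU advertises HWCAP2_AES.
 */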