These changes are the raw update to the linux-4.4.6-rt14 kernel sources.
[kvmfornfv.git] / kernel / drivers / crypto / vmx / aes_ctr.c
/**
 * AES CTR routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015 International Business Machines Inc.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; version 2 only.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 */

#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/hardirq.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/scatterwalk.h>
#include "aesp8-ppc.h"

struct p8_aes_ctr_ctx {
        struct crypto_blkcipher *fallback; /* software ctr(aes) used when VMX is unavailable */
        struct aes_key enc_key;            /* expanded key for the P8 assembler routines */
};

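/*
 * Tie a software ctr(aes) implementation to the tfm at init time to fall
 * back on.  Passing CRYPTO_ALG_NEED_FALLBACK in the lookup mask excludes
 * implementations that themselves require a fallback, so the lookup can
 * never resolve back to this driver.
 */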
static int p8_aes_ctr_init(struct crypto_tfm *tfm)
{
        const char *alg;
        struct crypto_blkcipher *fallback;
        struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

        alg = crypto_tfm_alg_name(tfm);
        if (!alg) {
                printk(KERN_ERR "Failed to get algorithm name.\n");
                return -ENOENT;
        }

        fallback = crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(fallback)) {
                printk(KERN_ERR
                       "Failed to allocate transformation for '%s': %ld\n",
                       alg, PTR_ERR(fallback));
                return PTR_ERR(fallback);
        }
        printk(KERN_INFO "Using '%s' as fallback implementation.\n",
               crypto_tfm_alg_driver_name(crypto_blkcipher_tfm(fallback)));

        crypto_blkcipher_set_flags(fallback, crypto_tfm_get_flags(tfm));
        ctx->fallback = fallback;

        return 0;
}

static void p8_aes_ctr_exit(struct crypto_tfm *tfm)
{
        struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

        if (ctx->fallback) {
                crypto_free_blkcipher(ctx->fallback);
                ctx->fallback = NULL;
        }
}

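/*
 * The P8 assembler routines run on the AltiVec/VSX register file.
 * enable_kernel_altivec()/enable_kernel_vsx() give the kernel ownership
 * of the vector unit, and pagefault_disable() keeps a page fault from
 * sneaking in and clobbering that state before pagefault_enable().
 */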
static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
                             unsigned int keylen)
{
        int ret;
        struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

        pagefault_disable();
        enable_kernel_altivec();
        enable_kernel_vsx();
        ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
        pagefault_enable();
        if (ret)
                return -EINVAL;

        /* Keep the fallback keyed as well so it can take over transparently. */
        return crypto_blkcipher_setkey(ctx->fallback, key, keylen);
}

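/*
 * Handle the final partial block of a request: encrypt the current
 * counter block once to produce keystream, XOR only the remaining
 * nbytes of input with it, and advance the counter.
 */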
static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
                             struct blkcipher_walk *walk)
{
        u8 *ctrblk = walk->iv;
        u8 keystream[AES_BLOCK_SIZE];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        pagefault_disable();
        enable_kernel_altivec();
        enable_kernel_vsx();
        aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
        pagefault_enable();

        crypto_xor(keystream, src, nbytes);
        memcpy(dst, keystream, nbytes);
        crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

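/*
 * Encrypt (or, CTR being a stream mode, equivalently decrypt) a request.
 * Vector state cannot be used from interrupt context, so such requests go
 * straight to the fallback; otherwise full blocks are fed to the assembler
 * routine and any trailing partial block is finished by p8_aes_ctr_final().
 */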
static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
                            struct scatterlist *dst,
                            struct scatterlist *src, unsigned int nbytes)
{
        int ret;
        u64 inc;
        struct blkcipher_walk walk;
        struct p8_aes_ctr_ctx *ctx =
                crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
        struct blkcipher_desc fallback_desc = {
                .tfm = ctx->fallback,
                .info = desc->info,
                .flags = desc->flags
        };

        if (in_interrupt()) {
                ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src,
                                               nbytes);
        } else {
                blkcipher_walk_init(&walk, dst, src, nbytes);
                ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
                while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                        pagefault_disable();
                        enable_kernel_altivec();
                        enable_kernel_vsx();
                        aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
                                                    walk.dst.virt.addr,
                                                    (nbytes &
                                                     AES_BLOCK_MASK) /
                                                    AES_BLOCK_SIZE,
                                                    &ctx->enc_key,
                                                    walk.iv);
                        pagefault_enable();

                        /*
                         * The assembler routine does not write the updated
                         * counter back, so advance the IV by the number of
                         * blocks just processed.
                         */
                        inc = (nbytes & AES_BLOCK_MASK) / AES_BLOCK_SIZE;
                        while (inc--)
                                crypto_inc(walk.iv, AES_BLOCK_SIZE);

                        nbytes &= AES_BLOCK_SIZE - 1;
                        ret = blkcipher_walk_done(desc, &walk, nbytes);
                }
                if (walk.nbytes) {
                        p8_aes_ctr_final(ctx, &walk);
                        ret = blkcipher_walk_done(desc, &walk, 0);
                }
        }

        return ret;
}

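/*
 * Priority 1000 makes this implementation win over the generic ctr(aes)
 * template; CRYPTO_ALG_NEED_FALLBACK advertises that it must be paired
 * with a fallback.  cra_blocksize is 1 because CTR turns AES into a
 * stream cipher that accepts requests of any length.
 */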
struct crypto_alg p8_aes_ctr_alg = {
        .cra_name = "ctr(aes)",
        .cra_driver_name = "p8_aes_ctr",
        .cra_module = THIS_MODULE,
        .cra_priority = 1000,
        .cra_type = &crypto_blkcipher_type,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
        .cra_alignmask = 0,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
        .cra_init = p8_aes_ctr_init,
        .cra_exit = p8_aes_ctr_exit,
        .cra_blkcipher = {
                          /* CTR needs a full counter block as IV; 0 here
                           * would break callers that set an IV via the API. */
                          .ivsize = AES_BLOCK_SIZE,
                          .min_keysize = AES_MIN_KEY_SIZE,
                          .max_keysize = AES_MAX_KEY_SIZE,
                          .setkey = p8_aes_ctr_setkey,
                          .encrypt = p8_aes_ctr_crypt,
                          .decrypt = p8_aes_ctr_crypt,
        },
};
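
For reference, the sketch below shows how a kernel-side client of this era's
crypto API would drive the transformation. It is an illustration, not part of
the driver: example_ctr_aes(), its parameters, and the in-place buffer handling
are assumptions made for the example; only the crypto API calls themselves are
real 4.4-era interfaces.

#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <crypto/aes.h>

/* Illustrative only: CTR-process len bytes of buf in place. */
static int example_ctr_aes(const u8 *key, unsigned int keylen,
                           u8 *iv, u8 *buf, unsigned int len)
{
        struct crypto_blkcipher *tfm;
        struct blkcipher_desc desc;
        struct scatterlist sg;
        int ret;

        /* Resolves to the highest-priority ctr(aes); on a POWER8 with
         * this module loaded, that is p8_aes_ctr (priority 1000). */
        tfm = crypto_alloc_blkcipher("ctr(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        desc.tfm = tfm;
        desc.flags = 0;

        ret = crypto_blkcipher_setkey(tfm, key, keylen);
        if (!ret) {
                /* The IV doubles as the initial counter block. */
                crypto_blkcipher_set_iv(tfm, iv, AES_BLOCK_SIZE);
                sg_init_one(&sg, buf, len);
                /* CTR is a stream mode: encrypt and decrypt are the same
                 * operation, and len need not be block-aligned. */
                ret = crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
        }

        crypto_free_blkcipher(tfm);
        return ret;
}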