kvmfornfv.git: kernel/drivers/crypto/caam/caamalg.c (commit "Upgrade to 4.4.50-rt62")
1 /*
2  * caam - Freescale FSL CAAM support for crypto API
3  *
4  * Copyright 2008-2011 Freescale Semiconductor, Inc.
5  *
6  * Based on talitos crypto API driver.
7  *
8  * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
9  *
10  * ---------------                     ---------------
11  * | JobDesc #1  |-------------------->|  ShareDesc  |
12  * | *(packet 1) |                     |   (PDB)     |
13  * ---------------      |------------->|  (hashKey)  |
14  *       .              |              | (cipherKey) |
15  *       .              |    |-------->| (operation) |
16  * ---------------      |    |         ---------------
17  * | JobDesc #2  |------|    |
18  * | *(packet 2) |           |
19  * ---------------           |
20  *       .                   |
21  *       .                   |
22  * ---------------           |
23  * | JobDesc #3  |------------
24  * | *(packet 3) |
25  * ---------------
26  *
27  * The SharedDesc never changes for a connection unless rekeyed, but
28  * each packet will likely be in a different place. So all we need
29  * to know to process the packet is where the input is, where the
30  * output goes, and what context we want to process with. Context is
31  * in the SharedDesc, packet references in the JobDesc.
32  *
33  * So, a job desc looks like:
34  *
35  * ---------------------
36  * | Header            |
37  * | ShareDesc Pointer |
38  * | SEQ_OUT_PTR       |
39  * | (output buffer)   |
40  * | (output length)   |
41  * | SEQ_IN_PTR        |
42  * | (input buffer)    |
43  * | (input length)    |
44  * ---------------------
45  */
46
47 #include "compat.h"
48
49 #include "regs.h"
50 #include "intern.h"
51 #include "desc_constr.h"
52 #include "jr.h"
53 #include "error.h"
54 #include "sg_sw_sec4.h"
55 #include "key_gen.h"
56
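/*
 * Editor's sketch (not part of the original driver): a minimal illustration
 * of the job-descriptor layout described in the banner comment at the top of
 * this file, built with the generic desc_constr.h helpers: header plus
 * shared-descriptor pointer, then SEQ OUT / SEQ IN pointers for the
 * per-packet output and input buffers.  The function name and the zero
 * "options" flags are assumptions chosen for illustration; the job
 * descriptors the driver actually submits are built by its job-init helpers
 * further down the file.
 */
static void __maybe_unused example_build_job_desc(u32 *desc,
                                                  dma_addr_t sh_desc_dma,
                                                  int sh_desc_len,
                                                  dma_addr_t dst_dma, u32 dst_len,
                                                  dma_addr_t src_dma, u32 src_len)
{
        /* Header + pointer to the reusable shared descriptor */
        init_job_desc_shared(desc, sh_desc_dma, sh_desc_len,
                             HDR_SHARE_DEFER | HDR_REVERSE);
        /* SEQ_OUT_PTR: output buffer and length for this packet */
        append_seq_out_ptr(desc, dst_dma, dst_len, 0);
        /* SEQ_IN_PTR: input buffer and length for this packet */
        append_seq_in_ptr(desc, src_dma, src_len, 0);
}
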
57 /*
58  * crypto alg
59  */
60 #define CAAM_CRA_PRIORITY               3000
61 /* max key is sum of AES_MAX_KEY_SIZE, CTR_RFC3686_NONCE_SIZE and max split key size */
62 #define CAAM_MAX_KEY_SIZE               (AES_MAX_KEY_SIZE + \
63                                          CTR_RFC3686_NONCE_SIZE + \
64                                          SHA512_DIGEST_SIZE * 2)
65 /* max IV is max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
66 #define CAAM_MAX_IV_LENGTH              16
67
68 #define AEAD_DESC_JOB_IO_LEN            (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
69 #define GCM_DESC_JOB_IO_LEN             (AEAD_DESC_JOB_IO_LEN + \
70                                          CAAM_CMD_SZ * 4)
71 #define AUTHENC_DESC_JOB_IO_LEN         (AEAD_DESC_JOB_IO_LEN + \
72                                          CAAM_CMD_SZ * 5)
73
74 /* length of descriptors text */
75 #define DESC_AEAD_BASE                  (4 * CAAM_CMD_SZ)
76 #define DESC_AEAD_ENC_LEN               (DESC_AEAD_BASE + 11 * CAAM_CMD_SZ)
77 #define DESC_AEAD_DEC_LEN               (DESC_AEAD_BASE + 15 * CAAM_CMD_SZ)
78 #define DESC_AEAD_GIVENC_LEN            (DESC_AEAD_ENC_LEN + 9 * CAAM_CMD_SZ)
79
80 /* Note: Nonce is counted in enckeylen */
81 #define DESC_AEAD_CTR_RFC3686_LEN       (4 * CAAM_CMD_SZ)
82
83 #define DESC_AEAD_NULL_BASE             (3 * CAAM_CMD_SZ)
84 #define DESC_AEAD_NULL_ENC_LEN          (DESC_AEAD_NULL_BASE + 11 * CAAM_CMD_SZ)
85 #define DESC_AEAD_NULL_DEC_LEN          (DESC_AEAD_NULL_BASE + 13 * CAAM_CMD_SZ)
86
87 #define DESC_GCM_BASE                   (3 * CAAM_CMD_SZ)
88 #define DESC_GCM_ENC_LEN                (DESC_GCM_BASE + 16 * CAAM_CMD_SZ)
89 #define DESC_GCM_DEC_LEN                (DESC_GCM_BASE + 12 * CAAM_CMD_SZ)
90
91 #define DESC_RFC4106_BASE               (3 * CAAM_CMD_SZ)
92 #define DESC_RFC4106_ENC_LEN            (DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)
93 #define DESC_RFC4106_DEC_LEN            (DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)
94
95 #define DESC_RFC4543_BASE               (3 * CAAM_CMD_SZ)
96 #define DESC_RFC4543_ENC_LEN            (DESC_RFC4543_BASE + 11 * CAAM_CMD_SZ)
97 #define DESC_RFC4543_DEC_LEN            (DESC_RFC4543_BASE + 12 * CAAM_CMD_SZ)
98
99 #define DESC_ABLKCIPHER_BASE            (3 * CAAM_CMD_SZ)
100 #define DESC_ABLKCIPHER_ENC_LEN         (DESC_ABLKCIPHER_BASE + \
101                                          20 * CAAM_CMD_SZ)
102 #define DESC_ABLKCIPHER_DEC_LEN         (DESC_ABLKCIPHER_BASE + \
103                                          15 * CAAM_CMD_SZ)
104
105 #define DESC_MAX_USED_BYTES             (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
106 #define DESC_MAX_USED_LEN               (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
107
108 #ifdef DEBUG
109 /* for print_hex_dumps with line references */
110 #define debug(format, arg...) printk(format, arg)
111 #else
112 #define debug(format, arg...)
113 #endif
114 static struct list_head alg_list;
115
116 struct caam_alg_entry {
117         int class1_alg_type;
118         int class2_alg_type;
119         int alg_op;
120         bool rfc3686;
121         bool geniv;
122 };
123
124 struct caam_aead_alg {
125         struct aead_alg aead;
126         struct caam_alg_entry caam;
127         bool registered;
128 };
129
130 /* Set DK bit in class 1 operation if shared */
131 static inline void append_dec_op1(u32 *desc, u32 type)
132 {
133         u32 *jump_cmd, *uncond_jump_cmd;
134
135         /* DK bit is valid only for AES */
136         if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
137                 append_operation(desc, type | OP_ALG_AS_INITFINAL |
138                                  OP_ALG_DECRYPT);
139                 return;
140         }
141
142         jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
143         append_operation(desc, type | OP_ALG_AS_INITFINAL |
144                          OP_ALG_DECRYPT);
145         uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
146         set_jump_tgt_here(desc, jump_cmd);
147         append_operation(desc, type | OP_ALG_AS_INITFINAL |
148                          OP_ALG_DECRYPT | OP_ALG_AAI_DK);
149         set_jump_tgt_here(desc, uncond_jump_cmd);
150 }
151
152 /*
153  * For aead functions, read payload and write payload,
154  * both of which are specified in req->src and req->dst
155  */
156 static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
157 {
158         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
159         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
160                              KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
161 }
162
163 /*
164  * For ablkcipher encrypt and decrypt, read from req->src and
165  * write to req->dst
166  */
167 static inline void ablkcipher_append_src_dst(u32 *desc)
168 {
169         append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
170         append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
171         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
172                              KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
173         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
174 }
175
176 /*
177  * per-session context
178  */
179 struct caam_ctx {
180         struct device *jrdev;
181         u32 sh_desc_enc[DESC_MAX_USED_LEN];
182         u32 sh_desc_dec[DESC_MAX_USED_LEN];
183         u32 sh_desc_givenc[DESC_MAX_USED_LEN];
184         dma_addr_t sh_desc_enc_dma;
185         dma_addr_t sh_desc_dec_dma;
186         dma_addr_t sh_desc_givenc_dma;
187         u32 class1_alg_type;
188         u32 class2_alg_type;
189         u32 alg_op;
190         u8 key[CAAM_MAX_KEY_SIZE];
191         dma_addr_t key_dma;
192         unsigned int enckeylen;
193         unsigned int split_key_len;
194         unsigned int split_key_pad_len;
195         unsigned int authsize;
196 };
197
198 static void append_key_aead(u32 *desc, struct caam_ctx *ctx,
199                             int keys_fit_inline, bool is_rfc3686)
200 {
201         u32 *nonce;
202         unsigned int enckeylen = ctx->enckeylen;
203
204         /*
205          * RFC3686 specific:
206          *      | ctx->key = {AUTH_KEY, ENC_KEY, NONCE}
207          *      | enckeylen = encryption key size + nonce size
208          */
209         if (is_rfc3686)
210                 enckeylen -= CTR_RFC3686_NONCE_SIZE;
211
212         if (keys_fit_inline) {
213                 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
214                                   ctx->split_key_len, CLASS_2 |
215                                   KEY_DEST_MDHA_SPLIT | KEY_ENC);
216                 append_key_as_imm(desc, (void *)ctx->key +
217                                   ctx->split_key_pad_len, enckeylen,
218                                   enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
219         } else {
220                 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
221                            KEY_DEST_MDHA_SPLIT | KEY_ENC);
222                 append_key(desc, ctx->key_dma + ctx->split_key_pad_len,
223                            enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
224         }
225
226         /* Load Counter into CONTEXT1 reg */
227         if (is_rfc3686) {
228                 nonce = (u32 *)((void *)ctx->key + ctx->split_key_pad_len +
229                                enckeylen);
230                 append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
231                                     LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
232                 append_move(desc,
233                             MOVE_SRC_OUTFIFO |
234                             MOVE_DEST_CLASS1CTX |
235                             (16 << MOVE_OFFSET_SHIFT) |
236                             (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
237         }
238 }
239
240 static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx,
241                                   int keys_fit_inline, bool is_rfc3686)
242 {
243         u32 *key_jump_cmd;
244
245         /* Note: Context registers are saved. */
246         init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
247
248         /* Skip if already shared */
249         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
250                                    JUMP_COND_SHRD);
251
252         append_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);
253
254         set_jump_tgt_here(desc, key_jump_cmd);
255 }
256
257 static int aead_null_set_sh_desc(struct crypto_aead *aead)
258 {
259         struct caam_ctx *ctx = crypto_aead_ctx(aead);
260         struct device *jrdev = ctx->jrdev;
261         bool keys_fit_inline = false;
262         u32 *key_jump_cmd, *jump_cmd, *read_move_cmd, *write_move_cmd;
263         u32 *desc;
264
265         /*
266          * Job Descriptor and Shared Descriptors
267          * must all fit into the 64-word Descriptor h/w Buffer
268          */
269         if (DESC_AEAD_NULL_ENC_LEN + AEAD_DESC_JOB_IO_LEN +
270             ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
271                 keys_fit_inline = true;
272
273         /* aead_encrypt shared descriptor */
274         desc = ctx->sh_desc_enc;
275
276         init_sh_desc(desc, HDR_SHARE_SERIAL);
277
278         /* Skip if already shared */
279         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
280                                    JUMP_COND_SHRD);
281         if (keys_fit_inline)
282                 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
283                                   ctx->split_key_len, CLASS_2 |
284                                   KEY_DEST_MDHA_SPLIT | KEY_ENC);
285         else
286                 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
287                            KEY_DEST_MDHA_SPLIT | KEY_ENC);
288         set_jump_tgt_here(desc, key_jump_cmd);
289
290         /* assoclen + cryptlen = seqinlen */
291         append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
292
293         /* Prepare to read and write cryptlen + assoclen bytes */
294         append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
295         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
296
297         /*
298          * MOVE_LEN opcode is not available in all SEC HW revisions,
299          * thus we need to do some magic, i.e. self-patch the descriptor
300          * buffer.
301          */
302         read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
303                                     MOVE_DEST_MATH3 |
304                                     (0x6 << MOVE_LEN_SHIFT));
305         write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
306                                      MOVE_DEST_DESCBUF |
307                                      MOVE_WAITCOMP |
308                                      (0x8 << MOVE_LEN_SHIFT));
309
310         /* Class 2 operation */
311         append_operation(desc, ctx->class2_alg_type |
312                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
313
314         /* Read and write cryptlen bytes */
315         aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
316
317         set_move_tgt_here(desc, read_move_cmd);
318         set_move_tgt_here(desc, write_move_cmd);
319         append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
320         append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
321                     MOVE_AUX_LS);
322
323         /* Write ICV */
324         append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
325                          LDST_SRCDST_BYTE_CONTEXT);
326
327         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
328                                               desc_bytes(desc),
329                                               DMA_TO_DEVICE);
330         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
331                 dev_err(jrdev, "unable to map shared descriptor\n");
332                 return -ENOMEM;
333         }
334 #ifdef DEBUG
335         print_hex_dump(KERN_ERR,
336                        "aead null enc shdesc@"__stringify(__LINE__)": ",
337                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
338                        desc_bytes(desc), 1);
339 #endif
340
341         /*
342          * Job Descriptor and Shared Descriptors
343          * must all fit into the 64-word Descriptor h/w Buffer
344          */
345         keys_fit_inline = false;
346         if (DESC_AEAD_NULL_DEC_LEN + DESC_JOB_IO_LEN +
347             ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
348                 keys_fit_inline = true;
349
350         desc = ctx->sh_desc_dec;
351
352         /* aead_decrypt shared descriptor */
353         init_sh_desc(desc, HDR_SHARE_SERIAL);
354
355         /* Skip if already shared */
356         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
357                                    JUMP_COND_SHRD);
358         if (keys_fit_inline)
359                 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
360                                   ctx->split_key_len, CLASS_2 |
361                                   KEY_DEST_MDHA_SPLIT | KEY_ENC);
362         else
363                 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
364                            KEY_DEST_MDHA_SPLIT | KEY_ENC);
365         set_jump_tgt_here(desc, key_jump_cmd);
366
367         /* Class 2 operation */
368         append_operation(desc, ctx->class2_alg_type |
369                          OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
370
371         /* assoclen + cryptlen = seqoutlen */
372         append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
373
374         /* Prepare to read and write cryptlen + assoclen bytes */
375         append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
376         append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
377
378         /*
379          * MOVE_LEN opcode is not available in all SEC HW revisions,
380          * thus we need to do some magic, i.e. self-patch the descriptor
381          * buffer.
382          */
383         read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
384                                     MOVE_DEST_MATH2 |
385                                     (0x6 << MOVE_LEN_SHIFT));
386         write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
387                                      MOVE_DEST_DESCBUF |
388                                      MOVE_WAITCOMP |
389                                      (0x8 << MOVE_LEN_SHIFT));
390
391         /* Read and write cryptlen bytes */
392         aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
393
394         /*
395          * Insert a NOP here, since we need at least 4 instructions between
396          * code patching the descriptor buffer and the location being patched.
397          */
398         jump_cmd = append_jump(desc, JUMP_TEST_ALL);
399         set_jump_tgt_here(desc, jump_cmd);
400
401         set_move_tgt_here(desc, read_move_cmd);
402         set_move_tgt_here(desc, write_move_cmd);
403         append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
404         append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
405                     MOVE_AUX_LS);
406         append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
407
408         /* Load ICV */
409         append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
410                              FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
411
412         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
413                                               desc_bytes(desc),
414                                               DMA_TO_DEVICE);
415         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
416                 dev_err(jrdev, "unable to map shared descriptor\n");
417                 return -ENOMEM;
418         }
419 #ifdef DEBUG
420         print_hex_dump(KERN_ERR,
421                        "aead null dec shdesc@"__stringify(__LINE__)": ",
422                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
423                        desc_bytes(desc), 1);
424 #endif
425
426         return 0;
427 }
428
429 static int aead_set_sh_desc(struct crypto_aead *aead)
430 {
431         struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
432                                                  struct caam_aead_alg, aead);
433         unsigned int ivsize = crypto_aead_ivsize(aead);
434         struct caam_ctx *ctx = crypto_aead_ctx(aead);
435         struct device *jrdev = ctx->jrdev;
436         bool keys_fit_inline;
437         u32 geniv, moveiv;
438         u32 ctx1_iv_off = 0;
439         u32 *desc;
440         const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
441                                OP_ALG_AAI_CTR_MOD128);
442         const bool is_rfc3686 = alg->caam.rfc3686;
443
444         if (!ctx->authsize)
445                 return 0;
446
447         /* NULL encryption / decryption */
448         if (!ctx->enckeylen)
449                 return aead_null_set_sh_desc(aead);
450
451         /*
452          * AES-CTR needs to load IV in CONTEXT1 reg
453          * at an offset of 128bits (16bytes)
454          * CONTEXT1[255:128] = IV
455          */
456         if (ctr_mode)
457                 ctx1_iv_off = 16;
458
459         /*
460          * RFC3686 specific:
461          *      CONTEXT1[255:128] = {NONCE, IV, COUNTER}
462          */
463         if (is_rfc3686)
464                 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
465
466         if (alg->caam.geniv)
467                 goto skip_enc;
468
469         /*
470          * Job Descriptor and Shared Descriptors
471          * must all fit into the 64-word Descriptor h/w Buffer
472          */
473         keys_fit_inline = false;
474         if (DESC_AEAD_ENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
475             ctx->split_key_pad_len + ctx->enckeylen +
476             (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
477             CAAM_DESC_BYTES_MAX)
478                 keys_fit_inline = true;
479
480         /* aead_encrypt shared descriptor */
481         desc = ctx->sh_desc_enc;
482
483         /* Note: Context registers are saved. */
484         init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);
485
486         /* Class 2 operation */
487         append_operation(desc, ctx->class2_alg_type |
488                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
489
490         /* Read and write assoclen bytes */
491         append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
492         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
493
494         /* Skip assoc data */
495         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
496
497         /* read assoc before reading payload */
498         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
499                                       FIFOLDST_VLF);
500
501         /* Load Counter into CONTEXT1 reg */
502         if (is_rfc3686)
503                 append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
504                                     LDST_CLASS_1_CCB |
505                                     LDST_SRCDST_BYTE_CONTEXT |
506                                     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
507                                      LDST_OFFSET_SHIFT));
508
509         /* Class 1 operation */
510         append_operation(desc, ctx->class1_alg_type |
511                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
512
513         /* Read and write cryptlen bytes */
514         append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
515         append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
516         aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
517
518         /* Write ICV */
519         append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
520                          LDST_SRCDST_BYTE_CONTEXT);
521
522         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
523                                               desc_bytes(desc),
524                                               DMA_TO_DEVICE);
525         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
526                 dev_err(jrdev, "unable to map shared descriptor\n");
527                 return -ENOMEM;
528         }
529 #ifdef DEBUG
530         print_hex_dump(KERN_ERR, "aead enc shdesc@"__stringify(__LINE__)": ",
531                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
532                        desc_bytes(desc), 1);
533 #endif
534
535 skip_enc:
536         /*
537          * Job Descriptor and Shared Descriptors
538          * must all fit into the 64-word Descriptor h/w Buffer
539          */
540         keys_fit_inline = false;
541         if (DESC_AEAD_DEC_LEN + AUTHENC_DESC_JOB_IO_LEN +
542             ctx->split_key_pad_len + ctx->enckeylen +
543             (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
544             CAAM_DESC_BYTES_MAX)
545                 keys_fit_inline = true;
546
547         /* aead_decrypt shared descriptor */
548         desc = ctx->sh_desc_dec;
549
550         /* Note: Context registers are saved. */
551         init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);
552
553         /* Class 2 operation */
554         append_operation(desc, ctx->class2_alg_type |
555                          OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
556
557         /* Read and write assoclen bytes */
558         append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
559         if (alg->caam.geniv)
560                 append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM, ivsize);
561         else
562                 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
563
564         /* Skip assoc data */
565         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
566
567         /* read assoc before reading payload */
568         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
569                              KEY_VLF);
570
571         if (alg->caam.geniv) {
572                 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
573                                 LDST_SRCDST_BYTE_CONTEXT |
574                                 (ctx1_iv_off << LDST_OFFSET_SHIFT));
575                 append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
576                             (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
577         }
578
579         /* Load Counter into CONTEXT1 reg */
580         if (is_rfc3686)
581                 append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
582                                     LDST_CLASS_1_CCB |
583                                     LDST_SRCDST_BYTE_CONTEXT |
584                                     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
585                                      LDST_OFFSET_SHIFT));
586
587         /* Choose operation */
588         if (ctr_mode)
589                 append_operation(desc, ctx->class1_alg_type |
590                                  OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
591         else
592                 append_dec_op1(desc, ctx->class1_alg_type);
593
594         /* Read and write cryptlen bytes */
595         append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
596         append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
597         aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
598
599         /* Load ICV */
600         append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
601                              FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
602
603         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
604                                               desc_bytes(desc),
605                                               DMA_TO_DEVICE);
606         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
607                 dev_err(jrdev, "unable to map shared descriptor\n");
608                 return -ENOMEM;
609         }
610 #ifdef DEBUG
611         print_hex_dump(KERN_ERR, "aead dec shdesc@"__stringify(__LINE__)": ",
612                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
613                        desc_bytes(desc), 1);
614 #endif
615
616         if (!alg->caam.geniv)
617                 goto skip_givenc;
618
619         /*
620          * Job Descriptor and Shared Descriptors
621          * must all fit into the 64-word Descriptor h/w Buffer
622          */
623         keys_fit_inline = false;
624         if (DESC_AEAD_GIVENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
625             ctx->split_key_pad_len + ctx->enckeylen +
626             (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
627             CAAM_DESC_BYTES_MAX)
628                 keys_fit_inline = true;
629
630         /* aead_givencrypt shared descriptor */
631         desc = ctx->sh_desc_enc;
632
633         /* Note: Context registers are saved. */
634         init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);
635
636         if (is_rfc3686)
637                 goto copy_iv;
638
639         /* Generate IV */
640         geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
641                 NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
642                 NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
643         append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
644                             LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
645         append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
646         append_move(desc, MOVE_WAITCOMP |
647                     MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
648                     (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
649                     (ivsize << MOVE_LEN_SHIFT));
650         append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
651
652 copy_iv:
653         /* Copy IV to class 1 context */
654         append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
655                     (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
656                     (ivsize << MOVE_LEN_SHIFT));
657
658         /* Return to encryption */
659         append_operation(desc, ctx->class2_alg_type |
660                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
661
662         /* Read and write assoclen bytes */
663         append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
664         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
665
666         /* ivsize + cryptlen = seqoutlen - authsize */
667         append_math_sub_imm_u32(desc, REG3, SEQOUTLEN, IMM, ctx->authsize);
668
669         /* Skip assoc data */
670         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
671
672         /* read assoc before reading payload */
673         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
674                              KEY_VLF);
675
676         /* Copy iv from outfifo to class 2 fifo */
677         moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
678                  NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
679         append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
680                             LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
681         append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
682                             LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
683
684         /* Load Counter into CONTEXT1 reg */
685         if (is_rfc3686)
686                 append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
687                                     LDST_CLASS_1_CCB |
688                                     LDST_SRCDST_BYTE_CONTEXT |
689                                     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
690                                      LDST_OFFSET_SHIFT));
691
692         /* Class 1 operation */
693         append_operation(desc, ctx->class1_alg_type |
694                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
695
696         /* Will write ivsize + cryptlen */
697         append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
698
699         /* No need to reload the IV */
700         append_seq_fifo_load(desc, ivsize,
701                              FIFOLD_CLASS_SKIP);
702
703         /* Will read cryptlen */
704         append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
705         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
706                              FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
707         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
708
709         /* Write ICV */
710         append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
711                          LDST_SRCDST_BYTE_CONTEXT);
712
713         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
714                                               desc_bytes(desc),
715                                               DMA_TO_DEVICE);
716         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
717                 dev_err(jrdev, "unable to map shared descriptor\n");
718                 return -ENOMEM;
719         }
720 #ifdef DEBUG
721         print_hex_dump(KERN_ERR, "aead givenc shdesc@"__stringify(__LINE__)": ",
722                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
723                        desc_bytes(desc), 1);
724 #endif
725
726 skip_givenc:
727         return 0;
728 }
729
730 static int aead_setauthsize(struct crypto_aead *authenc,
731                                     unsigned int authsize)
732 {
733         struct caam_ctx *ctx = crypto_aead_ctx(authenc);
734
735         ctx->authsize = authsize;
736         aead_set_sh_desc(authenc);
737
738         return 0;
739 }
740
741 static int gcm_set_sh_desc(struct crypto_aead *aead)
742 {
743         struct caam_ctx *ctx = crypto_aead_ctx(aead);
744         struct device *jrdev = ctx->jrdev;
745         bool keys_fit_inline = false;
746         u32 *key_jump_cmd, *zero_payload_jump_cmd,
747             *zero_assoc_jump_cmd1, *zero_assoc_jump_cmd2;
748         u32 *desc;
749
750         if (!ctx->enckeylen || !ctx->authsize)
751                 return 0;
752
753         /*
754          * AES GCM encrypt shared descriptor
755          * Job Descriptor and Shared Descriptor
756          * must fit into the 64-word Descriptor h/w Buffer
757          */
758         if (DESC_GCM_ENC_LEN + GCM_DESC_JOB_IO_LEN +
759             ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
760                 keys_fit_inline = true;
761
762         desc = ctx->sh_desc_enc;
763
764         init_sh_desc(desc, HDR_SHARE_SERIAL);
765
766         /* skip key loading if the keys are already loaded due to sharing */
767         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
768                                    JUMP_COND_SHRD | JUMP_COND_SELF);
769         if (keys_fit_inline)
770                 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
771                                   ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
772         else
773                 append_key(desc, ctx->key_dma, ctx->enckeylen,
774                            CLASS_1 | KEY_DEST_CLASS_REG);
775         set_jump_tgt_here(desc, key_jump_cmd);
776
777         /* class 1 operation */
778         append_operation(desc, ctx->class1_alg_type |
779                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
780
781         /* if assoclen + cryptlen is ZERO, skip to ICV write */
782         append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
783         zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
784                                                  JUMP_COND_MATH_Z);
785
786         /* if assoclen is ZERO, skip reading the assoc data */
787         append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
788         zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
789                                                  JUMP_COND_MATH_Z);
790
791         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
792
793         /* skip assoc data */
794         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
795
796         /* cryptlen = seqinlen - assoclen */
797         append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
798
799         /* if cryptlen is ZERO jump to zero-payload commands */
800         zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
801                                             JUMP_COND_MATH_Z);
802
803         /* read assoc data */
804         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
805                              FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
806         set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
807
808         append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
809
810         /* write encrypted data */
811         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
812
813         /* read payload data */
814         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
815                              FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
816
817         /* jump over the zero-payload commands */
818         append_jump(desc, JUMP_TEST_ALL | 2);
819
820         /* zero-payload commands */
821         set_jump_tgt_here(desc, zero_payload_jump_cmd);
822
823         /* read assoc data */
824         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
825                              FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
826
827         /* There is no input data */
828         set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
829
830         /* write ICV */
831         append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
832                          LDST_SRCDST_BYTE_CONTEXT);
833
834         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
835                                               desc_bytes(desc),
836                                               DMA_TO_DEVICE);
837         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
838                 dev_err(jrdev, "unable to map shared descriptor\n");
839                 return -ENOMEM;
840         }
841 #ifdef DEBUG
842         print_hex_dump(KERN_ERR, "gcm enc shdesc@"__stringify(__LINE__)": ",
843                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
844                        desc_bytes(desc), 1);
845 #endif
846
847         /*
848          * Job Descriptor and Shared Descriptors
849          * must all fit into the 64-word Descriptor h/w Buffer
850          */
851         keys_fit_inline = false;
852         if (DESC_GCM_DEC_LEN + GCM_DESC_JOB_IO_LEN +
853             ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
854                 keys_fit_inline = true;
855
856         desc = ctx->sh_desc_dec;
857
858         init_sh_desc(desc, HDR_SHARE_SERIAL);
859
860         /* skip key loading if the keys are already loaded due to sharing */
861         key_jump_cmd = append_jump(desc, JUMP_JSL |
862                                    JUMP_TEST_ALL | JUMP_COND_SHRD |
863                                    JUMP_COND_SELF);
864         if (keys_fit_inline)
865                 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
866                                   ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
867         else
868                 append_key(desc, ctx->key_dma, ctx->enckeylen,
869                            CLASS_1 | KEY_DEST_CLASS_REG);
870         set_jump_tgt_here(desc, key_jump_cmd);
871
872         /* class 1 operation */
873         append_operation(desc, ctx->class1_alg_type |
874                          OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
875
876         /* if assoclen is ZERO, skip reading the assoc data */
877         append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
878         zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
879                                                  JUMP_COND_MATH_Z);
880
881         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
882
883         /* skip assoc data */
884         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
885
886         /* read assoc data */
887         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
888                              FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
889
890         set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
891
892         /* cryptlen = seqoutlen - assoclen */
893         append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
894
895         /* jump to zero-payload command if cryptlen is zero */
896         zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
897                                             JUMP_COND_MATH_Z);
898
899         append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
900
901         /* store encrypted data */
902         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
903
904         /* read payload data */
905         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
906                              FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
907
908         /* zero-payload command */
909         set_jump_tgt_here(desc, zero_payload_jump_cmd);
910
911         /* read ICV */
912         append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
913                              FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
914
915         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
916                                               desc_bytes(desc),
917                                               DMA_TO_DEVICE);
918         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
919                 dev_err(jrdev, "unable to map shared descriptor\n");
920                 return -ENOMEM;
921         }
922 #ifdef DEBUG
923         print_hex_dump(KERN_ERR, "gcm dec shdesc@"__stringify(__LINE__)": ",
924                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
925                        desc_bytes(desc), 1);
926 #endif
927
928         return 0;
929 }
930
931 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
932 {
933         struct caam_ctx *ctx = crypto_aead_ctx(authenc);
934
935         ctx->authsize = authsize;
936         gcm_set_sh_desc(authenc);
937
938         return 0;
939 }
940
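/*
 * Editor's sketch (not part of the original driver): how the gcm_setkey()
 * and gcm_setauthsize() entry points above are typically reached by an
 * in-kernel user of the generic AEAD API.  The function name and the
 * 16-byte tag length are illustrative assumptions; error handling is kept
 * to the minimum.
 */
static int __maybe_unused example_gcm_tfm_setup(const u8 *key,
                                                unsigned int keylen)
{
        struct crypto_aead *tfm;
        int ret;

        /*
         * The crypto core binds "gcm(aes)" to this driver when its
         * CAAM_CRA_PRIORITY outranks the other registered implementations.
         */
        tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
        if (IS_ERR(tfm))
                return PTR_ERR(tfm);

        /* Lands in gcm_setkey() via the aead_alg ->setkey hook */
        ret = crypto_aead_setkey(tfm, key, keylen);
        if (!ret)
                /* Lands in gcm_setauthsize(); 16 bytes is the full GCM tag */
                ret = crypto_aead_setauthsize(tfm, 16);

        crypto_free_aead(tfm);
        return ret;
}
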
941 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
942 {
943         struct caam_ctx *ctx = crypto_aead_ctx(aead);
944         struct device *jrdev = ctx->jrdev;
945         bool keys_fit_inline = false;
946         u32 *key_jump_cmd;
947         u32 *desc;
948
949         if (!ctx->enckeylen || !ctx->authsize)
950                 return 0;
951
952         /*
953          * RFC4106 encrypt shared descriptor
954          * Job Descriptor and Shared Descriptor
955          * must fit into the 64-word Descriptor h/w Buffer
956          */
957         if (DESC_RFC4106_ENC_LEN + GCM_DESC_JOB_IO_LEN +
958             ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
959                 keys_fit_inline = true;
960
961         desc = ctx->sh_desc_enc;
962
963         init_sh_desc(desc, HDR_SHARE_SERIAL);
964
965         /* Skip key loading if it is loaded due to sharing */
966         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
967                                    JUMP_COND_SHRD);
968         if (keys_fit_inline)
969                 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
970                                   ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
971         else
972                 append_key(desc, ctx->key_dma, ctx->enckeylen,
973                            CLASS_1 | KEY_DEST_CLASS_REG);
974         set_jump_tgt_here(desc, key_jump_cmd);
975
976         /* Class 1 operation */
977         append_operation(desc, ctx->class1_alg_type |
978                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
979
980         append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
981         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
982
983         /* Read assoc data */
984         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
985                              FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
986
987         /* Skip IV */
988         append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
989
990         /* Will read cryptlen bytes */
991         append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
992
993         /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
994         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
995
996         /* Skip assoc data */
997         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
998
999         /* cryptlen = seqoutlen - assoclen */
1000         append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);
1001
1002         /* Write encrypted data */
1003         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1004
1005         /* Read payload data */
1006         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1007                              FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
1008
1009         /* Write ICV */
1010         append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
1011                          LDST_SRCDST_BYTE_CONTEXT);
1012
1013         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
1014                                               desc_bytes(desc),
1015                                               DMA_TO_DEVICE);
1016         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
1017                 dev_err(jrdev, "unable to map shared descriptor\n");
1018                 return -ENOMEM;
1019         }
1020 #ifdef DEBUG
1021         print_hex_dump(KERN_ERR, "rfc4106 enc shdesc@"__stringify(__LINE__)": ",
1022                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1023                        desc_bytes(desc), 1);
1024 #endif
1025
1026         /*
1027          * Job Descriptor and Shared Descriptors
1028          * must all fit into the 64-word Descriptor h/w Buffer
1029          */
1030         keys_fit_inline = false;
1031         if (DESC_RFC4106_DEC_LEN + DESC_JOB_IO_LEN +
1032             ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
1033                 keys_fit_inline = true;
1034
1035         desc = ctx->sh_desc_dec;
1036
1037         init_sh_desc(desc, HDR_SHARE_SERIAL);
1038
1039         /* Skip key loading if it is loaded due to sharing */
1040         key_jump_cmd = append_jump(desc, JUMP_JSL |
1041                                    JUMP_TEST_ALL | JUMP_COND_SHRD);
1042         if (keys_fit_inline)
1043                 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1044                                   ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1045         else
1046                 append_key(desc, ctx->key_dma, ctx->enckeylen,
1047                            CLASS_1 | KEY_DEST_CLASS_REG);
1048         set_jump_tgt_here(desc, key_jump_cmd);
1049
1050         /* Class 1 operation */
1051         append_operation(desc, ctx->class1_alg_type |
1052                          OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1053
1054         append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
1055         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
1056
1057         /* Read assoc data */
1058         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1059                              FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
1060
1061         /* Skip IV */
1062         append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1063
1064         /* Will read cryptlen bytes */
1065         append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);
1066
1067         /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
1068         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
1069
1070         /* Skip assoc data */
1071         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
1072
1073         /* Will write cryptlen bytes */
1074         append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1075
1076         /* Store payload data */
1077         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1078
1079         /* Read encrypted data */
1080         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1081                              FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
1082
1083         /* Read ICV */
1084         append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
1085                              FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1086
1087         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
1088                                               desc_bytes(desc),
1089                                               DMA_TO_DEVICE);
1090         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
1091                 dev_err(jrdev, "unable to map shared descriptor\n");
1092                 return -ENOMEM;
1093         }
1094 #ifdef DEBUG
1095         print_hex_dump(KERN_ERR, "rfc4106 dec shdesc@"__stringify(__LINE__)": ",
1096                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1097                        desc_bytes(desc), 1);
1098 #endif
1099
1100         return 0;
1101 }
1102
1103 static int rfc4106_setauthsize(struct crypto_aead *authenc,
1104                                unsigned int authsize)
1105 {
1106         struct caam_ctx *ctx = crypto_aead_ctx(authenc);
1107
1108         ctx->authsize = authsize;
1109         rfc4106_set_sh_desc(authenc);
1110
1111         return 0;
1112 }
1113
1114 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
1115 {
1116         struct caam_ctx *ctx = crypto_aead_ctx(aead);
1117         struct device *jrdev = ctx->jrdev;
1118         bool keys_fit_inline = false;
1119         u32 *key_jump_cmd;
1120         u32 *read_move_cmd, *write_move_cmd;
1121         u32 *desc;
1122
1123         if (!ctx->enckeylen || !ctx->authsize)
1124                 return 0;
1125
1126         /*
1127          * RFC4543 encrypt shared descriptor
1128          * Job Descriptor and Shared Descriptor
1129          * must fit into the 64-word Descriptor h/w Buffer
1130          */
1131         if (DESC_RFC4543_ENC_LEN + GCM_DESC_JOB_IO_LEN +
1132             ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
1133                 keys_fit_inline = true;
1134
1135         desc = ctx->sh_desc_enc;
1136
1137         init_sh_desc(desc, HDR_SHARE_SERIAL);
1138
1139         /* Skip key loading if it is loaded due to sharing */
1140         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1141                                    JUMP_COND_SHRD);
1142         if (keys_fit_inline)
1143                 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1144                                   ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1145         else
1146                 append_key(desc, ctx->key_dma, ctx->enckeylen,
1147                            CLASS_1 | KEY_DEST_CLASS_REG);
1148         set_jump_tgt_here(desc, key_jump_cmd);
1149
1150         /* Class 1 operation */
1151         append_operation(desc, ctx->class1_alg_type |
1152                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
1153
1154         /* assoclen + cryptlen = seqinlen */
1155         append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
1156
1157         /*
1158          * MOVE_LEN opcode is not available in all SEC HW revisions,
1159          * thus we need to do some magic, i.e. self-patch the descriptor
1160          * buffer.
1161          */
1162         read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1163                                     (0x6 << MOVE_LEN_SHIFT));
1164         write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1165                                      (0x8 << MOVE_LEN_SHIFT));
1166
1167         /* Will read assoclen + cryptlen bytes */
1168         append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1169
1170         /* Will write assoclen + cryptlen bytes */
1171         append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1172
1173         /* Read and write assoclen + cryptlen bytes */
1174         aead_append_src_dst(desc, FIFOLD_TYPE_AAD);
1175
1176         set_move_tgt_here(desc, read_move_cmd);
1177         set_move_tgt_here(desc, write_move_cmd);
1178         append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1179         /* Move payload data to OFIFO */
1180         append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1181
1182         /* Write ICV */
1183         append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
1184                          LDST_SRCDST_BYTE_CONTEXT);
1185
1186         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
1187                                               desc_bytes(desc),
1188                                               DMA_TO_DEVICE);
1189         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
1190                 dev_err(jrdev, "unable to map shared descriptor\n");
1191                 return -ENOMEM;
1192         }
1193 #ifdef DEBUG
1194         print_hex_dump(KERN_ERR, "rfc4543 enc shdesc@"__stringify(__LINE__)": ",
1195                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1196                        desc_bytes(desc), 1);
1197 #endif
1198
1199         /*
1200          * Job Descriptor and Shared Descriptors
1201          * must all fit into the 64-word Descriptor h/w Buffer
1202          */
1203         keys_fit_inline = false;
1204         if (DESC_RFC4543_DEC_LEN + GCM_DESC_JOB_IO_LEN +
1205             ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
1206                 keys_fit_inline = true;
1207
1208         desc = ctx->sh_desc_dec;
1209
1210         init_sh_desc(desc, HDR_SHARE_SERIAL);
1211
1212         /* Skip key loading if it is loaded due to sharing */
1213         key_jump_cmd = append_jump(desc, JUMP_JSL |
1214                                    JUMP_TEST_ALL | JUMP_COND_SHRD);
1215         if (keys_fit_inline)
1216                 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1217                                   ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1218         else
1219                 append_key(desc, ctx->key_dma, ctx->enckeylen,
1220                            CLASS_1 | KEY_DEST_CLASS_REG);
1221         set_jump_tgt_here(desc, key_jump_cmd);
1222
1223         /* Class 1 operation */
1224         append_operation(desc, ctx->class1_alg_type |
1225                          OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1226
1227         /* assoclen + cryptlen = seqoutlen */
1228         append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1229
1230         /*
1231          * MOVE_LEN opcode is not available in all SEC HW revisions,
1232          * thus we need to do some magic, i.e. self-patch the descriptor
1233          * buffer.
1234          */
1235         read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1236                                     (0x6 << MOVE_LEN_SHIFT));
1237         write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1238                                      (0x8 << MOVE_LEN_SHIFT));
1239
1240         /* Will read assoclen + cryptlen bytes */
1241         append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1242
1243         /* Will write assoclen + cryptlen bytes */
1244         append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1245
1246         /* Store payload data */
1247         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1248
1249         /* In-snoop assoclen + cryptlen data */
1250         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
1251                              FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);
1252
1253         set_move_tgt_here(desc, read_move_cmd);
1254         set_move_tgt_here(desc, write_move_cmd);
1255         append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1256         /* Move payload data to OFIFO */
1257         append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1258         append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
1259
1260         /* Read ICV */
1261         append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
1262                              FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1263
1264         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
1265                                               desc_bytes(desc),
1266                                               DMA_TO_DEVICE);
1267         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
1268                 dev_err(jrdev, "unable to map shared descriptor\n");
1269                 return -ENOMEM;
1270         }
1271 #ifdef DEBUG
1272         print_hex_dump(KERN_ERR, "rfc4543 dec shdesc@"__stringify(__LINE__)": ",
1273                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1274                        desc_bytes(desc), 1);
1275 #endif
1276
1277         return 0;
1278 }
1279
1280 static int rfc4543_setauthsize(struct crypto_aead *authenc,
1281                                unsigned int authsize)
1282 {
1283         struct caam_ctx *ctx = crypto_aead_ctx(authenc);
1284
1285         ctx->authsize = authsize;
1286         rfc4543_set_sh_desc(authenc);
1287
1288         return 0;
1289 }
1290
1291 static u32 gen_split_aead_key(struct caam_ctx *ctx, const u8 *key_in,
1292                               u32 authkeylen)
1293 {
1294         return gen_split_key(ctx->jrdev, ctx->key, ctx->split_key_len,
1295                                ctx->split_key_pad_len, key_in, authkeylen,
1296                                ctx->alg_op);
1297 }
1298
1299 static int aead_setkey(struct crypto_aead *aead,
1300                                const u8 *key, unsigned int keylen)
1301 {
1302         /* Sizes for MDHA pads (*not* keys): MD5, SHA1, 224, 256, 384, 512 */
1303         static const u8 mdpadlen[] = { 16, 20, 32, 32, 64, 64 };
1304         struct caam_ctx *ctx = crypto_aead_ctx(aead);
1305         struct device *jrdev = ctx->jrdev;
1306         struct crypto_authenc_keys keys;
1307         int ret = 0;
1308
1309         if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
1310                 goto badkey;
1311
1312         /* Pick class 2 key length from algorithm submask */
1313         ctx->split_key_len = mdpadlen[(ctx->alg_op & OP_ALG_ALGSEL_SUBMASK) >>
1314                                       OP_ALG_ALGSEL_SHIFT] * 2;
1315         ctx->split_key_pad_len = ALIGN(ctx->split_key_len, 16);
1316
1317         if (ctx->split_key_pad_len + keys.enckeylen > CAAM_MAX_KEY_SIZE)
1318                 goto badkey;
1319
1320 #ifdef DEBUG
1321         printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
1322                keys.authkeylen + keys.enckeylen, keys.enckeylen,
1323                keys.authkeylen);
1324         printk(KERN_ERR "split_key_len %d split_key_pad_len %d\n",
1325                ctx->split_key_len, ctx->split_key_pad_len);
1326         print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1327                        DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1328 #endif
1329
1330         ret = gen_split_aead_key(ctx, keys.authkey, keys.authkeylen);
1331         if (ret) {
1332                 goto badkey;
1333         }
1334
1335         /* append encryption key after the (padded) auth split key */
1336         memcpy(ctx->key + ctx->split_key_pad_len, keys.enckey, keys.enckeylen);
1337
1338         ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->split_key_pad_len +
1339                                       keys.enckeylen, DMA_TO_DEVICE);
1340         if (dma_mapping_error(jrdev, ctx->key_dma)) {
1341                 dev_err(jrdev, "unable to map key i/o memory\n");
1342                 return -ENOMEM;
1343         }
1344 #ifdef DEBUG
1345         print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
1346                        DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
1347                        ctx->split_key_pad_len + keys.enckeylen, 1);
1348 #endif
1349
1350         ctx->enckeylen = keys.enckeylen;
1351
1352         ret = aead_set_sh_desc(aead);
1353         if (ret) {
1354                 dma_unmap_single(jrdev, ctx->key_dma, ctx->split_key_pad_len +
1355                                  keys.enckeylen, DMA_TO_DEVICE);
1356         }
1357
1358         return ret;
1359 badkey:
1360         crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
1361         return -EINVAL;
1362 }
1363
1364 static int gcm_setkey(struct crypto_aead *aead,
1365                       const u8 *key, unsigned int keylen)
1366 {
1367         struct caam_ctx *ctx = crypto_aead_ctx(aead);
1368         struct device *jrdev = ctx->jrdev;
1369         int ret = 0;
1370
1371 #ifdef DEBUG
1372         print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1373                        DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1374 #endif
1375
1376         memcpy(ctx->key, key, keylen);
1377         ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
1378                                       DMA_TO_DEVICE);
1379         if (dma_mapping_error(jrdev, ctx->key_dma)) {
1380                 dev_err(jrdev, "unable to map key i/o memory\n");
1381                 return -ENOMEM;
1382         }
1383         ctx->enckeylen = keylen;
1384
1385         ret = gcm_set_sh_desc(aead);
1386         if (ret) {
1387                 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
1388                                  DMA_TO_DEVICE);
1389         }
1390
1391         return ret;
1392 }
1393
1394 static int rfc4106_setkey(struct crypto_aead *aead,
1395                           const u8 *key, unsigned int keylen)
1396 {
1397         struct caam_ctx *ctx = crypto_aead_ctx(aead);
1398         struct device *jrdev = ctx->jrdev;
1399         int ret = 0;
1400
1401         if (keylen < 4)
1402                 return -EINVAL;
1403
1404 #ifdef DEBUG
1405         print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1406                        DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1407 #endif
1408
1409         memcpy(ctx->key, key, keylen);
1410
1411         /*
1412          * The last four bytes of the key material are used as the salt value
1413          * in the nonce. Update the AES key length.
1414          */
1415         ctx->enckeylen = keylen - 4;
1416
1417         ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
1418                                       DMA_TO_DEVICE);
1419         if (dma_mapping_error(jrdev, ctx->key_dma)) {
1420                 dev_err(jrdev, "unable to map key i/o memory\n");
1421                 return -ENOMEM;
1422         }
1423
1424         ret = rfc4106_set_sh_desc(aead);
1425         if (ret) {
1426                 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
1427                                  DMA_TO_DEVICE);
1428         }
1429
1430         return ret;
1431 }
1432
1433 static int rfc4543_setkey(struct crypto_aead *aead,
1434                           const u8 *key, unsigned int keylen)
1435 {
1436         struct caam_ctx *ctx = crypto_aead_ctx(aead);
1437         struct device *jrdev = ctx->jrdev;
1438         int ret = 0;
1439
1440         if (keylen < 4)
1441                 return -EINVAL;
1442
1443 #ifdef DEBUG
1444         print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1445                        DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1446 #endif
1447
1448         memcpy(ctx->key, key, keylen);
1449
1450         /*
1451          * The last four bytes of the key material are used as the salt value
1452          * in the nonce. Update the AES key length.
1453          */
1454         ctx->enckeylen = keylen - 4;
1455
1456         ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
1457                                       DMA_TO_DEVICE);
1458         if (dma_mapping_error(jrdev, ctx->key_dma)) {
1459                 dev_err(jrdev, "unable to map key i/o memory\n");
1460                 return -ENOMEM;
1461         }
1462
1463         ret = rfc4543_set_sh_desc(aead);
1464         if (ret) {
1465                 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
1466                                  DMA_TO_DEVICE);
1467         }
1468
1469         return ret;
1470 }
1471
1472 static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
1473                              const u8 *key, unsigned int keylen)
1474 {
1475         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1476         struct ablkcipher_tfm *crt = &ablkcipher->base.crt_ablkcipher;
1477         struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
1478         const char *alg_name = crypto_tfm_alg_name(tfm);
1479         struct device *jrdev = ctx->jrdev;
1480         int ret = 0;
1481         u32 *key_jump_cmd;
1482         u32 *desc;
1483         u32 *nonce;
1484         u32 geniv;
1485         u32 ctx1_iv_off = 0;
1486         const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
1487                                OP_ALG_AAI_CTR_MOD128);
1488         const bool is_rfc3686 = (ctr_mode &&
1489                                  (strstr(alg_name, "rfc3686") != NULL));
1490
1491 #ifdef DEBUG
1492         print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1493                        DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1494 #endif
1495         /*
1496          * AES-CTR needs to load IV in CONTEXT1 reg
1497          * at an offset of 128bits (16bytes)
1498          * CONTEXT1[255:128] = IV
1499          */
1500         if (ctr_mode)
1501                 ctx1_iv_off = 16;
1502
1503         /*
1504          * RFC3686 specific:
1505          *      | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1506          *      | *key = {KEY, NONCE}
1507          */
1508         if (is_rfc3686) {
1509                 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
1510                 keylen -= CTR_RFC3686_NONCE_SIZE;
1511         }
1512
1513         memcpy(ctx->key, key, keylen);
1514         ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
1515                                       DMA_TO_DEVICE);
1516         if (dma_mapping_error(jrdev, ctx->key_dma)) {
1517                 dev_err(jrdev, "unable to map key i/o memory\n");
1518                 return -ENOMEM;
1519         }
1520         ctx->enckeylen = keylen;
1521
1522         /* ablkcipher_encrypt shared descriptor */
1523         desc = ctx->sh_desc_enc;
1524         init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1525         /* Skip if already shared */
1526         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1527                                    JUMP_COND_SHRD);
1528
1529         /* Load class1 key only */
1530         append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1531                           ctx->enckeylen, CLASS_1 |
1532                           KEY_DEST_CLASS_REG);
1533
1534         /* Load nonce into CONTEXT1 reg */
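             /*
              * The 4-byte nonce carried at the end of the supplied key material
              * is loaded as an immediate into the output data FIFO and then
              * MOVEd into CONTEXT1 at byte offset 16, giving the
              * {NONCE, IV, COUNTER} layout described above.
              */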
1535         if (is_rfc3686) {
1536                 nonce = (u32 *)(key + keylen);
1537                 append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
1538                                     LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1539                 append_move(desc, MOVE_WAITCOMP |
1540                             MOVE_SRC_OUTFIFO |
1541                             MOVE_DEST_CLASS1CTX |
1542                             (16 << MOVE_OFFSET_SHIFT) |
1543                             (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1544         }
1545
1546         set_jump_tgt_here(desc, key_jump_cmd);
1547
1548         /* Load IV */
1549         append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
1550                         LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
1551
1552         /* Load counter into CONTEXT1 reg */
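             /*
              * RFC 3686 starts the 32-bit block counter at 1 for every message;
              * it sits in CONTEXT1 right after the nonce and IV.
              */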
1553         if (is_rfc3686)
1554                 append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
1555                                     LDST_CLASS_1_CCB |
1556                                     LDST_SRCDST_BYTE_CONTEXT |
1557                                     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1558                                      LDST_OFFSET_SHIFT));
1559
1560         /* Load operation */
1561         append_operation(desc, ctx->class1_alg_type |
1562                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
1563
1564         /* Perform operation */
1565         ablkcipher_append_src_dst(desc);
1566
1567         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
1568                                               desc_bytes(desc),
1569                                               DMA_TO_DEVICE);
1570         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
1571                 dev_err(jrdev, "unable to map shared descriptor\n");
1572                 return -ENOMEM;
1573         }
1574 #ifdef DEBUG
1575         print_hex_dump(KERN_ERR,
1576                        "ablkcipher enc shdesc@"__stringify(__LINE__)": ",
1577                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1578                        desc_bytes(desc), 1);
1579 #endif
1580         /* ablkcipher_decrypt shared descriptor */
1581         desc = ctx->sh_desc_dec;
1582
1583         init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1584         /* Skip if already shared */
1585         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1586                                    JUMP_COND_SHRD);
1587
1588         /* Load class1 key only */
1589         append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1590                           ctx->enckeylen, CLASS_1 |
1591                           KEY_DEST_CLASS_REG);
1592
1593         /* Load nonce into CONTEXT1 reg */
1594         if (is_rfc3686) {
1595                 nonce = (u32 *)(key + keylen);
1596                 append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
1597                                     LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1598                 append_move(desc, MOVE_WAITCOMP |
1599                             MOVE_SRC_OUTFIFO |
1600                             MOVE_DEST_CLASS1CTX |
1601                             (16 << MOVE_OFFSET_SHIFT) |
1602                             (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1603         }
1604
1605         set_jump_tgt_here(desc, key_jump_cmd);
1606
1607         /* Load IV */
1608         append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
1609                         LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
1610
1611         /* Load counter into CONTEXT1 reg */
1612         if (is_rfc3686)
1613                 append_load_imm_u32(desc, be32_to_cpu(1), LDST_IMM |
1614                                     LDST_CLASS_1_CCB |
1615                                     LDST_SRCDST_BYTE_CONTEXT |
1616                                     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1617                                      LDST_OFFSET_SHIFT));
1618
1619         /* Choose operation */
1620         if (ctr_mode)
1621                 append_operation(desc, ctx->class1_alg_type |
1622                                  OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
1623         else
1624                 append_dec_op1(desc, ctx->class1_alg_type);
1625
1626         /* Perform operation */
1627         ablkcipher_append_src_dst(desc);
1628
1629         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
1630                                               desc_bytes(desc),
1631                                               DMA_TO_DEVICE);
1632         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
1633                 dev_err(jrdev, "unable to map shared descriptor\n");
1634                 return -ENOMEM;
1635         }
1636
1637 #ifdef DEBUG
1638         print_hex_dump(KERN_ERR,
1639                        "ablkcipher dec shdesc@"__stringify(__LINE__)": ",
1640                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1641                        desc_bytes(desc), 1);
1642 #endif
1643         /* ablkcipher_givencrypt shared descriptor */
1644         desc = ctx->sh_desc_givenc;
1645
1646         init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1647         /* Skip if already shared */
1648         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1649                                    JUMP_COND_SHRD);
1650
1651         /* Load class1 key only */
1652         append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1653                           ctx->enckeylen, CLASS_1 |
1654                           KEY_DEST_CLASS_REG);
1655
1656         /* Load nonce into CONTEXT1 reg */
1657         if (is_rfc3686) {
1658                 nonce = (u32 *)(key + keylen);
1659                 append_load_imm_u32(desc, *nonce, LDST_CLASS_IND_CCB |
1660                                     LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1661                 append_move(desc, MOVE_WAITCOMP |
1662                             MOVE_SRC_OUTFIFO |
1663                             MOVE_DEST_CLASS1CTX |
1664                             (16 << MOVE_OFFSET_SHIFT) |
1665                             (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1666         }
1667         set_jump_tgt_here(desc, key_jump_cmd);
1668
1669         /* Generate IV */
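             /*
              * Note (added explanation): the info FIFO entry below requests
              * crt->ivsize random bytes from the pad engine; automatic info FIFO
              * entries are suspended while the MOVE pulls them from the input
              * FIFO into CONTEXT1 at the IV offset, and the SEQ STORE that
              * follows returns the same IV to the caller.
              */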
1670         geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
1671                 NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
1672                 NFIFOENTRY_PTYPE_RND | (crt->ivsize << NFIFOENTRY_DLEN_SHIFT);
1673         append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
1674                             LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
1675         append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1676         append_move(desc, MOVE_WAITCOMP |
1677                     MOVE_SRC_INFIFO |
1678                     MOVE_DEST_CLASS1CTX |
1679                     (crt->ivsize << MOVE_LEN_SHIFT) |
1680                     (ctx1_iv_off << MOVE_OFFSET_SHIFT));
1681         append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
1682
1683         /* Copy generated IV to memory */
1684         append_seq_store(desc, crt->ivsize,
1685                          LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1686                          (ctx1_iv_off << LDST_OFFSET_SHIFT));
1687
1688         /* Load counter into CONTEXT1 reg */
1689         if (is_rfc3686)
1690                 append_load_imm_u32(desc, (u32)1, LDST_IMM |
1691                                     LDST_CLASS_1_CCB |
1692                                     LDST_SRCDST_BYTE_CONTEXT |
1693                                     ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1694                                      LDST_OFFSET_SHIFT));
1695
1696         if (ctx1_iv_off)
1697                 append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
1698                             (1 << JUMP_OFFSET_SHIFT));
1699
1700         /* Load operation */
1701         append_operation(desc, ctx->class1_alg_type |
1702                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
1703
1704         /* Perform operation */
1705         ablkcipher_append_src_dst(desc);
1706
1707         ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc,
1708                                                  desc_bytes(desc),
1709                                                  DMA_TO_DEVICE);
1710         if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) {
1711                 dev_err(jrdev, "unable to map shared descriptor\n");
1712                 return -ENOMEM;
1713         }
1714 #ifdef DEBUG
1715         print_hex_dump(KERN_ERR,
1716                        "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
1717                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1718                        desc_bytes(desc), 1);
1719 #endif
1720
1721         return ret;
1722 }
1723
1724 static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
1725                                  const u8 *key, unsigned int keylen)
1726 {
1727         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1728         struct device *jrdev = ctx->jrdev;
1729         u32 *key_jump_cmd, *desc;
1730         __be64 sector_size = cpu_to_be64(512);
1731
1732         if (keylen != 2 * AES_MIN_KEY_SIZE  && keylen != 2 * AES_MAX_KEY_SIZE) {
1733                 crypto_ablkcipher_set_flags(ablkcipher,
1734                                             CRYPTO_TFM_RES_BAD_KEY_LEN);
1735                 dev_err(jrdev, "key size mismatch\n");
1736                 return -EINVAL;
1737         }
1738
1739         memcpy(ctx->key, key, keylen);
1740         ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, DMA_TO_DEVICE);
1741         if (dma_mapping_error(jrdev, ctx->key_dma)) {
1742                 dev_err(jrdev, "unable to map key i/o memory\n");
1743                 return -ENOMEM;
1744         }
1745         ctx->enckeylen = keylen;
1746
1747         /* xts_ablkcipher_encrypt shared descriptor */
1748         desc = ctx->sh_desc_enc;
1749         init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1750         /* Skip if already shared */
1751         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1752                                    JUMP_COND_SHRD);
1753
1754         /* Load class1 keys only */
1755         append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1756                           ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1757
1758         /* Load sector size with index 40 bytes (0x28) */
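             /*
              * Note (added explanation): the 8-byte sector size (fixed at 512
              * above) lives at class 1 context offset 0x28; the sector index,
              * taken from the upper 8 bytes of the IV, is seq-loaded at offset
              * 0x20 further below.
              */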
1759         append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
1760                    LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
1761         append_data(desc, (void *)&sector_size, 8);
1762
1763         set_jump_tgt_here(desc, key_jump_cmd);
1764
1765         /*
1766          * create sequence for loading the sector index
1767          * Upper 8B of IV - will be used as sector index
1768          * Lower 8B of IV - will be discarded
1769          */
1770         append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
1771                    LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
1772         append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1773
1774         /* Load operation */
1775         append_operation(desc, ctx->class1_alg_type | OP_ALG_AS_INITFINAL |
1776                          OP_ALG_ENCRYPT);
1777
1778         /* Perform operation */
1779         ablkcipher_append_src_dst(desc);
1780
1781         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
1782                                               DMA_TO_DEVICE);
1783         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
1784                 dev_err(jrdev, "unable to map shared descriptor\n");
1785                 return -ENOMEM;
1786         }
1787 #ifdef DEBUG
1788         print_hex_dump(KERN_ERR,
1789                        "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
1790                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1791 #endif
1792
1793         /* xts_ablkcipher_decrypt shared descriptor */
1794         desc = ctx->sh_desc_dec;
1795
1796         init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1797         /* Skip if already shared */
1798         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1799                                    JUMP_COND_SHRD);
1800
1801         /* Load class1 key only */
1802         append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1803                           ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1804
1805         /* Load sector size with index 40 bytes (0x28) */
1806         append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
1807                    LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
1808         append_data(desc, (void *)&sector_size, 8);
1809
1810         set_jump_tgt_here(desc, key_jump_cmd);
1811
1812         /*
1813          * create sequence for loading the sector index
1814          * Upper 8B of IV - will be used as sector index
1815          * Lower 8B of IV - will be discarded
1816          */
1817         append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
1818                    LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
1819         append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1820
1821         /* Load operation */
1822         append_dec_op1(desc, ctx->class1_alg_type);
1823
1824         /* Perform operation */
1825         ablkcipher_append_src_dst(desc);
1826
1827         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
1828                                               DMA_TO_DEVICE);
1829         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
1830                 dma_unmap_single(jrdev, ctx->sh_desc_enc_dma,
1831                                  desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
1832                 dev_err(jrdev, "unable to map shared descriptor\n");
1833                 return -ENOMEM;
1834         }
1835 #ifdef DEBUG
1836         print_hex_dump(KERN_ERR,
1837                        "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
1838                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1839 #endif
1840
1841         return 0;
1842 }
1843
1844 /*
1845  * aead_edesc - s/w-extended aead descriptor
1846  * @assoc_nents: number of segments in associated data (SPI+Seq) scatterlist
1847  * @src_nents: number of segments in input scatterlist
1848  * @dst_nents: number of segments in output scatterlist
1849  * @iv_dma: dma address of iv for checking continuity and link table
1850  * @sec4_sg_bytes: length of dma mapped sec4_sg space
1851  * @sec4_sg_dma: bus physical mapped address of h/w link table
1852  * @sec4_sg: pointer to h/w link table
1853  * @hw_desc: the h/w job descriptor (must not exceed MAX_CAAM_DESCSIZE) followed by any referenced link tables
1854  */
1855 struct aead_edesc {
1856         int assoc_nents;
1857         int src_nents;
1858         int dst_nents;
1859         dma_addr_t iv_dma;
1860         int sec4_sg_bytes;
1861         dma_addr_t sec4_sg_dma;
1862         struct sec4_sg_entry *sec4_sg;
1863         u32 hw_desc[];
1864 };
1865
1866 /*
1867  * ablkcipher_edesc - s/w-extended ablkcipher descriptor
1868  * @src_nents: number of segments in input scatterlist
1869  * @dst_nents: number of segments in output scatterlist
1870  * @iv_dma: dma address of iv for checking continuity and link table
1871  * @sec4_sg_bytes: length of dma mapped sec4_sg space
1872  * @sec4_sg_dma: bus physical mapped address of h/w link table
1873  * @sec4_sg: pointer to h/w link table
1874  * @hw_desc: the h/w job descriptor (must not exceed MAX_CAAM_DESCSIZE) followed by any referenced link tables
1875  */
1876 struct ablkcipher_edesc {
1877         int src_nents;
1878         int dst_nents;
1879         dma_addr_t iv_dma;
1880         int sec4_sg_bytes;
1881         dma_addr_t sec4_sg_dma;
1882         struct sec4_sg_entry *sec4_sg;
1883         u32 hw_desc[];
1884 };
1885
1886 static void caam_unmap(struct device *dev, struct scatterlist *src,
1887                        struct scatterlist *dst, int src_nents,
1888                        int dst_nents,
1889                        dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
1890                        int sec4_sg_bytes)
1891 {
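             /* A nents value of 0 denotes a single, contiguous segment, hence the "?: 1" below */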
1892         if (dst != src) {
1893                 dma_unmap_sg(dev, src, src_nents ? : 1, DMA_TO_DEVICE);
1894                 dma_unmap_sg(dev, dst, dst_nents ? : 1, DMA_FROM_DEVICE);
1895         } else {
1896                 dma_unmap_sg(dev, src, src_nents ? : 1, DMA_BIDIRECTIONAL);
1897         }
1898
1899         if (iv_dma)
1900                 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
1901         if (sec4_sg_bytes)
1902                 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
1903                                  DMA_TO_DEVICE);
1904 }
1905
1906 static void aead_unmap(struct device *dev,
1907                        struct aead_edesc *edesc,
1908                        struct aead_request *req)
1909 {
1910         caam_unmap(dev, req->src, req->dst,
1911                    edesc->src_nents, edesc->dst_nents, 0, 0,
1912                    edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
1913 }
1914
1915 static void ablkcipher_unmap(struct device *dev,
1916                              struct ablkcipher_edesc *edesc,
1917                              struct ablkcipher_request *req)
1918 {
1919         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1920         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1921
1922         caam_unmap(dev, req->src, req->dst,
1923                    edesc->src_nents, edesc->dst_nents,
1924                    edesc->iv_dma, ivsize,
1925                    edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
1926 }
1927
1928 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
1929                                    void *context)
1930 {
1931         struct aead_request *req = context;
1932         struct aead_edesc *edesc;
1933
1934 #ifdef DEBUG
1935         dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1936 #endif
1937
1938         edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
1939
1940         if (err)
1941                 caam_jr_strstatus(jrdev, err);
1942
1943         aead_unmap(jrdev, edesc, req);
1944
1945         kfree(edesc);
1946
1947         aead_request_complete(req, err);
1948 }
1949
1950 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
1951                                    void *context)
1952 {
1953         struct aead_request *req = context;
1954         struct aead_edesc *edesc;
1955
1956 #ifdef DEBUG
1957         dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1958 #endif
1959
1960         edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
1961
1962         if (err)
1963                 caam_jr_strstatus(jrdev, err);
1964
1965         aead_unmap(jrdev, edesc, req);
1966
1967         /*
1968          * verify that the h/w ICV check passed; otherwise return -EBADMSG
1969          */
1970         if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
1971                 err = -EBADMSG;
1972
1973         kfree(edesc);
1974
1975         aead_request_complete(req, err);
1976 }
1977
1978 static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
1979                                    void *context)
1980 {
1981         struct ablkcipher_request *req = context;
1982         struct ablkcipher_edesc *edesc;
1983 #ifdef DEBUG
1984         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1985         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1986
1987         dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1988 #endif
1989
1990         edesc = (struct ablkcipher_edesc *)((char *)desc -
1991                  offsetof(struct ablkcipher_edesc, hw_desc));
1992
1993         if (err)
1994                 caam_jr_strstatus(jrdev, err);
1995
1996 #ifdef DEBUG
1997         print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
1998                        DUMP_PREFIX_ADDRESS, 16, 4, req->info,
1999                        edesc->src_nents > 1 ? 100 : ivsize, 1);
2000         print_hex_dump(KERN_ERR, "dst    @"__stringify(__LINE__)": ",
2001                        DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
2002                        edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
2003 #endif
2004
2005         ablkcipher_unmap(jrdev, edesc, req);
2006         kfree(edesc);
2007
2008         ablkcipher_request_complete(req, err);
2009 }
2010
2011 static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
2012                                     void *context)
2013 {
2014         struct ablkcipher_request *req = context;
2015         struct ablkcipher_edesc *edesc;
2016 #ifdef DEBUG
2017         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2018         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2019
2020         dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
2021 #endif
2022
2023         edesc = (struct ablkcipher_edesc *)((char *)desc -
2024                  offsetof(struct ablkcipher_edesc, hw_desc));
2025         if (err)
2026                 caam_jr_strstatus(jrdev, err);
2027
2028 #ifdef DEBUG
2029         print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
2030                        DUMP_PREFIX_ADDRESS, 16, 4, req->info,
2031                        ivsize, 1);
2032         print_hex_dump(KERN_ERR, "dst    @"__stringify(__LINE__)": ",
2033                        DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
2034                        edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
2035 #endif
2036
2037         ablkcipher_unmap(jrdev, edesc, req);
2038         kfree(edesc);
2039
2040         ablkcipher_request_complete(req, err);
2041 }
2042
2043 /*
2044  * Fill in aead job descriptor
2045  */
2046 static void init_aead_job(struct aead_request *req,
2047                           struct aead_edesc *edesc,
2048                           bool all_contig, bool encrypt)
2049 {
2050         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2051         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2052         int authsize = ctx->authsize;
2053         u32 *desc = edesc->hw_desc;
2054         u32 out_options, in_options;
2055         dma_addr_t dst_dma, src_dma;
2056         int len, sec4_sg_index = 0;
2057         dma_addr_t ptr;
2058         u32 *sh_desc;
2059
2060         sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
2061         ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
2062
2063         len = desc_len(sh_desc);
2064         init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
2065
2066         if (all_contig) {
2067                 src_dma = sg_dma_address(req->src);
2068                 in_options = 0;
2069         } else {
2070                 src_dma = edesc->sec4_sg_dma;
2071                 sec4_sg_index += edesc->src_nents;
2072                 in_options = LDST_SGF;
2073         }
2074
2075         append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
2076                           in_options);
2077
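             /* Assume in-place operation; overridden below when req->dst differs */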
2078         dst_dma = src_dma;
2079         out_options = in_options;
2080
2081         if (unlikely(req->src != req->dst)) {
2082                 if (!edesc->dst_nents) {
2083                         dst_dma = sg_dma_address(req->dst);
2084                 } else {
2085                         dst_dma = edesc->sec4_sg_dma +
2086                                   sec4_sg_index *
2087                                   sizeof(struct sec4_sg_entry);
2088                         out_options = LDST_SGF;
2089                 }
2090         }
2091
2092         if (encrypt)
2093                 append_seq_out_ptr(desc, dst_dma,
2094                                    req->assoclen + req->cryptlen + authsize,
2095                                    out_options);
2096         else
2097                 append_seq_out_ptr(desc, dst_dma,
2098                                    req->assoclen + req->cryptlen - authsize,
2099                                    out_options);
2100
2101         /* REG3 = assoclen */
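             /*
              * Note (added explanation): the aead shared descriptors expect the
              * associated data length in MATH register 3, so it is passed here
              * as an immediate add into REG3.
              */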
2102         append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
2103 }
2104
2105 static void init_gcm_job(struct aead_request *req,
2106                          struct aead_edesc *edesc,
2107                          bool all_contig, bool encrypt)
2108 {
2109         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2110         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2111         unsigned int ivsize = crypto_aead_ivsize(aead);
2112         u32 *desc = edesc->hw_desc;
2113         bool generic_gcm = (ivsize == 12);
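             /*
              * Only gcm(aes) passes a full 12-byte IV; rfc4106/rfc4543 supply an
              * 8-byte IV, and the 4-byte salt kept after the AES key completes
              * the 96-bit GCM IV (appended further below).
              */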
2114         unsigned int last;
2115
2116         init_aead_job(req, edesc, all_contig, encrypt);
2117
2118         /* BUG This should not be specific to generic GCM. */
2119         last = 0;
2120         if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
2121                 last = FIFOLD_TYPE_LAST1;
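             /*
              * With no AAD and no payload, the IV FIFO LOAD below is the only
              * class 1 input, so it must carry LAST1 to terminate the sequence.
              */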
2122
2123         /* Read GCM IV */
2124         append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
2125                          FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
2126         /* Append Salt */
2127         if (!generic_gcm)
2128                 append_data(desc, ctx->key + ctx->enckeylen, 4);
2129         /* Append IV */
2130         append_data(desc, req->iv, ivsize);
2131         /* End of blank commands */
2132 }
2133
2134 static void init_authenc_job(struct aead_request *req,
2135                              struct aead_edesc *edesc,
2136                              bool all_contig, bool encrypt)
2137 {
2138         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2139         struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
2140                                                  struct caam_aead_alg, aead);
2141         unsigned int ivsize = crypto_aead_ivsize(aead);
2142         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2143         const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
2144                                OP_ALG_AAI_CTR_MOD128);
2145         const bool is_rfc3686 = alg->caam.rfc3686;
2146         u32 *desc = edesc->hw_desc;
2147         u32 ivoffset = 0;
2148
2149         /*
2150          * AES-CTR needs to load IV in CONTEXT1 reg
2151          * at an offset of 128bits (16bytes)
2152          * CONTEXT1[255:128] = IV
2153          */
2154         if (ctr_mode)
2155                 ivoffset = 16;
2156
2157         /*
2158          * RFC3686 specific:
2159          *      CONTEXT1[255:128] = {NONCE, IV, COUNTER}
2160          */
2161         if (is_rfc3686)
2162                 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
2163
2164         init_aead_job(req, edesc, all_contig, encrypt);
2165
2166         if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
2167                 append_load_as_imm(desc, req->iv, ivsize,
2168                                    LDST_CLASS_1_CCB |
2169                                    LDST_SRCDST_BYTE_CONTEXT |
2170                                    (ivoffset << LDST_OFFSET_SHIFT));
2171 }
2172
2173 /*
2174  * Fill in ablkcipher job descriptor
2175  */
2176 static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
2177                                 struct ablkcipher_edesc *edesc,
2178                                 struct ablkcipher_request *req,
2179                                 bool iv_contig)
2180 {
2181         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2182         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2183         u32 *desc = edesc->hw_desc;
2184         u32 out_options = 0, in_options;
2185         dma_addr_t dst_dma, src_dma;
2186         int len, sec4_sg_index = 0;
2187
2188 #ifdef DEBUG
2189         print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
2190                        DUMP_PREFIX_ADDRESS, 16, 4, req->info,
2191                        ivsize, 1);
2192         print_hex_dump(KERN_ERR, "src    @"__stringify(__LINE__)": ",
2193                        DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
2194                        edesc->src_nents ? 100 : req->nbytes, 1);
2195 #endif
2196
2197         len = desc_len(sh_desc);
2198         init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
2199
2200         if (iv_contig) {
2201                 src_dma = edesc->iv_dma;
2202                 in_options = 0;
2203         } else {
2204                 src_dma = edesc->sec4_sg_dma;
2205                 sec4_sg_index += edesc->src_nents + 1;
2206                 in_options = LDST_SGF;
2207         }
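             /*
              * The input sequence is the IV immediately followed by the payload,
              * hence nbytes + ivsize; the output sequence below carries the
              * payload only.
              */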
2208         append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);
2209
2210         if (likely(req->src == req->dst)) {
2211                 if (!edesc->src_nents && iv_contig) {
2212                         dst_dma = sg_dma_address(req->src);
2213                 } else {
2214                         dst_dma = edesc->sec4_sg_dma +
2215                                 sizeof(struct sec4_sg_entry);
2216                         out_options = LDST_SGF;
2217                 }
2218         } else {
2219                 if (!edesc->dst_nents) {
2220                         dst_dma = sg_dma_address(req->dst);
2221                 } else {
2222                         dst_dma = edesc->sec4_sg_dma +
2223                                 sec4_sg_index * sizeof(struct sec4_sg_entry);
2224                         out_options = LDST_SGF;
2225                 }
2226         }
2227         append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
2228 }
2229
2230 /*
2231  * Fill in ablkcipher givencrypt job descriptor
2232  */
2233 static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
2234                                     struct ablkcipher_edesc *edesc,
2235                                     struct ablkcipher_request *req,
2236                                     bool iv_contig)
2237 {
2238         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2239         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2240         u32 *desc = edesc->hw_desc;
2241         u32 out_options, in_options;
2242         dma_addr_t dst_dma, src_dma;
2243         int len, sec4_sg_index = 0;
2244
2245 #ifdef DEBUG
2246         print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
2247                        DUMP_PREFIX_ADDRESS, 16, 4, req->info,
2248                        ivsize, 1);
2249         print_hex_dump(KERN_ERR, "src    @" __stringify(__LINE__) ": ",
2250                        DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
2251                        edesc->src_nents ? 100 : req->nbytes, 1);
2252 #endif
2253
2254         len = desc_len(sh_desc);
2255         init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
2256
2257         if (!edesc->src_nents) {
2258                 src_dma = sg_dma_address(req->src);
2259                 in_options = 0;
2260         } else {
2261                 src_dma = edesc->sec4_sg_dma;
2262                 sec4_sg_index += edesc->src_nents;
2263                 in_options = LDST_SGF;
2264         }
2265         append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);
2266
2267         if (iv_contig) {
2268                 dst_dma = edesc->iv_dma;
2269                 out_options = 0;
2270         } else {
2271                 dst_dma = edesc->sec4_sg_dma +
2272                           sec4_sg_index * sizeof(struct sec4_sg_entry);
2273                 out_options = LDST_SGF;
2274         }
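             /*
              * The generated IV is stored ahead of the ciphertext, so the output
              * sequence is ivsize + nbytes.
              */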
2275         append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
2276 }
2277
2278 /*
2279  * allocate and map the aead extended descriptor
2280  */
2281 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
2282                                            int desc_bytes, bool *all_contig_ptr,
2283                                            bool encrypt)
2284 {
2285         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2286         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2287         struct device *jrdev = ctx->jrdev;
2288         gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2289                        CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
2290         int src_nents, dst_nents = 0;
2291         struct aead_edesc *edesc;
2292         int sgc;
2293         bool all_contig = true;
2294         int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes;
2295         unsigned int authsize = ctx->authsize;
2296
2297         if (unlikely(req->dst != req->src)) {
2298                 src_nents = sg_count(req->src, req->assoclen + req->cryptlen);
2299                 dst_nents = sg_count(req->dst,
2300                                      req->assoclen + req->cryptlen +
2301                                         (encrypt ? authsize : (-authsize)));
2302         } else {
2303                 src_nents = sg_count(req->src,
2304                                      req->assoclen + req->cryptlen +
2305                                         (encrypt ? authsize : 0));
2306         }
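             /*
              * Encryption appends the authsize-byte ICV to the output, so the
              * destination must cover it; decryption consumes the ICV, so the
              * output shrinks by authsize.
              */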
2307
2308         /* Check if data are contiguous. */
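             /*
              * sg_count() returns 0 when everything fits in one segment, so any
              * non-zero count means a link table is needed.
              */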
2309         all_contig = !src_nents;
2310         if (!all_contig) {
2311                 src_nents = src_nents ? : 1;
2312                 sec4_sg_len = src_nents;
2313         }
2314
2315         sec4_sg_len += dst_nents;
2316
2317         sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
2318
2319         /* allocate space for base edesc and hw desc commands, link tables */
2320         edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
2321                         GFP_DMA | flags);
2322         if (!edesc) {
2323                 dev_err(jrdev, "could not allocate extended descriptor\n");
2324                 return ERR_PTR(-ENOMEM);
2325         }
2326
2327         if (likely(req->src == req->dst)) {
2328                 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2329                                  DMA_BIDIRECTIONAL);
2330                 if (unlikely(!sgc)) {
2331                         dev_err(jrdev, "unable to map source\n");
2332                         kfree(edesc);
2333                         return ERR_PTR(-ENOMEM);
2334                 }
2335         } else {
2336                 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2337                                  DMA_TO_DEVICE);
2338                 if (unlikely(!sgc)) {
2339                         dev_err(jrdev, "unable to map source\n");
2340                         kfree(edesc);
2341                         return ERR_PTR(-ENOMEM);
2342                 }
2343
2344                 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
2345                                  DMA_FROM_DEVICE);
2346                 if (unlikely(!sgc)) {
2347                         dev_err(jrdev, "unable to map destination\n");
2348                         dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
2349                                      DMA_TO_DEVICE);
2350                         kfree(edesc);
2351                         return ERR_PTR(-ENOMEM);
2352                 }
2353         }
2354
2355         edesc->src_nents = src_nents;
2356         edesc->dst_nents = dst_nents;
2357         edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
2358                          desc_bytes;
2359         *all_contig_ptr = all_contig;
2360
2361         sec4_sg_index = 0;
2362         if (!all_contig) {
2363                 sg_to_sec4_sg_last(req->src, src_nents,
2364                               edesc->sec4_sg + sec4_sg_index, 0);
2365                 sec4_sg_index += src_nents;
2366         }
2367         if (dst_nents) {
2368                 sg_to_sec4_sg_last(req->dst, dst_nents,
2369                                    edesc->sec4_sg + sec4_sg_index, 0);
2370         }
2371
2372         if (!sec4_sg_bytes)
2373                 return edesc;
2374
2375         edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
2376                                             sec4_sg_bytes, DMA_TO_DEVICE);
2377         if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
2378                 dev_err(jrdev, "unable to map S/G table\n");
2379                 aead_unmap(jrdev, edesc, req);
2380                 kfree(edesc);
2381                 return ERR_PTR(-ENOMEM);
2382         }
2383
2384         edesc->sec4_sg_bytes = sec4_sg_bytes;
2385
2386         return edesc;
2387 }
2388
2389 static int gcm_encrypt(struct aead_request *req)
2390 {
2391         struct aead_edesc *edesc;
2392         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2393         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2394         struct device *jrdev = ctx->jrdev;
2395         bool all_contig;
2396         u32 *desc;
2397         int ret = 0;
2398
2399         /* allocate extended descriptor */
2400         edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
2401         if (IS_ERR(edesc))
2402                 return PTR_ERR(edesc);
2403
2404         /* Create and submit job descriptor */
2405         init_gcm_job(req, edesc, all_contig, true);
2406 #ifdef DEBUG
2407         print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2408                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2409                        desc_bytes(edesc->hw_desc), 1);
2410 #endif
2411
2412         desc = edesc->hw_desc;
2413         ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
2414         if (!ret) {
2415                 ret = -EINPROGRESS;
2416         } else {
2417                 aead_unmap(jrdev, edesc, req);
2418                 kfree(edesc);
2419         }
2420
2421         return ret;
2422 }
2423
2424 static int ipsec_gcm_encrypt(struct aead_request *req)
2425 {
2426         if (req->assoclen < 8)
2427                 return -EINVAL;
2428
2429         return gcm_encrypt(req);
2430 }
2431
2432 static int aead_encrypt(struct aead_request *req)
2433 {
2434         struct aead_edesc *edesc;
2435         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2436         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2437         struct device *jrdev = ctx->jrdev;
2438         bool all_contig;
2439         u32 *desc;
2440         int ret = 0;
2441
2442         /* allocate extended descriptor */
2443         edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
2444                                  &all_contig, true);
2445         if (IS_ERR(edesc))
2446                 return PTR_ERR(edesc);
2447
2448         /* Create and submit job descriptor */
2449         init_authenc_job(req, edesc, all_contig, true);
2450 #ifdef DEBUG
2451         print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2452                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2453                        desc_bytes(edesc->hw_desc), 1);
2454 #endif
2455
2456         desc = edesc->hw_desc;
2457         ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
2458         if (!ret) {
2459                 ret = -EINPROGRESS;
2460         } else {
2461                 aead_unmap(jrdev, edesc, req);
2462                 kfree(edesc);
2463         }
2464
2465         return ret;
2466 }
2467
2468 static int gcm_decrypt(struct aead_request *req)
2469 {
2470         struct aead_edesc *edesc;
2471         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2472         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2473         struct device *jrdev = ctx->jrdev;
2474         bool all_contig;
2475         u32 *desc;
2476         int ret = 0;
2477
2478         /* allocate extended descriptor */
2479         edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
2480         if (IS_ERR(edesc))
2481                 return PTR_ERR(edesc);
2482
2483         /* Create and submit job descriptor */
2484         init_gcm_job(req, edesc, all_contig, false);
2485 #ifdef DEBUG
2486         print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2487                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2488                        desc_bytes(edesc->hw_desc), 1);
2489 #endif
2490
2491         desc = edesc->hw_desc;
2492         ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
2493         if (!ret) {
2494                 ret = -EINPROGRESS;
2495         } else {
2496                 aead_unmap(jrdev, edesc, req);
2497                 kfree(edesc);
2498         }
2499
2500         return ret;
2501 }
2502
2503 static int ipsec_gcm_decrypt(struct aead_request *req)
2504 {
2505         if (req->assoclen < 8)
2506                 return -EINVAL;
2507
2508         return gcm_decrypt(req);
2509 }
2510
2511 static int aead_decrypt(struct aead_request *req)
2512 {
2513         struct aead_edesc *edesc;
2514         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2515         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2516         struct device *jrdev = ctx->jrdev;
2517         bool all_contig;
2518         u32 *desc;
2519         int ret = 0;
2520
2521         /* allocate extended descriptor */
2522         edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
2523                                  &all_contig, false);
2524         if (IS_ERR(edesc))
2525                 return PTR_ERR(edesc);
2526
2527 #ifdef DEBUG
2528         print_hex_dump(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
2529                        DUMP_PREFIX_ADDRESS, 16, 4, sg_virt(req->src),
2530                        req->assoclen + req->cryptlen, 1);
2531 #endif
2532
2533         /* Create and submit job descriptor */
2534         init_authenc_job(req, edesc, all_contig, false);
2535 #ifdef DEBUG
2536         print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2537                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2538                        desc_bytes(edesc->hw_desc), 1);
2539 #endif
2540
2541         desc = edesc->hw_desc;
2542         ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
2543         if (!ret) {
2544                 ret = -EINPROGRESS;
2545         } else {
2546                 aead_unmap(jrdev, edesc, req);
2547                 kfree(edesc);
2548         }
2549
2550         return ret;
2551 }
2552
2553 /*
2554  * allocate and map the ablkcipher extended descriptor
2555  */
2556 static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
2557                                                        *req, int desc_bytes,
2558                                                        bool *iv_contig_out)
2559 {
2560         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2561         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2562         struct device *jrdev = ctx->jrdev;
2563         gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2564                                           CRYPTO_TFM_REQ_MAY_SLEEP)) ?
2565                        GFP_KERNEL : GFP_ATOMIC;
2566         int src_nents, dst_nents = 0, sec4_sg_bytes;
2567         struct ablkcipher_edesc *edesc;
2568         dma_addr_t iv_dma = 0;
2569         bool iv_contig = false;
2570         int sgc;
2571         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2572         int sec4_sg_index;
2573
2574         src_nents = sg_count(req->src, req->nbytes);
2575
2576         if (req->dst != req->src)
2577                 dst_nents = sg_count(req->dst, req->nbytes);
2578
2579         if (likely(req->src == req->dst)) {
2580                 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2581                                  DMA_BIDIRECTIONAL);
2582         } else {
2583                 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2584                                  DMA_TO_DEVICE);
2585                 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
2586                                  DMA_FROM_DEVICE);
2587         }
2588
2589         iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
2590         if (dma_mapping_error(jrdev, iv_dma)) {
2591                 dev_err(jrdev, "unable to map IV\n");
2592                 return ERR_PTR(-ENOMEM);
2593         }
2594
2595         /*
2596          * Check if the IV can be contiguous with the source.
2597          * If so, include it. If not, create scatterlist.
2598          */
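             /*
              * Note (added explanation): the IV was mapped separately just above;
              * if its DMA address ends exactly where the lone source segment
              * begins, the hardware can read IV and payload as one contiguous
              * run and no link table entry is needed for the IV.
              */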
2599         if (!src_nents && iv_dma + ivsize == sg_dma_address(req->src))
2600                 iv_contig = true;
2601         else
2602                 src_nents = src_nents ? : 1;
2603         sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
2604                         sizeof(struct sec4_sg_entry);
2605
2606         /* allocate space for base edesc and hw desc commands, link tables */
2607         edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
2608                         GFP_DMA | flags);
2609         if (!edesc) {
2610                 dev_err(jrdev, "could not allocate extended descriptor\n");
2611                 return ERR_PTR(-ENOMEM);
2612         }
2613
2614         edesc->src_nents = src_nents;
2615         edesc->dst_nents = dst_nents;
2616         edesc->sec4_sg_bytes = sec4_sg_bytes;
2617         edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
2618                          desc_bytes;
2619
2620         sec4_sg_index = 0;
2621         if (!iv_contig) {
2622                 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
2623                 sg_to_sec4_sg_last(req->src, src_nents,
2624                                    edesc->sec4_sg + 1, 0);
2625                 sec4_sg_index += 1 + src_nents;
2626         }
2627
2628         if (dst_nents) {
2629                 sg_to_sec4_sg_last(req->dst, dst_nents,
2630                         edesc->sec4_sg + sec4_sg_index, 0);
2631         }
2632
2633         edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
2634                                             sec4_sg_bytes, DMA_TO_DEVICE);
2635         if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
2636                 dev_err(jrdev, "unable to map S/G table\n");
2637                 return ERR_PTR(-ENOMEM);
2638         }
2639
2640         edesc->iv_dma = iv_dma;
2641
2642 #ifdef DEBUG
2643         print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
2644                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
2645                        sec4_sg_bytes, 1);
2646 #endif
2647
2648         *iv_contig_out = iv_contig;
2649         return edesc;
2650 }
2651
2652 static int ablkcipher_encrypt(struct ablkcipher_request *req)
2653 {
2654         struct ablkcipher_edesc *edesc;
2655         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2656         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2657         struct device *jrdev = ctx->jrdev;
2658         bool iv_contig;
2659         u32 *desc;
2660         int ret = 0;
2661
2662         /* allocate extended descriptor */
2663         edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
2664                                        CAAM_CMD_SZ, &iv_contig);
2665         if (IS_ERR(edesc))
2666                 return PTR_ERR(edesc);
2667
2668         /* Create and submit job descriptor */
2669         init_ablkcipher_job(ctx->sh_desc_enc,
2670                 ctx->sh_desc_enc_dma, edesc, req, iv_contig);
2671 #ifdef DEBUG
2672         print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
2673                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2674                        desc_bytes(edesc->hw_desc), 1);
2675 #endif
2676         desc = edesc->hw_desc;
2677         ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
2678
2679         if (!ret) {
2680                 ret = -EINPROGRESS;
2681         } else {
2682                 ablkcipher_unmap(jrdev, edesc, req);
2683                 kfree(edesc);
2684         }
2685
2686         return ret;
2687 }
2688
2689 static int ablkcipher_decrypt(struct ablkcipher_request *req)
2690 {
2691         struct ablkcipher_edesc *edesc;
2692         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2693         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2694         struct device *jrdev = ctx->jrdev;
2695         bool iv_contig;
2696         u32 *desc;
2697         int ret = 0;
2698
2699         /* allocate extended descriptor */
2700         edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
2701                                        CAAM_CMD_SZ, &iv_contig);
2702         if (IS_ERR(edesc))
2703                 return PTR_ERR(edesc);
2704
2705         /* Create and submit job descriptor */
2706         init_ablkcipher_job(ctx->sh_desc_dec,
2707                 ctx->sh_desc_dec_dma, edesc, req, iv_contig);
2708         desc = edesc->hw_desc;
2709 #ifdef DEBUG
2710         print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
2711                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2712                        desc_bytes(edesc->hw_desc), 1);
2713 #endif
2714
2715         ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
2716         if (!ret) {
2717                 ret = -EINPROGRESS;
2718         } else {
2719                 ablkcipher_unmap(jrdev, edesc, req);
2720                 kfree(edesc);
2721         }
2722
2723         return ret;
2724 }
2725
2726 /*
2727  * allocate and map the ablkcipher extended descriptor
2728  * for ablkcipher givencrypt
2729  */
2730 static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
2731                                 struct skcipher_givcrypt_request *greq,
2732                                 int desc_bytes,
2733                                 bool *iv_contig_out)
2734 {
2735         struct ablkcipher_request *req = &greq->creq;
2736         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2737         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2738         struct device *jrdev = ctx->jrdev;
2739         gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2740                                           CRYPTO_TFM_REQ_MAY_SLEEP)) ?
2741                        GFP_KERNEL : GFP_ATOMIC;
2742         int src_nents, dst_nents = 0, sec4_sg_bytes;
2743         struct ablkcipher_edesc *edesc;
2744         dma_addr_t iv_dma = 0;
2745         bool iv_contig = false;
2746         int sgc;
2747         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2748         int sec4_sg_index;
2749
2750         src_nents = sg_count(req->src, req->nbytes);
2751
2752         if (unlikely(req->dst != req->src))
2753                 dst_nents = sg_count(req->dst, req->nbytes);
2754
2755         if (likely(req->src == req->dst)) {
2756                 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2757                                  DMA_BIDIRECTIONAL);
2758         } else {
2759                 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2760                                  DMA_TO_DEVICE);
2761                 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
2762                                  DMA_FROM_DEVICE);
2763         }
2764
2765         /*
2766          * Check if the generated IV can be contiguous with the destination.
2767          * If so, include it directly; if not, give it its own S/G entry.
2768          */
2769         iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
2770         if (dma_mapping_error(jrdev, iv_dma)) {
2771                 dev_err(jrdev, "unable to map IV\n");
2772                 return ERR_PTR(-ENOMEM);
2773         }
2774
2775         if (!dst_nents && iv_dma + ivsize == sg_dma_address(req->dst))
2776                 iv_contig = true;
2777         else
2778                 dst_nents = dst_nents ? : 1;
2779         sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
2780                         sizeof(struct sec4_sg_entry);
2781
2782         /* allocate space for base edesc and hw desc commands, link tables */
2783         edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
2784                         GFP_DMA | flags);
2785         if (!edesc) {
2786                 dev_err(jrdev, "could not allocate extended descriptor\n");
2787                 return ERR_PTR(-ENOMEM);
2788         }
2789
2790         edesc->src_nents = src_nents;
2791         edesc->dst_nents = dst_nents;
2792         edesc->sec4_sg_bytes = sec4_sg_bytes;
2793         edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
2794                          desc_bytes;
2795
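             /*
              * Build the S/G table for givencrypt: source segments first,
              * then (if the generated IV is not contiguous with the output)
              * an entry for the IV followed by the destination segments.
              */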
2796         sec4_sg_index = 0;
2797         if (src_nents) {
2798                 sg_to_sec4_sg_last(req->src, src_nents, edesc->sec4_sg, 0);
2799                 sec4_sg_index += src_nents;
2800         }
2801
2802         if (!iv_contig) {
2803                 dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
2804                                    iv_dma, ivsize, 0);
2805                 sec4_sg_index += 1;
2806                 sg_to_sec4_sg_last(req->dst, dst_nents,
2807                                    edesc->sec4_sg + sec4_sg_index, 0);
2808         }
2809
2810         edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
2811                                             sec4_sg_bytes, DMA_TO_DEVICE);
2812         if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
2813                 dev_err(jrdev, "unable to map S/G table\n");
2814                 return ERR_PTR(-ENOMEM);
2815         }
2816         edesc->iv_dma = iv_dma;
2817
2818 #ifdef DEBUG
2819         print_hex_dump(KERN_ERR,
2820                        "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
2821                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
2822                        sec4_sg_bytes, 1);
2823 #endif
2824
2825         *iv_contig_out = iv_contig;
2826         return edesc;
2827 }
2828
2829 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
2830 {
2831         struct ablkcipher_request *req = &creq->creq;
2832         struct ablkcipher_edesc *edesc;
2833         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2834         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2835         struct device *jrdev = ctx->jrdev;
2836         bool iv_contig;
2837         u32 *desc;
2838         int ret = 0;
2839
2840         /* allocate extended descriptor */
2841         edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
2842                                        CAAM_CMD_SZ, &iv_contig);
2843         if (IS_ERR(edesc))
2844                 return PTR_ERR(edesc);
2845
2846         /* Create and submit job descriptor */
2847         init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
2848                                 edesc, req, iv_contig);
2849 #ifdef DEBUG
2850         print_hex_dump(KERN_ERR,
2851                        "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
2852                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2853                        desc_bytes(edesc->hw_desc), 1);
2854 #endif
2855         desc = edesc->hw_desc;
2856         ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
2857
2858         if (!ret) {
2859                 ret = -EINPROGRESS;
2860         } else {
2861                 ablkcipher_unmap(jrdev, edesc, req);
2862                 kfree(edesc);
2863         }
2864
2865         return ret;
2866 }
2867
2868 #define template_aead           template_u.aead
2869 #define template_ablkcipher     template_u.ablkcipher
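/*
 * Template describing one (a)blkcipher algorithm exported by this driver;
 * the driver_algs[] entries below are turned into full crypto_alg
 * registrations when the driver initializes.
 */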
2870 struct caam_alg_template {
2871         char name[CRYPTO_MAX_ALG_NAME];
2872         char driver_name[CRYPTO_MAX_ALG_NAME];
2873         unsigned int blocksize;
2874         u32 type;
2875         union {
2876                 struct ablkcipher_alg ablkcipher;
2877         } template_u;
2878         u32 class1_alg_type;
2879         u32 class2_alg_type;
2880         u32 alg_op;
2881 };
2882
2883 static struct caam_alg_template driver_algs[] = {
2884         /* ablkcipher descriptor */
2885         {
2886                 .name = "cbc(aes)",
2887                 .driver_name = "cbc-aes-caam",
2888                 .blocksize = AES_BLOCK_SIZE,
2889                 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2890                 .template_ablkcipher = {
2891                         .setkey = ablkcipher_setkey,
2892                         .encrypt = ablkcipher_encrypt,
2893                         .decrypt = ablkcipher_decrypt,
2894                         .givencrypt = ablkcipher_givencrypt,
2895                         .geniv = "<built-in>",
2896                         .min_keysize = AES_MIN_KEY_SIZE,
2897                         .max_keysize = AES_MAX_KEY_SIZE,
2898                         .ivsize = AES_BLOCK_SIZE,
2899                         },
2900                 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2901         },
2902         {
2903                 .name = "cbc(des3_ede)",
2904                 .driver_name = "cbc-3des-caam",
2905                 .blocksize = DES3_EDE_BLOCK_SIZE,
2906                 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2907                 .template_ablkcipher = {
2908                         .setkey = ablkcipher_setkey,
2909                         .encrypt = ablkcipher_encrypt,
2910                         .decrypt = ablkcipher_decrypt,
2911                         .givencrypt = ablkcipher_givencrypt,
2912                         .geniv = "<built-in>",
2913                         .min_keysize = DES3_EDE_KEY_SIZE,
2914                         .max_keysize = DES3_EDE_KEY_SIZE,
2915                         .ivsize = DES3_EDE_BLOCK_SIZE,
2916                         },
2917                 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2918         },
2919         {
2920                 .name = "cbc(des)",
2921                 .driver_name = "cbc-des-caam",
2922                 .blocksize = DES_BLOCK_SIZE,
2923                 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2924                 .template_ablkcipher = {
2925                         .setkey = ablkcipher_setkey,
2926                         .encrypt = ablkcipher_encrypt,
2927                         .decrypt = ablkcipher_decrypt,
2928                         .givencrypt = ablkcipher_givencrypt,
2929                         .geniv = "<built-in>",
2930                         .min_keysize = DES_KEY_SIZE,
2931                         .max_keysize = DES_KEY_SIZE,
2932                         .ivsize = DES_BLOCK_SIZE,
2933                         },
2934                 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2935         },
2936         {
2937                 .name = "ctr(aes)",
2938                 .driver_name = "ctr-aes-caam",
2939                 .blocksize = 1,
2940                 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2941                 .template_ablkcipher = {
2942                         .setkey = ablkcipher_setkey,
2943                         .encrypt = ablkcipher_encrypt,
2944                         .decrypt = ablkcipher_decrypt,
2945                         .geniv = "chainiv",
2946                         .min_keysize = AES_MIN_KEY_SIZE,
2947                         .max_keysize = AES_MAX_KEY_SIZE,
2948                         .ivsize = AES_BLOCK_SIZE,
2949                         },
2950                 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
2951         },
2952         {
2953                 .name = "rfc3686(ctr(aes))",
2954                 .driver_name = "rfc3686-ctr-aes-caam",
2955                 .blocksize = 1,
2956                 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2957                 .template_ablkcipher = {
2958                         .setkey = ablkcipher_setkey,
2959                         .encrypt = ablkcipher_encrypt,
2960                         .decrypt = ablkcipher_decrypt,
2961                         .givencrypt = ablkcipher_givencrypt,
2962                         .geniv = "<built-in>",
2963                         .min_keysize = AES_MIN_KEY_SIZE +
2964                                        CTR_RFC3686_NONCE_SIZE,
2965                         .max_keysize = AES_MAX_KEY_SIZE +
2966                                        CTR_RFC3686_NONCE_SIZE,
2967                         .ivsize = CTR_RFC3686_IV_SIZE,
2968                         },
2969                 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
2970         },
2971         {
2972                 .name = "xts(aes)",
2973                 .driver_name = "xts-aes-caam",
2974                 .blocksize = AES_BLOCK_SIZE,
2975                 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2976                 .template_ablkcipher = {
2977                         .setkey = xts_ablkcipher_setkey,
2978                         .encrypt = ablkcipher_encrypt,
2979                         .decrypt = ablkcipher_decrypt,
2980                         .geniv = "eseqiv",
2981                         .min_keysize = 2 * AES_MIN_KEY_SIZE,
2982                         .max_keysize = 2 * AES_MAX_KEY_SIZE,
2983                         .ivsize = AES_BLOCK_SIZE,
2984                         },
2985                 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
2986         },
2987 };
2988
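/*
 * AEAD algorithms are registered through struct aead_alg; the .caam
 * member carries the CAAM class 1 (cipher), class 2 (auth) and split-key
 * operation settings used when the shared descriptors are built for each
 * transform.
 */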
2989 static struct caam_aead_alg driver_aeads[] = {
2990         {
2991                 .aead = {
2992                         .base = {
2993                                 .cra_name = "rfc4106(gcm(aes))",
2994                                 .cra_driver_name = "rfc4106-gcm-aes-caam",
2995                                 .cra_blocksize = 1,
2996                         },
2997                         .setkey = rfc4106_setkey,
2998                         .setauthsize = rfc4106_setauthsize,
2999                         .encrypt = ipsec_gcm_encrypt,
3000                         .decrypt = ipsec_gcm_decrypt,
3001                         .ivsize = 8,
3002                         .maxauthsize = AES_BLOCK_SIZE,
3003                 },
3004                 .caam = {
3005                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3006                 },
3007         },
3008         {
3009                 .aead = {
3010                         .base = {
3011                                 .cra_name = "rfc4543(gcm(aes))",
3012                                 .cra_driver_name = "rfc4543-gcm-aes-caam",
3013                                 .cra_blocksize = 1,
3014                         },
3015                         .setkey = rfc4543_setkey,
3016                         .setauthsize = rfc4543_setauthsize,
3017                         .encrypt = ipsec_gcm_encrypt,
3018                         .decrypt = ipsec_gcm_decrypt,
3019                         .ivsize = 8,
3020                         .maxauthsize = AES_BLOCK_SIZE,
3021                 },
3022                 .caam = {
3023                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3024                 },
3025         },
3026         /* Galois Counter Mode */
3027         {
3028                 .aead = {
3029                         .base = {
3030                                 .cra_name = "gcm(aes)",
3031                                 .cra_driver_name = "gcm-aes-caam",
3032                                 .cra_blocksize = 1,
3033                         },
3034                         .setkey = gcm_setkey,
3035                         .setauthsize = gcm_setauthsize,
3036                         .encrypt = gcm_encrypt,
3037                         .decrypt = gcm_decrypt,
3038                         .ivsize = 12,
3039                         .maxauthsize = AES_BLOCK_SIZE,
3040                 },
3041                 .caam = {
3042                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3043                 },
3044         },
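        /*
         * Illustrative only (not part of this driver): kernel users reach
         * these implementations through the generic AEAD API, e.g.
         *
         *   struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
         *   crypto_aead_setkey(tfm, key, keylen);
         *   crypto_aead_setauthsize(tfm, 16);
         *
         * with the CAAM implementation selected when its cra_priority wins
         * over other registered "gcm(aes)" providers.
         */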
3045         /* single-pass ipsec_esp descriptor */
3046         {
3047                 .aead = {
3048                         .base = {
3049                                 .cra_name = "authenc(hmac(md5),"
3050                                             "ecb(cipher_null))",
3051                                 .cra_driver_name = "authenc-hmac-md5-"
3052                                                    "ecb-cipher_null-caam",
3053                                 .cra_blocksize = NULL_BLOCK_SIZE,
3054                         },
3055                         .setkey = aead_setkey,
3056                         .setauthsize = aead_setauthsize,
3057                         .encrypt = aead_encrypt,
3058                         .decrypt = aead_decrypt,
3059                         .ivsize = NULL_IV_SIZE,
3060                         .maxauthsize = MD5_DIGEST_SIZE,
3061                 },
3062                 .caam = {
3063                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3064                                            OP_ALG_AAI_HMAC_PRECOMP,
3065                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3066                 },
3067         },
3068         {
3069                 .aead = {
3070                         .base = {
3071                                 .cra_name = "authenc(hmac(sha1),"
3072                                             "ecb(cipher_null))",
3073                                 .cra_driver_name = "authenc-hmac-sha1-"
3074                                                    "ecb-cipher_null-caam",
3075                                 .cra_blocksize = NULL_BLOCK_SIZE,
3076                         },
3077                         .setkey = aead_setkey,
3078                         .setauthsize = aead_setauthsize,
3079                         .encrypt = aead_encrypt,
3080                         .decrypt = aead_decrypt,
3081                         .ivsize = NULL_IV_SIZE,
3082                         .maxauthsize = SHA1_DIGEST_SIZE,
3083                 },
3084                 .caam = {
3085                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3086                                            OP_ALG_AAI_HMAC_PRECOMP,
3087                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3088                 },
3089         },
3090         {
3091                 .aead = {
3092                         .base = {
3093                                 .cra_name = "authenc(hmac(sha224),"
3094                                             "ecb(cipher_null))",
3095                                 .cra_driver_name = "authenc-hmac-sha224-"
3096                                                    "ecb-cipher_null-caam",
3097                                 .cra_blocksize = NULL_BLOCK_SIZE,
3098                         },
3099                         .setkey = aead_setkey,
3100                         .setauthsize = aead_setauthsize,
3101                         .encrypt = aead_encrypt,
3102                         .decrypt = aead_decrypt,
3103                         .ivsize = NULL_IV_SIZE,
3104                         .maxauthsize = SHA224_DIGEST_SIZE,
3105                 },
3106                 .caam = {
3107                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3108                                            OP_ALG_AAI_HMAC_PRECOMP,
3109                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3110                 },
3111         },
3112         {
3113                 .aead = {
3114                         .base = {
3115                                 .cra_name = "authenc(hmac(sha256),"
3116                                             "ecb(cipher_null))",
3117                                 .cra_driver_name = "authenc-hmac-sha256-"
3118                                                    "ecb-cipher_null-caam",
3119                                 .cra_blocksize = NULL_BLOCK_SIZE,
3120                         },
3121                         .setkey = aead_setkey,
3122                         .setauthsize = aead_setauthsize,
3123                         .encrypt = aead_encrypt,
3124                         .decrypt = aead_decrypt,
3125                         .ivsize = NULL_IV_SIZE,
3126                         .maxauthsize = SHA256_DIGEST_SIZE,
3127                 },
3128                 .caam = {
3129                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3130                                            OP_ALG_AAI_HMAC_PRECOMP,
3131                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3132                 },
3133         },
3134         {
3135                 .aead = {
3136                         .base = {
3137                                 .cra_name = "authenc(hmac(sha384),"
3138                                             "ecb(cipher_null))",
3139                                 .cra_driver_name = "authenc-hmac-sha384-"
3140                                                    "ecb-cipher_null-caam",
3141                                 .cra_blocksize = NULL_BLOCK_SIZE,
3142                         },
3143                         .setkey = aead_setkey,
3144                         .setauthsize = aead_setauthsize,
3145                         .encrypt = aead_encrypt,
3146                         .decrypt = aead_decrypt,
3147                         .ivsize = NULL_IV_SIZE,
3148                         .maxauthsize = SHA384_DIGEST_SIZE,
3149                 },
3150                 .caam = {
3151                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3152                                            OP_ALG_AAI_HMAC_PRECOMP,
3153                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3154                 },
3155         },
3156         {
3157                 .aead = {
3158                         .base = {
3159                                 .cra_name = "authenc(hmac(sha512),"
3160                                             "ecb(cipher_null))",
3161                                 .cra_driver_name = "authenc-hmac-sha512-"
3162                                                    "ecb-cipher_null-caam",
3163                                 .cra_blocksize = NULL_BLOCK_SIZE,
3164                         },
3165                         .setkey = aead_setkey,
3166                         .setauthsize = aead_setauthsize,
3167                         .encrypt = aead_encrypt,
3168                         .decrypt = aead_decrypt,
3169                         .ivsize = NULL_IV_SIZE,
3170                         .maxauthsize = SHA512_DIGEST_SIZE,
3171                 },
3172                 .caam = {
3173                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3174                                            OP_ALG_AAI_HMAC_PRECOMP,
3175                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3176                 },
3177         },
3178         {
3179                 .aead = {
3180                         .base = {
3181                                 .cra_name = "authenc(hmac(md5),cbc(aes))",
3182                                 .cra_driver_name = "authenc-hmac-md5-"
3183                                                    "cbc-aes-caam",
3184                                 .cra_blocksize = AES_BLOCK_SIZE,
3185                         },
3186                         .setkey = aead_setkey,
3187                         .setauthsize = aead_setauthsize,
3188                         .encrypt = aead_encrypt,
3189                         .decrypt = aead_decrypt,
3190                         .ivsize = AES_BLOCK_SIZE,
3191                         .maxauthsize = MD5_DIGEST_SIZE,
3192                 },
3193                 .caam = {
3194                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3195                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3196                                            OP_ALG_AAI_HMAC_PRECOMP,
3197                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3198                 },
3199         },
3200         {
3201                 .aead = {
3202                         .base = {
3203                                 .cra_name = "echainiv(authenc(hmac(md5),"
3204                                             "cbc(aes)))",
3205                                 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3206                                                    "cbc-aes-caam",
3207                                 .cra_blocksize = AES_BLOCK_SIZE,
3208                         },
3209                         .setkey = aead_setkey,
3210                         .setauthsize = aead_setauthsize,
3211                         .encrypt = aead_encrypt,
3212                         .decrypt = aead_decrypt,
3213                         .ivsize = AES_BLOCK_SIZE,
3214                         .maxauthsize = MD5_DIGEST_SIZE,
3215                 },
3216                 .caam = {
3217                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3218                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3219                                            OP_ALG_AAI_HMAC_PRECOMP,
3220                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3221                         .geniv = true,
3222                 },
3223         },
3224         {
3225                 .aead = {
3226                         .base = {
3227                                 .cra_name = "authenc(hmac(sha1),cbc(aes))",
3228                                 .cra_driver_name = "authenc-hmac-sha1-"
3229                                                    "cbc-aes-caam",
3230                                 .cra_blocksize = AES_BLOCK_SIZE,
3231                         },
3232                         .setkey = aead_setkey,
3233                         .setauthsize = aead_setauthsize,
3234                         .encrypt = aead_encrypt,
3235                         .decrypt = aead_decrypt,
3236                         .ivsize = AES_BLOCK_SIZE,
3237                         .maxauthsize = SHA1_DIGEST_SIZE,
3238                 },
3239                 .caam = {
3240                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3241                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3242                                            OP_ALG_AAI_HMAC_PRECOMP,
3243                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3244                 },
3245         },
3246         {
3247                 .aead = {
3248                         .base = {
3249                                 .cra_name = "echainiv(authenc(hmac(sha1),"
3250                                             "cbc(aes)))",
3251                                 .cra_driver_name = "echainiv-authenc-"
3252                                                    "hmac-sha1-cbc-aes-caam",
3253                                 .cra_blocksize = AES_BLOCK_SIZE,
3254                         },
3255                         .setkey = aead_setkey,
3256                         .setauthsize = aead_setauthsize,
3257                         .encrypt = aead_encrypt,
3258                         .decrypt = aead_decrypt,
3259                         .ivsize = AES_BLOCK_SIZE,
3260                         .maxauthsize = SHA1_DIGEST_SIZE,
3261                 },
3262                 .caam = {
3263                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3264                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3265                                            OP_ALG_AAI_HMAC_PRECOMP,
3266                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3267                         .geniv = true,
3268                 },
3269         },
3270         {
3271                 .aead = {
3272                         .base = {
3273                                 .cra_name = "authenc(hmac(sha224),cbc(aes))",
3274                                 .cra_driver_name = "authenc-hmac-sha224-"
3275                                                    "cbc-aes-caam",
3276                                 .cra_blocksize = AES_BLOCK_SIZE,
3277                         },
3278                         .setkey = aead_setkey,
3279                         .setauthsize = aead_setauthsize,
3280                         .encrypt = aead_encrypt,
3281                         .decrypt = aead_decrypt,
3282                         .ivsize = AES_BLOCK_SIZE,
3283                         .maxauthsize = SHA224_DIGEST_SIZE,
3284                 },
3285                 .caam = {
3286                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3287                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3288                                            OP_ALG_AAI_HMAC_PRECOMP,
3289                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3290                 },
3291         },
3292         {
3293                 .aead = {
3294                         .base = {
3295                                 .cra_name = "echainiv(authenc(hmac(sha224),"
3296                                             "cbc(aes)))",
3297                                 .cra_driver_name = "echainiv-authenc-"
3298                                                    "hmac-sha224-cbc-aes-caam",
3299                                 .cra_blocksize = AES_BLOCK_SIZE,
3300                         },
3301                         .setkey = aead_setkey,
3302                         .setauthsize = aead_setauthsize,
3303                         .encrypt = aead_encrypt,
3304                         .decrypt = aead_decrypt,
3305                         .ivsize = AES_BLOCK_SIZE,
3306                         .maxauthsize = SHA224_DIGEST_SIZE,
3307                 },
3308                 .caam = {
3309                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3310                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3311                                            OP_ALG_AAI_HMAC_PRECOMP,
3312                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3313                         .geniv = true,
3314                 },
3315         },
3316         {
3317                 .aead = {
3318                         .base = {
3319                                 .cra_name = "authenc(hmac(sha256),cbc(aes))",
3320                                 .cra_driver_name = "authenc-hmac-sha256-"
3321                                                    "cbc-aes-caam",
3322                                 .cra_blocksize = AES_BLOCK_SIZE,
3323                         },
3324                         .setkey = aead_setkey,
3325                         .setauthsize = aead_setauthsize,
3326                         .encrypt = aead_encrypt,
3327                         .decrypt = aead_decrypt,
3328                         .ivsize = AES_BLOCK_SIZE,
3329                         .maxauthsize = SHA256_DIGEST_SIZE,
3330                 },
3331                 .caam = {
3332                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3333                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3334                                            OP_ALG_AAI_HMAC_PRECOMP,
3335                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3336                 },
3337         },
3338         {
3339                 .aead = {
3340                         .base = {
3341                                 .cra_name = "echainiv(authenc(hmac(sha256),"
3342                                             "cbc(aes)))",
3343                                 .cra_driver_name = "echainiv-authenc-"
3344                                                    "hmac-sha256-cbc-aes-caam",
3345                                 .cra_blocksize = AES_BLOCK_SIZE,
3346                         },
3347                         .setkey = aead_setkey,
3348                         .setauthsize = aead_setauthsize,
3349                         .encrypt = aead_encrypt,
3350                         .decrypt = aead_decrypt,
3351                         .ivsize = AES_BLOCK_SIZE,
3352                         .maxauthsize = SHA256_DIGEST_SIZE,
3353                 },
3354                 .caam = {
3355                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3356                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3357                                            OP_ALG_AAI_HMAC_PRECOMP,
3358                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3359                         .geniv = true,
3360                 },
3361         },
3362         {
3363                 .aead = {
3364                         .base = {
3365                                 .cra_name = "authenc(hmac(sha384),cbc(aes))",
3366                                 .cra_driver_name = "authenc-hmac-sha384-"
3367                                                    "cbc-aes-caam",
3368                                 .cra_blocksize = AES_BLOCK_SIZE,
3369                         },
3370                         .setkey = aead_setkey,
3371                         .setauthsize = aead_setauthsize,
3372                         .encrypt = aead_encrypt,
3373                         .decrypt = aead_decrypt,
3374                         .ivsize = AES_BLOCK_SIZE,
3375                         .maxauthsize = SHA384_DIGEST_SIZE,
3376                 },
3377                 .caam = {
3378                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3379                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3380                                            OP_ALG_AAI_HMAC_PRECOMP,
3381                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3382                 },
3383         },
3384         {
3385                 .aead = {
3386                         .base = {
3387                                 .cra_name = "echainiv(authenc(hmac(sha384),"
3388                                             "cbc(aes)))",
3389                                 .cra_driver_name = "echainiv-authenc-"
3390                                                    "hmac-sha384-cbc-aes-caam",
3391                                 .cra_blocksize = AES_BLOCK_SIZE,
3392                         },
3393                         .setkey = aead_setkey,
3394                         .setauthsize = aead_setauthsize,
3395                         .encrypt = aead_encrypt,
3396                         .decrypt = aead_decrypt,
3397                         .ivsize = AES_BLOCK_SIZE,
3398                         .maxauthsize = SHA384_DIGEST_SIZE,
3399                 },
3400                 .caam = {
3401                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3402                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3403                                            OP_ALG_AAI_HMAC_PRECOMP,
3404                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3405                         .geniv = true,
3406                 },
3407         },
3408         {
3409                 .aead = {
3410                         .base = {
3411                                 .cra_name = "authenc(hmac(sha512),cbc(aes))",
3412                                 .cra_driver_name = "authenc-hmac-sha512-"
3413                                                    "cbc-aes-caam",
3414                                 .cra_blocksize = AES_BLOCK_SIZE,
3415                         },
3416                         .setkey = aead_setkey,
3417                         .setauthsize = aead_setauthsize,
3418                         .encrypt = aead_encrypt,
3419                         .decrypt = aead_decrypt,
3420                         .ivsize = AES_BLOCK_SIZE,
3421                         .maxauthsize = SHA512_DIGEST_SIZE,
3422                 },
3423                 .caam = {
3424                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3425                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3426                                            OP_ALG_AAI_HMAC_PRECOMP,
3427                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3428                 },
3429         },
3430         {
3431                 .aead = {
3432                         .base = {
3433                                 .cra_name = "echainiv(authenc(hmac(sha512),"
3434                                             "cbc(aes)))",
3435                                 .cra_driver_name = "echainiv-authenc-"
3436                                                    "hmac-sha512-cbc-aes-caam",
3437                                 .cra_blocksize = AES_BLOCK_SIZE,
3438                         },
3439                         .setkey = aead_setkey,
3440                         .setauthsize = aead_setauthsize,
3441                         .encrypt = aead_encrypt,
3442                         .decrypt = aead_decrypt,
3443                         .ivsize = AES_BLOCK_SIZE,
3444                         .maxauthsize = SHA512_DIGEST_SIZE,
3445                 },
3446                 .caam = {
3447                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3448                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3449                                            OP_ALG_AAI_HMAC_PRECOMP,
3450                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3451                         .geniv = true,
3452                 },
3453         },
3454         {
3455                 .aead = {
3456                         .base = {
3457                                 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
3458                                 .cra_driver_name = "authenc-hmac-md5-"
3459                                                    "cbc-des3_ede-caam",
3460                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3461                         },
3462                         .setkey = aead_setkey,
3463                         .setauthsize = aead_setauthsize,
3464                         .encrypt = aead_encrypt,
3465                         .decrypt = aead_decrypt,
3466                         .ivsize = DES3_EDE_BLOCK_SIZE,
3467                         .maxauthsize = MD5_DIGEST_SIZE,
3468                 },
3469                 .caam = {
3470                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3471                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3472                                            OP_ALG_AAI_HMAC_PRECOMP,
3473                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3474                 }
3475         },
3476         {
3477                 .aead = {
3478                         .base = {
3479                                 .cra_name = "echainiv(authenc(hmac(md5),"
3480                                             "cbc(des3_ede)))",
3481                                 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3482                                                    "cbc-des3_ede-caam",
3483                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3484                         },
3485                         .setkey = aead_setkey,
3486                         .setauthsize = aead_setauthsize,
3487                         .encrypt = aead_encrypt,
3488                         .decrypt = aead_decrypt,
3489                         .ivsize = DES3_EDE_BLOCK_SIZE,
3490                         .maxauthsize = MD5_DIGEST_SIZE,
3491                 },
3492                 .caam = {
3493                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3494                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3495                                            OP_ALG_AAI_HMAC_PRECOMP,
3496                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3497                         .geniv = true,
3498                 }
3499         },
3500         {
3501                 .aead = {
3502                         .base = {
3503                                 .cra_name = "authenc(hmac(sha1),"
3504                                             "cbc(des3_ede))",
3505                                 .cra_driver_name = "authenc-hmac-sha1-"
3506                                                    "cbc-des3_ede-caam",
3507                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3508                         },
3509                         .setkey = aead_setkey,
3510                         .setauthsize = aead_setauthsize,
3511                         .encrypt = aead_encrypt,
3512                         .decrypt = aead_decrypt,
3513                         .ivsize = DES3_EDE_BLOCK_SIZE,
3514                         .maxauthsize = SHA1_DIGEST_SIZE,
3515                 },
3516                 .caam = {
3517                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3518                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3519                                            OP_ALG_AAI_HMAC_PRECOMP,
3520                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3521                 },
3522         },
3523         {
3524                 .aead = {
3525                         .base = {
3526                                 .cra_name = "echainiv(authenc(hmac(sha1),"
3527                                             "cbc(des3_ede)))",
3528                                 .cra_driver_name = "echainiv-authenc-"
3529                                                    "hmac-sha1-"
3530                                                    "cbc-des3_ede-caam",
3531                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3532                         },
3533                         .setkey = aead_setkey,
3534                         .setauthsize = aead_setauthsize,
3535                         .encrypt = aead_encrypt,
3536                         .decrypt = aead_decrypt,
3537                         .ivsize = DES3_EDE_BLOCK_SIZE,
3538                         .maxauthsize = SHA1_DIGEST_SIZE,
3539                 },
3540                 .caam = {
3541                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3542                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3543                                            OP_ALG_AAI_HMAC_PRECOMP,
3544                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3545                         .geniv = true,
3546                 },
3547         },
3548         {
3549                 .aead = {
3550                         .base = {
3551                                 .cra_name = "authenc(hmac(sha224),"
3552                                             "cbc(des3_ede))",
3553                                 .cra_driver_name = "authenc-hmac-sha224-"
3554                                                    "cbc-des3_ede-caam",
3555                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3556                         },
3557                         .setkey = aead_setkey,
3558                         .setauthsize = aead_setauthsize,
3559                         .encrypt = aead_encrypt,
3560                         .decrypt = aead_decrypt,
3561                         .ivsize = DES3_EDE_BLOCK_SIZE,
3562                         .maxauthsize = SHA224_DIGEST_SIZE,
3563                 },
3564                 .caam = {
3565                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3566                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3567                                            OP_ALG_AAI_HMAC_PRECOMP,
3568                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3569                 },
3570         },
3571         {
3572                 .aead = {
3573                         .base = {
3574                                 .cra_name = "echainiv(authenc(hmac(sha224),"
3575                                             "cbc(des3_ede)))",
3576                                 .cra_driver_name = "echainiv-authenc-"
3577                                                    "hmac-sha224-"
3578                                                    "cbc-des3_ede-caam",
3579                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3580                         },
3581                         .setkey = aead_setkey,
3582                         .setauthsize = aead_setauthsize,
3583                         .encrypt = aead_encrypt,
3584                         .decrypt = aead_decrypt,
3585                         .ivsize = DES3_EDE_BLOCK_SIZE,
3586                         .maxauthsize = SHA224_DIGEST_SIZE,
3587                 },
3588                 .caam = {
3589                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3590                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3591                                            OP_ALG_AAI_HMAC_PRECOMP,
3592                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3593                         .geniv = true,
3594                 },
3595         },
3596         {
3597                 .aead = {
3598                         .base = {
3599                                 .cra_name = "authenc(hmac(sha256),"
3600                                             "cbc(des3_ede))",
3601                                 .cra_driver_name = "authenc-hmac-sha256-"
3602                                                    "cbc-des3_ede-caam",
3603                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3604                         },
3605                         .setkey = aead_setkey,
3606                         .setauthsize = aead_setauthsize,
3607                         .encrypt = aead_encrypt,
3608                         .decrypt = aead_decrypt,
3609                         .ivsize = DES3_EDE_BLOCK_SIZE,
3610                         .maxauthsize = SHA256_DIGEST_SIZE,
3611                 },
3612                 .caam = {
3613                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3614                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3615                                            OP_ALG_AAI_HMAC_PRECOMP,
3616                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3617                 },
3618         },
3619         {
3620                 .aead = {
3621                         .base = {
3622                                 .cra_name = "echainiv(authenc(hmac(sha256),"
3623                                             "cbc(des3_ede)))",
3624                                 .cra_driver_name = "echainiv-authenc-"
3625                                                    "hmac-sha256-"
3626                                                    "cbc-des3_ede-caam",
3627                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3628                         },
3629                         .setkey = aead_setkey,
3630                         .setauthsize = aead_setauthsize,
3631                         .encrypt = aead_encrypt,
3632                         .decrypt = aead_decrypt,
3633                         .ivsize = DES3_EDE_BLOCK_SIZE,
3634                         .maxauthsize = SHA256_DIGEST_SIZE,
3635                 },
3636                 .caam = {
3637                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3638                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3639                                            OP_ALG_AAI_HMAC_PRECOMP,
3640                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3641                         .geniv = true,
3642                 },
3643         },
3644         {
3645                 .aead = {
3646                         .base = {
3647                                 .cra_name = "authenc(hmac(sha384),"
3648                                             "cbc(des3_ede))",
3649                                 .cra_driver_name = "authenc-hmac-sha384-"
3650                                                    "cbc-des3_ede-caam",
3651                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3652                         },
3653                         .setkey = aead_setkey,
3654                         .setauthsize = aead_setauthsize,
3655                         .encrypt = aead_encrypt,
3656                         .decrypt = aead_decrypt,
3657                         .ivsize = DES3_EDE_BLOCK_SIZE,
3658                         .maxauthsize = SHA384_DIGEST_SIZE,
3659                 },
3660                 .caam = {
3661                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3662                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3663                                            OP_ALG_AAI_HMAC_PRECOMP,
3664                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3665                 },
3666         },
3667         {
3668                 .aead = {
3669                         .base = {
3670                                 .cra_name = "echainiv(authenc(hmac(sha384),"
3671                                             "cbc(des3_ede)))",
3672                                 .cra_driver_name = "echainiv-authenc-"
3673                                                    "hmac-sha384-"
3674                                                    "cbc-des3_ede-caam",
3675                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3676                         },
3677                         .setkey = aead_setkey,
3678                         .setauthsize = aead_setauthsize,
3679                         .encrypt = aead_encrypt,
3680                         .decrypt = aead_decrypt,
3681                         .ivsize = DES3_EDE_BLOCK_SIZE,
3682                         .maxauthsize = SHA384_DIGEST_SIZE,
3683                 },
3684                 .caam = {
3685                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3686                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3687                                            OP_ALG_AAI_HMAC_PRECOMP,
3688                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3689                         .geniv = true,
3690                 },
3691         },
3692         {
3693                 .aead = {
3694                         .base = {
3695                                 .cra_name = "authenc(hmac(sha512),"
3696                                             "cbc(des3_ede))",
3697                                 .cra_driver_name = "authenc-hmac-sha512-"
3698                                                    "cbc-des3_ede-caam",
3699                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3700                         },
3701                         .setkey = aead_setkey,
3702                         .setauthsize = aead_setauthsize,
3703                         .encrypt = aead_encrypt,
3704                         .decrypt = aead_decrypt,
3705                         .ivsize = DES3_EDE_BLOCK_SIZE,
3706                         .maxauthsize = SHA512_DIGEST_SIZE,
3707                 },
3708                 .caam = {
3709                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3710                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3711                                            OP_ALG_AAI_HMAC_PRECOMP,
3712                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3713                 },
3714         },
3715         {
3716                 .aead = {
3717                         .base = {
3718                                 .cra_name = "echainiv(authenc(hmac(sha512),"
3719                                             "cbc(des3_ede)))",
3720                                 .cra_driver_name = "echainiv-authenc-"
3721                                                    "hmac-sha512-"
3722                                                    "cbc-des3_ede-caam",
3723                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3724                         },
3725                         .setkey = aead_setkey,
3726                         .setauthsize = aead_setauthsize,
3727                         .encrypt = aead_encrypt,
3728                         .decrypt = aead_decrypt,
3729                         .ivsize = DES3_EDE_BLOCK_SIZE,
3730                         .maxauthsize = SHA512_DIGEST_SIZE,
3731                 },
3732                 .caam = {
3733                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3734                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3735                                            OP_ALG_AAI_HMAC_PRECOMP,
3736                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3737                         .geniv = true,
3738                 },
3739         },
3740         {
3741                 .aead = {
3742                         .base = {
3743                                 .cra_name = "authenc(hmac(md5),cbc(des))",
3744                                 .cra_driver_name = "authenc-hmac-md5-"
3745                                                    "cbc-des-caam",
3746                                 .cra_blocksize = DES_BLOCK_SIZE,
3747                         },
3748                         .setkey = aead_setkey,
3749                         .setauthsize = aead_setauthsize,
3750                         .encrypt = aead_encrypt,
3751                         .decrypt = aead_decrypt,
3752                         .ivsize = DES_BLOCK_SIZE,
3753                         .maxauthsize = MD5_DIGEST_SIZE,
3754                 },
3755                 .caam = {
3756                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3757                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3758                                            OP_ALG_AAI_HMAC_PRECOMP,
3759                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3760                 },
3761         },
3762         {
3763                 .aead = {
3764                         .base = {
3765                                 .cra_name = "echainiv(authenc(hmac(md5),"
3766                                             "cbc(des)))",
3767                                 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3768                                                    "cbc-des-caam",
3769                                 .cra_blocksize = DES_BLOCK_SIZE,
3770                         },
3771                         .setkey = aead_setkey,
3772                         .setauthsize = aead_setauthsize,
3773                         .encrypt = aead_encrypt,
3774                         .decrypt = aead_decrypt,
3775                         .ivsize = DES_BLOCK_SIZE,
3776                         .maxauthsize = MD5_DIGEST_SIZE,
3777                 },
3778                 .caam = {
3779                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3780                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3781                                            OP_ALG_AAI_HMAC_PRECOMP,
3782                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3783                         .geniv = true,
3784                 },
3785         },
3786         {
3787                 .aead = {
3788                         .base = {
3789                                 .cra_name = "authenc(hmac(sha1),cbc(des))",
3790                                 .cra_driver_name = "authenc-hmac-sha1-"
3791                                                    "cbc-des-caam",
3792                                 .cra_blocksize = DES_BLOCK_SIZE,
3793                         },
3794                         .setkey = aead_setkey,
3795                         .setauthsize = aead_setauthsize,
3796                         .encrypt = aead_encrypt,
3797                         .decrypt = aead_decrypt,
3798                         .ivsize = DES_BLOCK_SIZE,
3799                         .maxauthsize = SHA1_DIGEST_SIZE,
3800                 },
3801                 .caam = {
3802                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3803                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3804                                            OP_ALG_AAI_HMAC_PRECOMP,
3805                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3806                 },
3807         },
3808         {
3809                 .aead = {
3810                         .base = {
3811                                 .cra_name = "echainiv(authenc(hmac(sha1),"
3812                                             "cbc(des)))",
3813                                 .cra_driver_name = "echainiv-authenc-"
3814                                                    "hmac-sha1-cbc-des-caam",
3815                                 .cra_blocksize = DES_BLOCK_SIZE,
3816                         },
3817                         .setkey = aead_setkey,
3818                         .setauthsize = aead_setauthsize,
3819                         .encrypt = aead_encrypt,
3820                         .decrypt = aead_decrypt,
3821                         .ivsize = DES_BLOCK_SIZE,
3822                         .maxauthsize = SHA1_DIGEST_SIZE,
3823                 },
3824                 .caam = {
3825                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3826                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3827                                            OP_ALG_AAI_HMAC_PRECOMP,
3828                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3829                         .geniv = true,
3830                 },
3831         },
3832         {
3833                 .aead = {
3834                         .base = {
3835                                 .cra_name = "authenc(hmac(sha224),cbc(des))",
3836                                 .cra_driver_name = "authenc-hmac-sha224-"
3837                                                    "cbc-des-caam",
3838                                 .cra_blocksize = DES_BLOCK_SIZE,
3839                         },
3840                         .setkey = aead_setkey,
3841                         .setauthsize = aead_setauthsize,
3842                         .encrypt = aead_encrypt,
3843                         .decrypt = aead_decrypt,
3844                         .ivsize = DES_BLOCK_SIZE,
3845                         .maxauthsize = SHA224_DIGEST_SIZE,
3846                 },
3847                 .caam = {
3848                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3849                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3850                                            OP_ALG_AAI_HMAC_PRECOMP,
3851                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3852                 },
3853         },
3854         {
3855                 .aead = {
3856                         .base = {
3857                                 .cra_name = "echainiv(authenc(hmac(sha224),"
3858                                             "cbc(des)))",
3859                                 .cra_driver_name = "echainiv-authenc-"
3860                                                    "hmac-sha224-cbc-des-caam",
3861                                 .cra_blocksize = DES_BLOCK_SIZE,
3862                         },
3863                         .setkey = aead_setkey,
3864                         .setauthsize = aead_setauthsize,
3865                         .encrypt = aead_encrypt,
3866                         .decrypt = aead_decrypt,
3867                         .ivsize = DES_BLOCK_SIZE,
3868                         .maxauthsize = SHA224_DIGEST_SIZE,
3869                 },
3870                 .caam = {
3871                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3872                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3873                                            OP_ALG_AAI_HMAC_PRECOMP,
3874                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3875                         .geniv = true,
3876                 },
3877         },
3878         {
3879                 .aead = {
3880                         .base = {
3881                                 .cra_name = "authenc(hmac(sha256),cbc(des))",
3882                                 .cra_driver_name = "authenc-hmac-sha256-"
3883                                                    "cbc-des-caam",
3884                                 .cra_blocksize = DES_BLOCK_SIZE,
3885                         },
3886                         .setkey = aead_setkey,
3887                         .setauthsize = aead_setauthsize,
3888                         .encrypt = aead_encrypt,
3889                         .decrypt = aead_decrypt,
3890                         .ivsize = DES_BLOCK_SIZE,
3891                         .maxauthsize = SHA256_DIGEST_SIZE,
3892                 },
3893                 .caam = {
3894                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3895                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3896                                            OP_ALG_AAI_HMAC_PRECOMP,
3897                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3898                 },
3899         },
3900         {
3901                 .aead = {
3902                         .base = {
3903                                 .cra_name = "echainiv(authenc(hmac(sha256),"
3904                                             "cbc(des)))",
3905                                 .cra_driver_name = "echainiv-authenc-"
3906                                                    "hmac-sha256-cbc-des-caam",
3907                                 .cra_blocksize = DES_BLOCK_SIZE,
3908                         },
3909                         .setkey = aead_setkey,
3910                         .setauthsize = aead_setauthsize,
3911                         .encrypt = aead_encrypt,
3912                         .decrypt = aead_decrypt,
3913                         .ivsize = DES_BLOCK_SIZE,
3914                         .maxauthsize = SHA256_DIGEST_SIZE,
3915                 },
3916                 .caam = {
3917                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3918                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3919                                            OP_ALG_AAI_HMAC_PRECOMP,
3920                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3921                         .geniv = true,
3922                 },
3923         },
3924         {
3925                 .aead = {
3926                         .base = {
3927                                 .cra_name = "authenc(hmac(sha384),cbc(des))",
3928                                 .cra_driver_name = "authenc-hmac-sha384-"
3929                                                    "cbc-des-caam",
3930                                 .cra_blocksize = DES_BLOCK_SIZE,
3931                         },
3932                         .setkey = aead_setkey,
3933                         .setauthsize = aead_setauthsize,
3934                         .encrypt = aead_encrypt,
3935                         .decrypt = aead_decrypt,
3936                         .ivsize = DES_BLOCK_SIZE,
3937                         .maxauthsize = SHA384_DIGEST_SIZE,
3938                 },
3939                 .caam = {
3940                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3941                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3942                                            OP_ALG_AAI_HMAC_PRECOMP,
3943                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3944                 },
3945         },
3946         {
3947                 .aead = {
3948                         .base = {
3949                                 .cra_name = "echainiv(authenc(hmac(sha384),"
3950                                             "cbc(des)))",
3951                                 .cra_driver_name = "echainiv-authenc-"
3952                                                    "hmac-sha384-cbc-des-caam",
3953                                 .cra_blocksize = DES_BLOCK_SIZE,
3954                         },
3955                         .setkey = aead_setkey,
3956                         .setauthsize = aead_setauthsize,
3957                         .encrypt = aead_encrypt,
3958                         .decrypt = aead_decrypt,
3959                         .ivsize = DES_BLOCK_SIZE,
3960                         .maxauthsize = SHA384_DIGEST_SIZE,
3961                 },
3962                 .caam = {
3963                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3964                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3965                                            OP_ALG_AAI_HMAC_PRECOMP,
3966                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3967                         .geniv = true,
3968                 },
3969         },
3970         {
3971                 .aead = {
3972                         .base = {
3973                                 .cra_name = "authenc(hmac(sha512),cbc(des))",
3974                                 .cra_driver_name = "authenc-hmac-sha512-"
3975                                                    "cbc-des-caam",
3976                                 .cra_blocksize = DES_BLOCK_SIZE,
3977                         },
3978                         .setkey = aead_setkey,
3979                         .setauthsize = aead_setauthsize,
3980                         .encrypt = aead_encrypt,
3981                         .decrypt = aead_decrypt,
3982                         .ivsize = DES_BLOCK_SIZE,
3983                         .maxauthsize = SHA512_DIGEST_SIZE,
3984                 },
3985                 .caam = {
3986                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3987                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3988                                            OP_ALG_AAI_HMAC_PRECOMP,
3989                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3990                 },
3991         },
3992         {
3993                 .aead = {
3994                         .base = {
3995                                 .cra_name = "echainiv(authenc(hmac(sha512),"
3996                                             "cbc(des)))",
3997                                 .cra_driver_name = "echainiv-authenc-"
3998                                                    "hmac-sha512-cbc-des-caam",
3999                                 .cra_blocksize = DES_BLOCK_SIZE,
4000                         },
4001                         .setkey = aead_setkey,
4002                         .setauthsize = aead_setauthsize,
4003                         .encrypt = aead_encrypt,
4004                         .decrypt = aead_decrypt,
4005                         .ivsize = DES_BLOCK_SIZE,
4006                         .maxauthsize = SHA512_DIGEST_SIZE,
4007                 },
4008                 .caam = {
4009                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
4010                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4011                                            OP_ALG_AAI_HMAC_PRECOMP,
4012                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4013                         .geniv = true,
4014                 },
4015         },
4016         {
4017                 .aead = {
4018                         .base = {
4019                                 .cra_name = "authenc(hmac(md5),"
4020                                             "rfc3686(ctr(aes)))",
4021                                 .cra_driver_name = "authenc-hmac-md5-"
4022                                                    "rfc3686-ctr-aes-caam",
4023                                 .cra_blocksize = 1,
4024                         },
4025                         .setkey = aead_setkey,
4026                         .setauthsize = aead_setauthsize,
4027                         .encrypt = aead_encrypt,
4028                         .decrypt = aead_decrypt,
4029                         .ivsize = CTR_RFC3686_IV_SIZE,
4030                         .maxauthsize = MD5_DIGEST_SIZE,
4031                 },
4032                 .caam = {
4033                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4034                                            OP_ALG_AAI_CTR_MOD128,
4035                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
4036                                            OP_ALG_AAI_HMAC_PRECOMP,
4037                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
4038                         .rfc3686 = true,
4039                 },
4040         },
4041         {
4042                 .aead = {
4043                         .base = {
4044                                 .cra_name = "seqiv(authenc("
4045                                             "hmac(md5),rfc3686(ctr(aes))))",
4046                                 .cra_driver_name = "seqiv-authenc-hmac-md5-"
4047                                                    "rfc3686-ctr-aes-caam",
4048                                 .cra_blocksize = 1,
4049                         },
4050                         .setkey = aead_setkey,
4051                         .setauthsize = aead_setauthsize,
4052                         .encrypt = aead_encrypt,
4053                         .decrypt = aead_decrypt,
4054                         .ivsize = CTR_RFC3686_IV_SIZE,
4055                         .maxauthsize = MD5_DIGEST_SIZE,
4056                 },
4057                 .caam = {
4058                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4059                                            OP_ALG_AAI_CTR_MOD128,
4060                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
4061                                            OP_ALG_AAI_HMAC_PRECOMP,
4062                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
4063                         .rfc3686 = true,
4064                         .geniv = true,
4065                 },
4066         },
4067         {
4068                 .aead = {
4069                         .base = {
4070                                 .cra_name = "authenc(hmac(sha1),"
4071                                             "rfc3686(ctr(aes)))",
4072                                 .cra_driver_name = "authenc-hmac-sha1-"
4073                                                    "rfc3686-ctr-aes-caam",
4074                                 .cra_blocksize = 1,
4075                         },
4076                         .setkey = aead_setkey,
4077                         .setauthsize = aead_setauthsize,
4078                         .encrypt = aead_encrypt,
4079                         .decrypt = aead_decrypt,
4080                         .ivsize = CTR_RFC3686_IV_SIZE,
4081                         .maxauthsize = SHA1_DIGEST_SIZE,
4082                 },
4083                 .caam = {
4084                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4085                                            OP_ALG_AAI_CTR_MOD128,
4086                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
4087                                            OP_ALG_AAI_HMAC_PRECOMP,
4088                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
4089                         .rfc3686 = true,
4090                 },
4091         },
4092         {
4093                 .aead = {
4094                         .base = {
4095                                 .cra_name = "seqiv(authenc("
4096                                             "hmac(sha1),rfc3686(ctr(aes))))",
4097                                 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
4098                                                    "rfc3686-ctr-aes-caam",
4099                                 .cra_blocksize = 1,
4100                         },
4101                         .setkey = aead_setkey,
4102                         .setauthsize = aead_setauthsize,
4103                         .encrypt = aead_encrypt,
4104                         .decrypt = aead_decrypt,
4105                         .ivsize = CTR_RFC3686_IV_SIZE,
4106                         .maxauthsize = SHA1_DIGEST_SIZE,
4107                 },
4108                 .caam = {
4109                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4110                                            OP_ALG_AAI_CTR_MOD128,
4111                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
4112                                            OP_ALG_AAI_HMAC_PRECOMP,
4113                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
4114                         .rfc3686 = true,
4115                         .geniv = true,
4116                 },
4117         },
4118         {
4119                 .aead = {
4120                         .base = {
4121                                 .cra_name = "authenc(hmac(sha224),"
4122                                             "rfc3686(ctr(aes)))",
4123                                 .cra_driver_name = "authenc-hmac-sha224-"
4124                                                    "rfc3686-ctr-aes-caam",
4125                                 .cra_blocksize = 1,
4126                         },
4127                         .setkey = aead_setkey,
4128                         .setauthsize = aead_setauthsize,
4129                         .encrypt = aead_encrypt,
4130                         .decrypt = aead_decrypt,
4131                         .ivsize = CTR_RFC3686_IV_SIZE,
4132                         .maxauthsize = SHA224_DIGEST_SIZE,
4133                 },
4134                 .caam = {
4135                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4136                                            OP_ALG_AAI_CTR_MOD128,
4137                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
4138                                            OP_ALG_AAI_HMAC_PRECOMP,
4139                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
4140                         .rfc3686 = true,
4141                 },
4142         },
4143         {
4144                 .aead = {
4145                         .base = {
4146                                 .cra_name = "seqiv(authenc("
4147                                             "hmac(sha224),rfc3686(ctr(aes))))",
4148                                 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
4149                                                    "rfc3686-ctr-aes-caam",
4150                                 .cra_blocksize = 1,
4151                         },
4152                         .setkey = aead_setkey,
4153                         .setauthsize = aead_setauthsize,
4154                         .encrypt = aead_encrypt,
4155                         .decrypt = aead_decrypt,
4156                         .ivsize = CTR_RFC3686_IV_SIZE,
4157                         .maxauthsize = SHA224_DIGEST_SIZE,
4158                 },
4159                 .caam = {
4160                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4161                                            OP_ALG_AAI_CTR_MOD128,
4162                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
4163                                            OP_ALG_AAI_HMAC_PRECOMP,
4164                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
4165                         .rfc3686 = true,
4166                         .geniv = true,
4167                 },
4168         },
4169         {
4170                 .aead = {
4171                         .base = {
4172                                 .cra_name = "authenc(hmac(sha256),"
4173                                             "rfc3686(ctr(aes)))",
4174                                 .cra_driver_name = "authenc-hmac-sha256-"
4175                                                    "rfc3686-ctr-aes-caam",
4176                                 .cra_blocksize = 1,
4177                         },
4178                         .setkey = aead_setkey,
4179                         .setauthsize = aead_setauthsize,
4180                         .encrypt = aead_encrypt,
4181                         .decrypt = aead_decrypt,
4182                         .ivsize = CTR_RFC3686_IV_SIZE,
4183                         .maxauthsize = SHA256_DIGEST_SIZE,
4184                 },
4185                 .caam = {
4186                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4187                                            OP_ALG_AAI_CTR_MOD128,
4188                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
4189                                            OP_ALG_AAI_HMAC_PRECOMP,
4190                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
4191                         .rfc3686 = true,
4192                 },
4193         },
4194         {
4195                 .aead = {
4196                         .base = {
4197                                 .cra_name = "seqiv(authenc(hmac(sha256),"
4198                                             "rfc3686(ctr(aes))))",
4199                                 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
4200                                                    "rfc3686-ctr-aes-caam",
4201                                 .cra_blocksize = 1,
4202                         },
4203                         .setkey = aead_setkey,
4204                         .setauthsize = aead_setauthsize,
4205                         .encrypt = aead_encrypt,
4206                         .decrypt = aead_decrypt,
4207                         .ivsize = CTR_RFC3686_IV_SIZE,
4208                         .maxauthsize = SHA256_DIGEST_SIZE,
4209                 },
4210                 .caam = {
4211                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4212                                            OP_ALG_AAI_CTR_MOD128,
4213                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
4214                                            OP_ALG_AAI_HMAC_PRECOMP,
4215                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
4216                         .rfc3686 = true,
4217                         .geniv = true,
4218                 },
4219         },
4220         {
4221                 .aead = {
4222                         .base = {
4223                                 .cra_name = "authenc(hmac(sha384),"
4224                                             "rfc3686(ctr(aes)))",
4225                                 .cra_driver_name = "authenc-hmac-sha384-"
4226                                                    "rfc3686-ctr-aes-caam",
4227                                 .cra_blocksize = 1,
4228                         },
4229                         .setkey = aead_setkey,
4230                         .setauthsize = aead_setauthsize,
4231                         .encrypt = aead_encrypt,
4232                         .decrypt = aead_decrypt,
4233                         .ivsize = CTR_RFC3686_IV_SIZE,
4234                         .maxauthsize = SHA384_DIGEST_SIZE,
4235                 },
4236                 .caam = {
4237                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4238                                            OP_ALG_AAI_CTR_MOD128,
4239                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4240                                            OP_ALG_AAI_HMAC_PRECOMP,
4241                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4242                         .rfc3686 = true,
4243                 },
4244         },
4245         {
4246                 .aead = {
4247                         .base = {
4248                                 .cra_name = "seqiv(authenc(hmac(sha384),"
4249                                             "rfc3686(ctr(aes))))",
4250                                 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
4251                                                    "rfc3686-ctr-aes-caam",
4252                                 .cra_blocksize = 1,
4253                         },
4254                         .setkey = aead_setkey,
4255                         .setauthsize = aead_setauthsize,
4256                         .encrypt = aead_encrypt,
4257                         .decrypt = aead_decrypt,
4258                         .ivsize = CTR_RFC3686_IV_SIZE,
4259                         .maxauthsize = SHA384_DIGEST_SIZE,
4260                 },
4261                 .caam = {
4262                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4263                                            OP_ALG_AAI_CTR_MOD128,
4264                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4265                                            OP_ALG_AAI_HMAC_PRECOMP,
4266                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4267                         .rfc3686 = true,
4268                         .geniv = true,
4269                 },
4270         },
4271         {
4272                 .aead = {
4273                         .base = {
4274                                 .cra_name = "authenc(hmac(sha512),"
4275                                             "rfc3686(ctr(aes)))",
4276                                 .cra_driver_name = "authenc-hmac-sha512-"
4277                                                    "rfc3686-ctr-aes-caam",
4278                                 .cra_blocksize = 1,
4279                         },
4280                         .setkey = aead_setkey,
4281                         .setauthsize = aead_setauthsize,
4282                         .encrypt = aead_encrypt,
4283                         .decrypt = aead_decrypt,
4284                         .ivsize = CTR_RFC3686_IV_SIZE,
4285                         .maxauthsize = SHA512_DIGEST_SIZE,
4286                 },
4287                 .caam = {
4288                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4289                                            OP_ALG_AAI_CTR_MOD128,
4290                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4291                                            OP_ALG_AAI_HMAC_PRECOMP,
4292                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4293                         .rfc3686 = true,
4294                 },
4295         },
4296         {
4297                 .aead = {
4298                         .base = {
4299                                 .cra_name = "seqiv(authenc(hmac(sha512),"
4300                                             "rfc3686(ctr(aes))))",
4301                                 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
4302                                                    "rfc3686-ctr-aes-caam",
4303                                 .cra_blocksize = 1,
4304                         },
4305                         .setkey = aead_setkey,
4306                         .setauthsize = aead_setauthsize,
4307                         .encrypt = aead_encrypt,
4308                         .decrypt = aead_decrypt,
4309                         .ivsize = CTR_RFC3686_IV_SIZE,
4310                         .maxauthsize = SHA512_DIGEST_SIZE,
4311                 },
4312                 .caam = {
4313                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4314                                            OP_ALG_AAI_CTR_MOD128,
4315                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4316                                            OP_ALG_AAI_HMAC_PRECOMP,
4317                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4318                         .rfc3686 = true,
4319                         .geniv = true,
4320                 },
4321         },
4322 };
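
/*
 * The table above is matched by cra_name through the generic crypto API.
 * As a minimal sketch (not taken from an in-tree user), a kernel consumer
 * of one of these transforms could look like the following, assuming
 * key/keylen already hold a key packed in the authenc rtattr format that
 * aead_setkey() expects:
 *
 *	struct crypto_aead *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_aead("authenc(hmac(sha256),cbc(des))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_aead_setkey(tfm, key, keylen);
 *	if (!err)
 *		err = crypto_aead_setauthsize(tfm, SHA256_DIGEST_SIZE);
 *
 * CAAM_CRA_PRIORITY makes these entries preferred over software
 * implementations that register the same cra_name.
 */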
4323
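/*
 * caam_crypto_alg - associates a registered crypto_alg with its CAAM
 * descriptor header template and with the driver's alg_list of
 * dynamically allocated algorithms.
 */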
4324 struct caam_crypto_alg {
4325         struct crypto_alg crypto_alg;
4326         struct list_head entry;
4327         struct caam_alg_entry caam;
4328 };
4329
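/*
 * caam_init_common - per-transform setup shared by the ablkcipher and
 * AEAD init paths: allocate a job ring for this tfm and seed the context
 * with the descriptor header templates from the algorithm entry (class 1
 * and class 2 algorithm types, plus the alg_op used later when the split
 * authentication key is generated).
 */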
4330 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
4331 {
4332         ctx->jrdev = caam_jr_alloc();
4333         if (IS_ERR(ctx->jrdev)) {
4334                 pr_err("Job Ring Device allocation for transform failed\n");
4335                 return PTR_ERR(ctx->jrdev);
4336         }
4337
4338         /* copy descriptor header template value */
4339         ctx->class1_alg_type = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
4340         ctx->class2_alg_type = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
4341         ctx->alg_op = OP_TYPE_CLASS2_ALG | caam->alg_op;
4342
4343         return 0;
4344 }
4345
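/* ->cra_init hook for the ablkcipher/givcipher algorithms in driver_algs */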
4346 static int caam_cra_init(struct crypto_tfm *tfm)
4347 {
4348         struct crypto_alg *alg = tfm->__crt_alg;
4349         struct caam_crypto_alg *caam_alg =
4350                  container_of(alg, struct caam_crypto_alg, crypto_alg);
4351         struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
4352
4353         return caam_init_common(ctx, &caam_alg->caam);
4354 }
4355
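/* ->init hook for the AEAD algorithms in driver_aeads */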
4356 static int caam_aead_init(struct crypto_aead *tfm)
4357 {
4358         struct aead_alg *alg = crypto_aead_alg(tfm);
4359         struct caam_aead_alg *caam_alg =
4360                  container_of(alg, struct caam_aead_alg, aead);
4361         struct caam_ctx *ctx = crypto_aead_ctx(tfm);
4362
4363         return caam_init_common(ctx, &caam_alg->caam);
4364 }
4365
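/*
 * caam_exit_common - undo caam_init_common() and any state left behind by
 * setkey(): unmap the shared descriptors and the key material if they were
 * DMA mapped, then release the job ring.
 */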
4366 static void caam_exit_common(struct caam_ctx *ctx)
4367 {
4368         if (ctx->sh_desc_enc_dma &&
4369             !dma_mapping_error(ctx->jrdev, ctx->sh_desc_enc_dma))
4370                 dma_unmap_single(ctx->jrdev, ctx->sh_desc_enc_dma,
4371                                  desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
4372         if (ctx->sh_desc_dec_dma &&
4373             !dma_mapping_error(ctx->jrdev, ctx->sh_desc_dec_dma))
4374                 dma_unmap_single(ctx->jrdev, ctx->sh_desc_dec_dma,
4375                                  desc_bytes(ctx->sh_desc_dec), DMA_TO_DEVICE);
4376         if (ctx->sh_desc_givenc_dma &&
4377             !dma_mapping_error(ctx->jrdev, ctx->sh_desc_givenc_dma))
4378                 dma_unmap_single(ctx->jrdev, ctx->sh_desc_givenc_dma,
4379                                  desc_bytes(ctx->sh_desc_givenc),
4380                                  DMA_TO_DEVICE);
4381         if (ctx->key_dma &&
4382             !dma_mapping_error(ctx->jrdev, ctx->key_dma))
4383                 dma_unmap_single(ctx->jrdev, ctx->key_dma,
4384                                  ctx->enckeylen + ctx->split_key_pad_len,
4385                                  DMA_TO_DEVICE);
4386
4387         caam_jr_free(ctx->jrdev);
4388 }
4389
4390 static void caam_cra_exit(struct crypto_tfm *tfm)
4391 {
4392         caam_exit_common(crypto_tfm_ctx(tfm));
4393 }
4394
4395 static void caam_aead_exit(struct crypto_aead *tfm)
4396 {
4397         caam_exit_common(crypto_aead_ctx(tfm));
4398 }
4399
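/*
 * Module teardown: unregister every AEAD that made it through registration,
 * then unregister and free the dynamically allocated ablkcipher/givcipher
 * entries on alg_list.
 */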
4400 static void __exit caam_algapi_exit(void)
4401 {
4403         struct caam_crypto_alg *t_alg, *n;
4404         int i;
4405
4406         for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
4407                 struct caam_aead_alg *t_alg = driver_aeads + i;
4408
4409                 if (t_alg->registered)
4410                         crypto_unregister_aead(&t_alg->aead);
4411         }
4412
4413         if (!alg_list.next)
4414                 return;
4415
4416         list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
4417                 crypto_unregister_alg(&t_alg->crypto_alg);
4418                 list_del(&t_alg->entry);
4419                 kfree(t_alg);
4420         }
4421 }
4422
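/*
 * caam_alg_alloc - turn an ablkcipher/givcipher template into a registrable
 * crypto_alg: copy the names, wire up the common init/exit hooks and record
 * the CAAM descriptor header values used later for shared descriptor
 * construction.
 */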
4423 static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
4424                                               *template)
4425 {
4426         struct caam_crypto_alg *t_alg;
4427         struct crypto_alg *alg;
4428
4429         t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
4430         if (!t_alg) {
4431                 pr_err("failed to allocate t_alg\n");
4432                 return ERR_PTR(-ENOMEM);
4433         }
4434
4435         alg = &t_alg->crypto_alg;
4436
4437         snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
4438         snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
4439                  template->driver_name);
4440         alg->cra_module = THIS_MODULE;
4441         alg->cra_init = caam_cra_init;
4442         alg->cra_exit = caam_cra_exit;
4443         alg->cra_priority = CAAM_CRA_PRIORITY;
4444         alg->cra_blocksize = template->blocksize;
4445         alg->cra_alignmask = 0;
4446         alg->cra_ctxsize = sizeof(struct caam_ctx);
4447         alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
4448                          template->type;
4449         switch (template->type) {
4450         case CRYPTO_ALG_TYPE_GIVCIPHER:
4451                 alg->cra_type = &crypto_givcipher_type;
4452                 alg->cra_ablkcipher = template->template_ablkcipher;
4453                 break;
4454         case CRYPTO_ALG_TYPE_ABLKCIPHER:
4455                 alg->cra_type = &crypto_ablkcipher_type;
4456                 alg->cra_ablkcipher = template->template_ablkcipher;
4457                 break;
4458         }
4459
4460         t_alg->caam.class1_alg_type = template->class1_alg_type;
4461         t_alg->caam.class2_alg_type = template->class2_alg_type;
4462         t_alg->caam.alg_op = template->alg_op;
4463
4464         return t_alg;
4465 }
4466
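/*
 * Fill in the fields common to every driver_aeads entry (module, priority,
 * context size, flags, init/exit hooks) before registration.
 */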
4467 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
4468 {
4469         struct aead_alg *alg = &t_alg->aead;
4470
4471         alg->base.cra_module = THIS_MODULE;
4472         alg->base.cra_priority = CAAM_CRA_PRIORITY;
4473         alg->base.cra_ctxsize = sizeof(struct caam_ctx);
4474         alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
4475
4476         alg->init = caam_aead_init;
4477         alg->exit = caam_aead_exit;
4478 }
4479
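/*
 * Module init: locate the CAAM controller, read which DES/AES/MD blocks
 * (and which versions of them) this part instantiates, and register only
 * the algorithms the hardware can actually run.
 */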
4480 static int __init caam_algapi_init(void)
4481 {
4482         struct device_node *dev_node;
4483         struct platform_device *pdev;
4484         struct device *ctrldev;
4485         struct caam_drv_private *priv;
4486         int i = 0, err = 0;
4487         u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
4488         unsigned int md_limit = SHA512_DIGEST_SIZE;
4489         bool registered = false;
4490
4491         dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
4492         if (!dev_node) {
4493                 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
4494                 if (!dev_node)
4495                         return -ENODEV;
4496         }
4497
4498         pdev = of_find_device_by_node(dev_node);
4499         if (!pdev) {
4500                 of_node_put(dev_node);
4501                 return -ENODEV;
4502         }
4503
4504         ctrldev = &pdev->dev;
4505         priv = dev_get_drvdata(ctrldev);
4506         of_node_put(dev_node);
4507
4508         /*
4509          * If priv is NULL, it's probably because the caam driver wasn't
4510          * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
4511          */
4512         if (!priv)
4513                 return -ENODEV;
4514
4516         INIT_LIST_HEAD(&alg_list);
4517
4518         /*
4519          * Register crypto algorithms the device supports.
4520          * First, detect presence and attributes of DES, AES, and MD blocks.
4521          */
4522         cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
4523         cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
4524         des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
4525         aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
4526         md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
4527
4528         /* If MD is present, limit digest size based on LP256 */
4529         if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
4530                 md_limit = SHA256_DIGEST_SIZE;
4531
4532         for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
4533                 struct caam_crypto_alg *t_alg;
4534                 struct caam_alg_template *alg = driver_algs + i;
4535                 u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;
4536
4537                 /* Skip DES algorithms if not supported by device */
4538                 if (!des_inst &&
4539                     ((alg_sel == OP_ALG_ALGSEL_3DES) ||
4540                      (alg_sel == OP_ALG_ALGSEL_DES)))
4541                         continue;
4542
4543                 /* Skip AES algorithms if not supported by device */
4544                 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
4545                         continue;
4546
4547                 /*
4548                  * Skip AES-XTS mode, which is not available
4549                  * on LP devices.
4550                  */
4551                 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
4552                         if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
4553                              OP_ALG_AAI_XTS)
4554                                 continue;
4555
4556                 t_alg = caam_alg_alloc(alg);
4557                 if (IS_ERR(t_alg)) {
4558                         err = PTR_ERR(t_alg);
4559                         pr_warn("%s alg allocation failed\n", alg->driver_name);
4560                         continue;
4561                 }
4562
4563                 err = crypto_register_alg(&t_alg->crypto_alg);
4564                 if (err) {
4565                         pr_warn("%s alg registration failed\n",
4566                                 t_alg->crypto_alg.cra_driver_name);
4567                         kfree(t_alg);
4568                         continue;
4569                 }
4570
4571                 list_add_tail(&t_alg->entry, &alg_list);
4572                 registered = true;
4573         }
4574
4575         for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
4576                 struct caam_aead_alg *t_alg = driver_aeads + i;
4577                 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
4578                                  OP_ALG_ALGSEL_MASK;
4579                 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
4580                                  OP_ALG_ALGSEL_MASK;
4581                 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
4582
4583                 /* Skip DES algorithms if not supported by device */
4584                 if (!des_inst &&
4585                     ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
4586                      (c1_alg_sel == OP_ALG_ALGSEL_DES)))
4587                         continue;
4588
4589                 /* Skip AES algorithms if not supported by device */
4590                 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
4591                         continue;
4592
4593                 /*
4594                  * Skip AES-GCM based AEAD algorithms, which are
4595                  * not available on LP devices.
4596                  */
4597                 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
4598                         if (alg_aai == OP_ALG_AAI_GCM)
4599                                 continue;
4600
4601                 /*
4602                  * Skip algorithms requiring message digests
4603                  * if MD or MD size is not supported by device.
4604                  */
4605                 if (c2_alg_sel &&
4606                     (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
4607                         continue;
4608
4609                 caam_aead_alg_init(t_alg);
4610
4611                 err = crypto_register_aead(&t_alg->aead);
4612                 if (err) {
4613                         pr_warn("%s alg registration failed\n",
4614                                 t_alg->aead.base.cra_driver_name);
4615                         continue;
4616                 }
4617
4618                 t_alg->registered = true;
4619                 registered = true;
4620         }
4621
4622         if (registered)
4623                 pr_info("caam algorithms registered in /proc/crypto\n");
4624
4625         return err;
4626 }
4627
4628 module_init(caam_algapi_init);
4629 module_exit(caam_algapi_exit);
4630
4631 MODULE_LICENSE("GPL");
4632 MODULE_DESCRIPTION("FSL CAAM support for crypto API");
4633 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");