GNU Linux-libre 4.9.309-gnu1
[releases.git] / drivers / crypto / caam / caamalg.c
1 /*
2  * caam - Freescale FSL CAAM support for crypto API
3  *
4  * Copyright 2008-2011 Freescale Semiconductor, Inc.
5  *
6  * Based on talitos crypto API driver.
7  *
8  * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
9  *
10  * ---------------                     ---------------
11  * | JobDesc #1  |-------------------->|  ShareDesc  |
12  * | *(packet 1) |                     |   (PDB)     |
13  * ---------------      |------------->|  (hashKey)  |
14  *       .              |              | (cipherKey) |
15  *       .              |    |-------->| (operation) |
16  * ---------------      |    |         ---------------
17  * | JobDesc #2  |------|    |
18  * | *(packet 2) |           |
19  * ---------------           |
20  *       .                   |
21  *       .                   |
22  * ---------------           |
23  * | JobDesc #3  |------------
24  * | *(packet 3) |
25  * ---------------
26  *
27  * The SharedDesc never changes for a connection unless rekeyed, but
28  * each packet will likely be in a different place. So all we need
29  * to know to process the packet is where the input is, where the
30  * output goes, and what context we want to process with. Context is
31  * in the SharedDesc, packet references in the JobDesc.
32  *
33  * So, a job desc looks like:
34  *
35  * ---------------------
36  * | Header            |
37  * | ShareDesc Pointer |
38  * | SEQ_OUT_PTR       |
39  * | (output buffer)   |
40  * | (output length)   |
41  * | SEQ_IN_PTR        |
42  * | (input buffer)    |
43  * | (input length)    |
44  * ---------------------
45  */
46
47 #include "compat.h"
48
49 #include "regs.h"
50 #include "intern.h"
51 #include "desc_constr.h"
52 #include "jr.h"
53 #include "error.h"
54 #include "sg_sw_sec4.h"
55 #include "key_gen.h"
56
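/*
 * Illustrative only: a minimal sketch (not used by this driver) of how the
 * job descriptor layout documented at the top of this file maps onto the
 * desc_constr.h helpers. The function and parameter names below are made up
 * for the example; sh_desc_len is assumed to be the shared descriptor length
 * in words, as returned by desc_len().
 */
static inline void example_build_job_desc(u32 *desc, dma_addr_t sh_desc_dma,
                                          int sh_desc_len,
                                          dma_addr_t src_dma, u32 src_len,
                                          dma_addr_t dst_dma, u32 dst_len)
{
        /* Header + pointer to the (reusable) shared descriptor */
        init_job_desc_shared(desc, sh_desc_dma, sh_desc_len,
                             HDR_SHARE_DEFER | HDR_REVERSE);
        /* SEQ_OUT_PTR: output buffer and length */
        append_seq_out_ptr(desc, dst_dma, dst_len, 0);
        /* SEQ_IN_PTR: input buffer and length */
        append_seq_in_ptr(desc, src_dma, src_len, 0);
}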
57 /*
58  * crypto alg
59  */
60 #define CAAM_CRA_PRIORITY               3000
61 /* max key is sum of AES_MAX_KEY_SIZE, RFC3686 nonce size and max split key size */
62 #define CAAM_MAX_KEY_SIZE               (AES_MAX_KEY_SIZE + \
63                                          CTR_RFC3686_NONCE_SIZE + \
64                                          SHA512_DIGEST_SIZE * 2)
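/*
 * Worked example: with AES_MAX_KEY_SIZE = 32, CTR_RFC3686_NONCE_SIZE = 4 and
 * SHA512_DIGEST_SIZE = 64, this bounds ctx->key at 32 + 4 + 2 * 64 = 164
 * bytes, enough for [padded split key | encryption key | optional RFC3686
 * nonce] (see append_key_aead() and aead_setkey() below).
 */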
65 /* max IV is max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
66 #define CAAM_MAX_IV_LENGTH              16
67
68 #define AEAD_DESC_JOB_IO_LEN            (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
69 #define GCM_DESC_JOB_IO_LEN             (AEAD_DESC_JOB_IO_LEN + \
70                                          CAAM_CMD_SZ * 4)
71 #define AUTHENC_DESC_JOB_IO_LEN         (AEAD_DESC_JOB_IO_LEN + \
72                                          CAAM_CMD_SZ * 5)
73
74 /* length of descriptors text */
75 #define DESC_AEAD_BASE                  (4 * CAAM_CMD_SZ)
76 #define DESC_AEAD_ENC_LEN               (DESC_AEAD_BASE + 11 * CAAM_CMD_SZ)
77 #define DESC_AEAD_DEC_LEN               (DESC_AEAD_BASE + 15 * CAAM_CMD_SZ)
78 #define DESC_AEAD_GIVENC_LEN            (DESC_AEAD_ENC_LEN + 10 * CAAM_CMD_SZ)
79
80 /* Note: Nonce is counted in enckeylen */
81 #define DESC_AEAD_CTR_RFC3686_LEN       (4 * CAAM_CMD_SZ)
82
83 #define DESC_AEAD_NULL_BASE             (3 * CAAM_CMD_SZ)
84 #define DESC_AEAD_NULL_ENC_LEN          (DESC_AEAD_NULL_BASE + 11 * CAAM_CMD_SZ)
85 #define DESC_AEAD_NULL_DEC_LEN          (DESC_AEAD_NULL_BASE + 13 * CAAM_CMD_SZ)
86
87 #define DESC_GCM_BASE                   (3 * CAAM_CMD_SZ)
88 #define DESC_GCM_ENC_LEN                (DESC_GCM_BASE + 16 * CAAM_CMD_SZ)
89 #define DESC_GCM_DEC_LEN                (DESC_GCM_BASE + 12 * CAAM_CMD_SZ)
90
91 #define DESC_RFC4106_BASE               (3 * CAAM_CMD_SZ)
92 #define DESC_RFC4106_ENC_LEN            (DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)
93 #define DESC_RFC4106_DEC_LEN            (DESC_RFC4106_BASE + 13 * CAAM_CMD_SZ)
94
95 #define DESC_RFC4543_BASE               (3 * CAAM_CMD_SZ)
96 #define DESC_RFC4543_ENC_LEN            (DESC_RFC4543_BASE + 11 * CAAM_CMD_SZ)
97 #define DESC_RFC4543_DEC_LEN            (DESC_RFC4543_BASE + 12 * CAAM_CMD_SZ)
98
99 #define DESC_ABLKCIPHER_BASE            (3 * CAAM_CMD_SZ)
100 #define DESC_ABLKCIPHER_ENC_LEN         (DESC_ABLKCIPHER_BASE + \
101                                          20 * CAAM_CMD_SZ)
102 #define DESC_ABLKCIPHER_DEC_LEN         (DESC_ABLKCIPHER_BASE + \
103                                          15 * CAAM_CMD_SZ)
104
105 #define DESC_MAX_USED_BYTES             (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
106 #define DESC_MAX_USED_LEN               (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
107
108 #ifdef DEBUG
109 /* for print_hex_dumps with line references */
110 #define debug(format, arg...) printk(format, arg)
111 #else
112 #define debug(format, arg...)
113 #endif
114
115 #ifdef DEBUG
116 #include <linux/highmem.h>
117
118 static void dbg_dump_sg(const char *level, const char *prefix_str,
119                         int prefix_type, int rowsize, int groupsize,
120                         struct scatterlist *sg, size_t tlen, bool ascii,
121                         bool may_sleep)
122 {
123         struct scatterlist *it;
124         void *it_page;
125         size_t len;
126         void *buf;
127
128                 for (it = sg; it != NULL && tlen > 0; it = sg_next(it)) {
129                 /*
130                  * make sure the scatterlist's page
131                  * has a valid virtual memory mapping
132                  */
133                 it_page = kmap_atomic(sg_page(it));
134                 if (unlikely(!it_page)) {
135                         printk(KERN_ERR "dbg_dump_sg: kmap failed\n");
136                         return;
137                 }
138
139                 buf = it_page + it->offset;
140                 len = min_t(size_t, tlen, it->length);
141                 print_hex_dump(level, prefix_str, prefix_type, rowsize,
142                                groupsize, buf, len, ascii);
143                 tlen -= len;
144
145                 kunmap_atomic(it_page);
146         }
147 }
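/*
 * Hypothetical usage, mirroring the print_hex_dump() calls elsewhere in this
 * file (the "req" pointer and the dumped length are placeholders):
 *
 *	dbg_dump_sg(KERN_ERR, "src @" __stringify(__LINE__)": ",
 *		    DUMP_PREFIX_ADDRESS, 16, 4, req->src,
 *		    req->assoclen + req->cryptlen, true, false);
 */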
148 #endif
149
150 static struct list_head alg_list;
151
152 struct caam_alg_entry {
153         int class1_alg_type;
154         int class2_alg_type;
155         int alg_op;
156         bool rfc3686;
157         bool geniv;
158 };
159
160 struct caam_aead_alg {
161         struct aead_alg aead;
162         struct caam_alg_entry caam;
163         bool registered;
164 };
165
166 /* Set DK bit in class 1 operation if shared */
167 static inline void append_dec_op1(u32 *desc, u32 type)
168 {
169         u32 *jump_cmd, *uncond_jump_cmd;
170
171         /* DK bit is valid only for AES */
172         if ((type & OP_ALG_ALGSEL_MASK) != OP_ALG_ALGSEL_AES) {
173                 append_operation(desc, type | OP_ALG_AS_INITFINAL |
174                                  OP_ALG_DECRYPT);
175                 return;
176         }
177
178         jump_cmd = append_jump(desc, JUMP_TEST_ALL | JUMP_COND_SHRD);
179         append_operation(desc, type | OP_ALG_AS_INITFINAL |
180                          OP_ALG_DECRYPT);
181         uncond_jump_cmd = append_jump(desc, JUMP_TEST_ALL);
182         set_jump_tgt_here(desc, jump_cmd);
183         append_operation(desc, type | OP_ALG_AS_INITFINAL |
184                          OP_ALG_DECRYPT | OP_ALG_AAI_DK);
185         set_jump_tgt_here(desc, uncond_jump_cmd);
186 }
187
188 /*
189  * For aead functions, read payload and write payload,
190  * both of which are specified in req->src and req->dst
191  */
192 static inline void aead_append_src_dst(u32 *desc, u32 msg_type)
193 {
194         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
195         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH |
196                              KEY_VLF | msg_type | FIFOLD_TYPE_LASTBOTH);
197 }
198
199 /*
200  * For ablkcipher encrypt and decrypt, read from req->src and
201  * write to req->dst
202  */
203 static inline void ablkcipher_append_src_dst(u32 *desc)
204 {
205         append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
206         append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
207         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 |
208                              KEY_VLF | FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
209         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
210 }
211
212 /*
213  * per-session context
214  */
215 struct caam_ctx {
216         struct device *jrdev;
217         u32 sh_desc_enc[DESC_MAX_USED_LEN];
218         u32 sh_desc_dec[DESC_MAX_USED_LEN];
219         u32 sh_desc_givenc[DESC_MAX_USED_LEN];
220         dma_addr_t sh_desc_enc_dma;
221         dma_addr_t sh_desc_dec_dma;
222         dma_addr_t sh_desc_givenc_dma;
223         u32 class1_alg_type;
224         u32 class2_alg_type;
225         u32 alg_op;
226         u8 key[CAAM_MAX_KEY_SIZE];
227         dma_addr_t key_dma;
228         unsigned int enckeylen;
229         unsigned int split_key_len;
230         unsigned int split_key_pad_len;
231         unsigned int authsize;
232 };
233
234 static void append_key_aead(u32 *desc, struct caam_ctx *ctx,
235                             int keys_fit_inline, bool is_rfc3686)
236 {
237         u32 *nonce;
238         unsigned int enckeylen = ctx->enckeylen;
239
240         /*
241          * RFC3686 specific:
242          *      | ctx->key = {AUTH_KEY, ENC_KEY, NONCE}
243          *      | enckeylen = encryption key size + nonce size
244          */
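        /*
         * Illustrative example (assuming hmac(sha256) authentication and a
         * 16-byte AES key): ctx->key = [64-byte padded split key |
         * 16-byte AES key | 4-byte nonce], ctx->enckeylen = 20, and the
         * enckeylen used below = 16.
         */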
245         if (is_rfc3686)
246                 enckeylen -= CTR_RFC3686_NONCE_SIZE;
247
248         if (keys_fit_inline) {
249                 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
250                                   ctx->split_key_len, CLASS_2 |
251                                   KEY_DEST_MDHA_SPLIT | KEY_ENC);
252                 append_key_as_imm(desc, (void *)ctx->key +
253                                   ctx->split_key_pad_len, enckeylen,
254                                   enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
255         } else {
256                 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
257                            KEY_DEST_MDHA_SPLIT | KEY_ENC);
258                 append_key(desc, ctx->key_dma + ctx->split_key_pad_len,
259                            enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
260         }
261
262         /* Load Counter into CONTEXT1 reg */
263         if (is_rfc3686) {
264                 nonce = (u32 *)((void *)ctx->key + ctx->split_key_pad_len +
265                                enckeylen);
266                 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
267                                    LDST_CLASS_IND_CCB |
268                                    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
269                 append_move(desc,
270                             MOVE_SRC_OUTFIFO |
271                             MOVE_DEST_CLASS1CTX |
272                             (16 << MOVE_OFFSET_SHIFT) |
273                             (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
274         }
275 }
276
277 static void init_sh_desc_key_aead(u32 *desc, struct caam_ctx *ctx,
278                                   int keys_fit_inline, bool is_rfc3686)
279 {
280         u32 *key_jump_cmd;
281
282         /* Note: Context registers are saved. */
283         init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
284
285         /* Skip if already shared */
286         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
287                                    JUMP_COND_SHRD);
288
289         append_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);
290
291         set_jump_tgt_here(desc, key_jump_cmd);
292 }
293
294 static int aead_null_set_sh_desc(struct crypto_aead *aead)
295 {
296         struct caam_ctx *ctx = crypto_aead_ctx(aead);
297         struct device *jrdev = ctx->jrdev;
298         bool keys_fit_inline = false;
299         u32 *key_jump_cmd, *jump_cmd, *read_move_cmd, *write_move_cmd;
300         u32 *desc;
301
302         /*
303          * Job Descriptor and Shared Descriptors
304          * must all fit into the 64-word Descriptor h/w Buffer
305          */
306         if (DESC_AEAD_NULL_ENC_LEN + AEAD_DESC_JOB_IO_LEN +
307             ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
308                 keys_fit_inline = true;
309
310         /* aead_encrypt shared descriptor */
311         desc = ctx->sh_desc_enc;
312
313         init_sh_desc(desc, HDR_SHARE_SERIAL);
314
315         /* Skip if already shared */
316         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
317                                    JUMP_COND_SHRD);
318         if (keys_fit_inline)
319                 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
320                                   ctx->split_key_len, CLASS_2 |
321                                   KEY_DEST_MDHA_SPLIT | KEY_ENC);
322         else
323                 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
324                            KEY_DEST_MDHA_SPLIT | KEY_ENC);
325         set_jump_tgt_here(desc, key_jump_cmd);
326
327         /* assoclen + cryptlen = seqinlen */
328         append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
329
330         /* Prepare to read and write cryptlen + assoclen bytes */
331         append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
332         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
333
334         /*
335          * MOVE_LEN opcode is not available in all SEC HW revisions,
336          * thus need to do some magic, i.e. self-patch the descriptor
337          * buffer.
338          */
339         read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
340                                     MOVE_DEST_MATH3 |
341                                     (0x6 << MOVE_LEN_SHIFT));
342         write_move_cmd = append_move(desc, MOVE_SRC_MATH3 |
343                                      MOVE_DEST_DESCBUF |
344                                      MOVE_WAITCOMP |
345                                      (0x8 << MOVE_LEN_SHIFT));
346
347         /* Class 2 operation */
348         append_operation(desc, ctx->class2_alg_type |
349                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
350
351         /* Read and write cryptlen bytes */
352         aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
353
354         set_move_tgt_here(desc, read_move_cmd);
355         set_move_tgt_here(desc, write_move_cmd);
356         append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
357         append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
358                     MOVE_AUX_LS);
359
360         /* Write ICV */
361         append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
362                          LDST_SRCDST_BYTE_CONTEXT);
363
364         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
365                                               desc_bytes(desc),
366                                               DMA_TO_DEVICE);
367         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
368                 dev_err(jrdev, "unable to map shared descriptor\n");
369                 return -ENOMEM;
370         }
371 #ifdef DEBUG
372         print_hex_dump(KERN_ERR,
373                        "aead null enc shdesc@"__stringify(__LINE__)": ",
374                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
375                        desc_bytes(desc), 1);
376 #endif
377
378         /*
379          * Job Descriptor and Shared Descriptors
380          * must all fit into the 64-word Descriptor h/w Buffer
381          */
382         keys_fit_inline = false;
383         if (DESC_AEAD_NULL_DEC_LEN + DESC_JOB_IO_LEN +
384             ctx->split_key_pad_len <= CAAM_DESC_BYTES_MAX)
385                 keys_fit_inline = true;
386
387         desc = ctx->sh_desc_dec;
388
389         /* aead_decrypt shared descriptor */
390         init_sh_desc(desc, HDR_SHARE_SERIAL);
391
392         /* Skip if already shared */
393         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
394                                    JUMP_COND_SHRD);
395         if (keys_fit_inline)
396                 append_key_as_imm(desc, ctx->key, ctx->split_key_pad_len,
397                                   ctx->split_key_len, CLASS_2 |
398                                   KEY_DEST_MDHA_SPLIT | KEY_ENC);
399         else
400                 append_key(desc, ctx->key_dma, ctx->split_key_len, CLASS_2 |
401                            KEY_DEST_MDHA_SPLIT | KEY_ENC);
402         set_jump_tgt_here(desc, key_jump_cmd);
403
404         /* Class 2 operation */
405         append_operation(desc, ctx->class2_alg_type |
406                          OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
407
408         /* assoclen + cryptlen = seqoutlen */
409         append_math_sub(desc, REG2, SEQOUTLEN, REG0, CAAM_CMD_SZ);
410
411         /* Prepare to read and write cryptlen + assoclen bytes */
412         append_math_add(desc, VARSEQINLEN, ZERO, REG2, CAAM_CMD_SZ);
413         append_math_add(desc, VARSEQOUTLEN, ZERO, REG2, CAAM_CMD_SZ);
414
415         /*
416          * MOVE_LEN opcode is not available in all SEC HW revisions,
417          * thus need to do some magic, i.e. self-patch the descriptor
418          * buffer.
419          */
420         read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF |
421                                     MOVE_DEST_MATH2 |
422                                     (0x6 << MOVE_LEN_SHIFT));
423         write_move_cmd = append_move(desc, MOVE_SRC_MATH2 |
424                                      MOVE_DEST_DESCBUF |
425                                      MOVE_WAITCOMP |
426                                      (0x8 << MOVE_LEN_SHIFT));
427
428         /* Read and write cryptlen bytes */
429         aead_append_src_dst(desc, FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
430
431         /*
432          * Insert a NOP here, since we need at least 4 instructions between
433          * code patching the descriptor buffer and the location being patched.
434          */
435         jump_cmd = append_jump(desc, JUMP_TEST_ALL);
436         set_jump_tgt_here(desc, jump_cmd);
437
438         set_move_tgt_here(desc, read_move_cmd);
439         set_move_tgt_here(desc, write_move_cmd);
440         append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
441         append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO |
442                     MOVE_AUX_LS);
443         append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
444
445         /* Load ICV */
446         append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
447                              FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
448
449         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
450                                               desc_bytes(desc),
451                                               DMA_TO_DEVICE);
452         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
453                 dev_err(jrdev, "unable to map shared descriptor\n");
454                 return -ENOMEM;
455         }
456 #ifdef DEBUG
457         print_hex_dump(KERN_ERR,
458                        "aead null dec shdesc@"__stringify(__LINE__)": ",
459                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
460                        desc_bytes(desc), 1);
461 #endif
462
463         return 0;
464 }
465
466 static int aead_set_sh_desc(struct crypto_aead *aead)
467 {
468         struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
469                                                  struct caam_aead_alg, aead);
470         unsigned int ivsize = crypto_aead_ivsize(aead);
471         struct caam_ctx *ctx = crypto_aead_ctx(aead);
472         struct device *jrdev = ctx->jrdev;
473         bool keys_fit_inline;
474         u32 geniv, moveiv;
475         u32 ctx1_iv_off = 0;
476         u32 *desc;
477         u32 *wait_cmd;
478         const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
479                                OP_ALG_AAI_CTR_MOD128);
480         const bool is_rfc3686 = alg->caam.rfc3686;
481
482         if (!ctx->authsize)
483                 return 0;
484
485         /* NULL encryption / decryption */
486         if (!ctx->enckeylen)
487                 return aead_null_set_sh_desc(aead);
488
489         /*
490          * AES-CTR needs to load IV in CONTEXT1 reg
491          * at an offset of 128bits (16bytes)
492          * CONTEXT1[255:128] = IV
493          */
494         if (ctr_mode)
495                 ctx1_iv_off = 16;
496
497         /*
498          * RFC3686 specific:
499          *      CONTEXT1[255:128] = {NONCE, IV, COUNTER}
500          */
501         if (is_rfc3686)
502                 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
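        /*
         * Resulting CONTEXT1 byte layout for RFC3686 (4-byte nonce, 8-byte
         * IV): bytes 16-19 hold the nonce, 20-27 the IV and 28-31 the block
         * counter, which the be32 loads below initialize to 1 at offset
         * ctx1_iv_off + CTR_RFC3686_IV_SIZE.
         */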
503
504         if (alg->caam.geniv)
505                 goto skip_enc;
506
507         /*
508          * Job Descriptor and Shared Descriptors
509          * must all fit into the 64-word Descriptor h/w Buffer
510          */
511         keys_fit_inline = false;
512         if (DESC_AEAD_ENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
513             ctx->split_key_pad_len + ctx->enckeylen +
514             (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
515             CAAM_DESC_BYTES_MAX)
516                 keys_fit_inline = true;
517
518         /* aead_encrypt shared descriptor */
519         desc = ctx->sh_desc_enc;
520
521         /* Note: Context registers are saved. */
522         init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);
523
524         /* Class 2 operation */
525         append_operation(desc, ctx->class2_alg_type |
526                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
527
528         /* Read and write assoclen bytes */
529         append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
530         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
531
532         /* Skip assoc data */
533         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
534
535         /* read assoc before reading payload */
536         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
537                                       FIFOLDST_VLF);
538
539         /* Load Counter into CONTEXT1 reg */
540         if (is_rfc3686)
541                 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
542                                      LDST_SRCDST_BYTE_CONTEXT |
543                                      ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
544                                       LDST_OFFSET_SHIFT));
545
546         /* Class 1 operation */
547         append_operation(desc, ctx->class1_alg_type |
548                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
549
550         /* Read and write cryptlen bytes */
551         append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
552         append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
553         aead_append_src_dst(desc, FIFOLD_TYPE_MSG1OUT2);
554
555         /* Write ICV */
556         append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
557                          LDST_SRCDST_BYTE_CONTEXT);
558
559         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
560                                               desc_bytes(desc),
561                                               DMA_TO_DEVICE);
562         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
563                 dev_err(jrdev, "unable to map shared descriptor\n");
564                 return -ENOMEM;
565         }
566 #ifdef DEBUG
567         print_hex_dump(KERN_ERR, "aead enc shdesc@"__stringify(__LINE__)": ",
568                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
569                        desc_bytes(desc), 1);
570 #endif
571
572 skip_enc:
573         /*
574          * Job Descriptor and Shared Descriptors
575          * must all fit into the 64-word Descriptor h/w Buffer
576          */
577         keys_fit_inline = false;
578         if (DESC_AEAD_DEC_LEN + AUTHENC_DESC_JOB_IO_LEN +
579             ctx->split_key_pad_len + ctx->enckeylen +
580             (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
581             CAAM_DESC_BYTES_MAX)
582                 keys_fit_inline = true;
583
584         /* aead_decrypt shared descriptor */
585         desc = ctx->sh_desc_dec;
586
587         /* Note: Context registers are saved. */
588         init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);
589
590         /* Class 2 operation */
591         append_operation(desc, ctx->class2_alg_type |
592                          OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
593
594         /* Read and write assoclen bytes */
595         append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
596         if (alg->caam.geniv)
597                 append_math_add_imm_u32(desc, VARSEQOUTLEN, REG3, IMM, ivsize);
598         else
599                 append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
600
601         /* Skip assoc data */
602         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
603
604         /* read assoc before reading payload */
605         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
606                              KEY_VLF);
607
608         if (alg->caam.geniv) {
609                 append_seq_load(desc, ivsize, LDST_CLASS_1_CCB |
610                                 LDST_SRCDST_BYTE_CONTEXT |
611                                 (ctx1_iv_off << LDST_OFFSET_SHIFT));
612                 append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_CLASS2INFIFO |
613                             (ctx1_iv_off << MOVE_OFFSET_SHIFT) | ivsize);
614         }
615
616         /* Load Counter into CONTEXT1 reg */
617         if (is_rfc3686)
618                 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
619                                      LDST_SRCDST_BYTE_CONTEXT |
620                                      ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
621                                       LDST_OFFSET_SHIFT));
622
623         /* Choose operation */
624         if (ctr_mode)
625                 append_operation(desc, ctx->class1_alg_type |
626                                  OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
627         else
628                 append_dec_op1(desc, ctx->class1_alg_type);
629
630         /* Read and write cryptlen bytes */
631         append_math_add(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
632         append_math_add(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
633         aead_append_src_dst(desc, FIFOLD_TYPE_MSG);
634
635         /* Load ICV */
636         append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS2 |
637                              FIFOLD_TYPE_LAST2 | FIFOLD_TYPE_ICV);
638
639         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
640                                               desc_bytes(desc),
641                                               DMA_TO_DEVICE);
642         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
643                 dev_err(jrdev, "unable to map shared descriptor\n");
644                 return -ENOMEM;
645         }
646 #ifdef DEBUG
647         print_hex_dump(KERN_ERR, "aead dec shdesc@"__stringify(__LINE__)": ",
648                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
649                        desc_bytes(desc), 1);
650 #endif
651
652         if (!alg->caam.geniv)
653                 goto skip_givenc;
654
655         /*
656          * Job Descriptor and Shared Descriptors
657          * must all fit into the 64-word Descriptor h/w Buffer
658          */
659         keys_fit_inline = false;
660         if (DESC_AEAD_GIVENC_LEN + AUTHENC_DESC_JOB_IO_LEN +
661             ctx->split_key_pad_len + ctx->enckeylen +
662             (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0) <=
663             CAAM_DESC_BYTES_MAX)
664                 keys_fit_inline = true;
665
666         /* aead_givencrypt shared descriptor */
667         desc = ctx->sh_desc_enc;
668
669         /* Note: Context registers are saved. */
670         init_sh_desc_key_aead(desc, ctx, keys_fit_inline, is_rfc3686);
671
672         if (is_rfc3686)
673                 goto copy_iv;
674
675         /* Generate IV */
676         geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
677                 NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
678                 NFIFOENTRY_PTYPE_RND | (ivsize << NFIFOENTRY_DLEN_SHIFT);
679         append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
680                             LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
681         append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
682         append_move(desc, MOVE_WAITCOMP |
683                     MOVE_SRC_INFIFO | MOVE_DEST_CLASS1CTX |
684                     (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
685                     (ivsize << MOVE_LEN_SHIFT));
686         append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
687
688 copy_iv:
689         /* Copy generated IV from class 1 context to the output FIFO */
690         append_move(desc, MOVE_SRC_CLASS1CTX | MOVE_DEST_OUTFIFO |
691                     (ctx1_iv_off << MOVE_OFFSET_SHIFT) |
692                     (ivsize << MOVE_LEN_SHIFT));
693
694         /* Return to encryption */
695         append_operation(desc, ctx->class2_alg_type |
696                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
697
698         /* Read and write assoclen bytes */
699         append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
700         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
701
702         /* ivsize + cryptlen = seqoutlen - authsize */
703         append_math_sub_imm_u32(desc, REG3, SEQOUTLEN, IMM, ctx->authsize);
704
705         /* Skip assoc data */
706         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
707
708         /* read assoc before reading payload */
709         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS2 | FIFOLD_TYPE_MSG |
710                              KEY_VLF);
711
712         /* Copy iv from outfifo to class 2 fifo */
713         moveiv = NFIFOENTRY_STYPE_OFIFO | NFIFOENTRY_DEST_CLASS2 |
714                  NFIFOENTRY_DTYPE_MSG | (ivsize << NFIFOENTRY_DLEN_SHIFT);
715         append_load_imm_u32(desc, moveiv, LDST_CLASS_IND_CCB |
716                             LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
717         append_load_imm_u32(desc, ivsize, LDST_CLASS_2_CCB |
718                             LDST_SRCDST_WORD_DATASZ_REG | LDST_IMM);
719
720         /* Load Counter into CONTEXT1 reg */
721         if (is_rfc3686)
722                 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
723                                      LDST_SRCDST_BYTE_CONTEXT |
724                                      ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
725                                       LDST_OFFSET_SHIFT));
726
727         /* Class 1 operation */
728         append_operation(desc, ctx->class1_alg_type |
729                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
730
731         /* Will write ivsize + cryptlen */
732         append_math_add(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
733
734         /* No need to reload iv */
735         append_seq_fifo_load(desc, ivsize,
736                              FIFOLD_CLASS_SKIP);
737
738         /* Will read cryptlen */
739         append_math_add(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
740
741         /*
742          * Wait for IV transfer (ofifo -> class2) to finish before starting
743          * ciphertext transfer (ofifo -> external memory).
744          */
745         wait_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NIFP);
746         set_jump_tgt_here(desc, wait_cmd);
747
748         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | KEY_VLF |
749                              FIFOLD_TYPE_MSG1OUT2 | FIFOLD_TYPE_LASTBOTH);
750         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | KEY_VLF);
751
752         /* Write ICV */
753         append_seq_store(desc, ctx->authsize, LDST_CLASS_2_CCB |
754                          LDST_SRCDST_BYTE_CONTEXT);
755
756         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
757                                               desc_bytes(desc),
758                                               DMA_TO_DEVICE);
759         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
760                 dev_err(jrdev, "unable to map shared descriptor\n");
761                 return -ENOMEM;
762         }
763 #ifdef DEBUG
764         print_hex_dump(KERN_ERR, "aead givenc shdesc@"__stringify(__LINE__)": ",
765                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
766                        desc_bytes(desc), 1);
767 #endif
768
769 skip_givenc:
770         return 0;
771 }
772
773 static int aead_setauthsize(struct crypto_aead *authenc,
774                                     unsigned int authsize)
775 {
776         struct caam_ctx *ctx = crypto_aead_ctx(authenc);
777
778         ctx->authsize = authsize;
779         aead_set_sh_desc(authenc);
780
781         return 0;
782 }
783
784 static int gcm_set_sh_desc(struct crypto_aead *aead)
785 {
786         struct caam_ctx *ctx = crypto_aead_ctx(aead);
787         struct device *jrdev = ctx->jrdev;
788         bool keys_fit_inline = false;
789         u32 *key_jump_cmd, *zero_payload_jump_cmd,
790             *zero_assoc_jump_cmd1, *zero_assoc_jump_cmd2;
791         u32 *desc;
792
793         if (!ctx->enckeylen || !ctx->authsize)
794                 return 0;
795
796         /*
797          * AES GCM encrypt shared descriptor
798          * Job Descriptor and Shared Descriptor
799          * must fit into the 64-word Descriptor h/w Buffer
800          */
801         if (DESC_GCM_ENC_LEN + GCM_DESC_JOB_IO_LEN +
802             ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
803                 keys_fit_inline = true;
804
805         desc = ctx->sh_desc_enc;
806
807         init_sh_desc(desc, HDR_SHARE_SERIAL);
808
809         /* Skip key loading if it is loaded due to sharing */
810         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
811                                    JUMP_COND_SHRD | JUMP_COND_SELF);
812         if (keys_fit_inline)
813                 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
814                                   ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
815         else
816                 append_key(desc, ctx->key_dma, ctx->enckeylen,
817                            CLASS_1 | KEY_DEST_CLASS_REG);
818         set_jump_tgt_here(desc, key_jump_cmd);
819
820         /* class 1 operation */
821         append_operation(desc, ctx->class1_alg_type |
822                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
823
824         /* if assoclen + cryptlen is ZERO, skip to ICV write */
825         append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
826         zero_assoc_jump_cmd2 = append_jump(desc, JUMP_TEST_ALL |
827                                                  JUMP_COND_MATH_Z);
828
829         /* if assoclen is ZERO, skip reading the assoc data */
830         append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
831         zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
832                                                  JUMP_COND_MATH_Z);
833
834         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
835
836         /* skip assoc data */
837         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
838
839         /* cryptlen = seqinlen - assoclen */
840         append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG3, CAAM_CMD_SZ);
841
842         /* if cryptlen is ZERO jump to zero-payload commands */
843         zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
844                                             JUMP_COND_MATH_Z);
845
846         /* read assoc data */
847         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
848                              FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
849         set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
850
851         append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
852
853         /* write encrypted data */
854         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
855
856         /* read payload data */
857         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
858                              FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
859
860         /* jump over the zero-payload commands */
861         append_jump(desc, JUMP_TEST_ALL | 2);
862
863         /* zero-payload commands */
864         set_jump_tgt_here(desc, zero_payload_jump_cmd);
865
866         /* read assoc data */
867         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
868                              FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST1);
869
870         /* There is no input data */
871         set_jump_tgt_here(desc, zero_assoc_jump_cmd2);
872
873         /* write ICV */
874         append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
875                          LDST_SRCDST_BYTE_CONTEXT);
876
877         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
878                                               desc_bytes(desc),
879                                               DMA_TO_DEVICE);
880         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
881                 dev_err(jrdev, "unable to map shared descriptor\n");
882                 return -ENOMEM;
883         }
884 #ifdef DEBUG
885         print_hex_dump(KERN_ERR, "gcm enc shdesc@"__stringify(__LINE__)": ",
886                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
887                        desc_bytes(desc), 1);
888 #endif
889
890         /*
891          * Job Descriptor and Shared Descriptors
892          * must all fit into the 64-word Descriptor h/w Buffer
893          */
894         keys_fit_inline = false;
895         if (DESC_GCM_DEC_LEN + GCM_DESC_JOB_IO_LEN +
896             ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
897                 keys_fit_inline = true;
898
899         desc = ctx->sh_desc_dec;
900
901         init_sh_desc(desc, HDR_SHARE_SERIAL);
902
903         /* Skip key loading if it is loaded due to sharing */
904         key_jump_cmd = append_jump(desc, JUMP_JSL |
905                                    JUMP_TEST_ALL | JUMP_COND_SHRD |
906                                    JUMP_COND_SELF);
907         if (keys_fit_inline)
908                 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
909                                   ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
910         else
911                 append_key(desc, ctx->key_dma, ctx->enckeylen,
912                            CLASS_1 | KEY_DEST_CLASS_REG);
913         set_jump_tgt_here(desc, key_jump_cmd);
914
915         /* class 1 operation */
916         append_operation(desc, ctx->class1_alg_type |
917                          OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
918
919         /* if assoclen is ZERO, skip reading the assoc data */
920         append_math_add(desc, VARSEQINLEN, ZERO, REG3, CAAM_CMD_SZ);
921         zero_assoc_jump_cmd1 = append_jump(desc, JUMP_TEST_ALL |
922                                                  JUMP_COND_MATH_Z);
923
924         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
925
926         /* skip assoc data */
927         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
928
929         /* read assoc data */
930         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
931                              FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
932
933         set_jump_tgt_here(desc, zero_assoc_jump_cmd1);
934
935         /* cryptlen = seqoutlen - assoclen */
936         append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
937
938         /* jump to zero-payload command if cryptlen is zero */
939         zero_payload_jump_cmd = append_jump(desc, JUMP_TEST_ALL |
940                                             JUMP_COND_MATH_Z);
941
942         append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
943
944         /* store encrypted data */
945         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
946
947         /* read payload data */
948         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
949                              FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
950
951         /* zero-payload command */
952         set_jump_tgt_here(desc, zero_payload_jump_cmd);
953
954         /* read ICV */
955         append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
956                              FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
957
958         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
959                                               desc_bytes(desc),
960                                               DMA_TO_DEVICE);
961         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
962                 dev_err(jrdev, "unable to map shared descriptor\n");
963                 return -ENOMEM;
964         }
965 #ifdef DEBUG
966         print_hex_dump(KERN_ERR, "gcm dec shdesc@"__stringify(__LINE__)": ",
967                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
968                        desc_bytes(desc), 1);
969 #endif
970
971         return 0;
972 }
973
974 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
975 {
976         struct caam_ctx *ctx = crypto_aead_ctx(authenc);
977
978         ctx->authsize = authsize;
979         gcm_set_sh_desc(authenc);
980
981         return 0;
982 }
983
984 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
985 {
986         struct caam_ctx *ctx = crypto_aead_ctx(aead);
987         struct device *jrdev = ctx->jrdev;
988         bool keys_fit_inline = false;
989         u32 *key_jump_cmd;
990         u32 *desc;
991
992         if (!ctx->enckeylen || !ctx->authsize)
993                 return 0;
994
995         /*
996          * RFC4106 encrypt shared descriptor
997          * Job Descriptor and Shared Descriptor
998          * must fit into the 64-word Descriptor h/w Buffer
999          */
1000         if (DESC_RFC4106_ENC_LEN + GCM_DESC_JOB_IO_LEN +
1001             ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
1002                 keys_fit_inline = true;
1003
1004         desc = ctx->sh_desc_enc;
1005
1006         init_sh_desc(desc, HDR_SHARE_SERIAL);
1007
1008         /* Skip key loading if it is loaded due to sharing */
1009         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1010                                    JUMP_COND_SHRD);
1011         if (keys_fit_inline)
1012                 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1013                                   ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1014         else
1015                 append_key(desc, ctx->key_dma, ctx->enckeylen,
1016                            CLASS_1 | KEY_DEST_CLASS_REG);
1017         set_jump_tgt_here(desc, key_jump_cmd);
1018
1019         /* Class 1 operation */
1020         append_operation(desc, ctx->class1_alg_type |
1021                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
1022
1023         append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
1024         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
1025
1026         /* Read assoc data */
1027         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1028                              FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
1029
1030         /* Skip IV */
1031         append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1032
1033         /* Will read cryptlen bytes */
1034         append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1035
1036         /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
1037         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
1038
1039         /* Skip assoc data */
1040         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
1041
1042         /* cryptlen = seqoutlen - assoclen */
1043         append_math_sub(desc, VARSEQOUTLEN, VARSEQINLEN, REG0, CAAM_CMD_SZ);
1044
1045         /* Write encrypted data */
1046         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1047
1048         /* Read payload data */
1049         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1050                              FIFOLD_TYPE_MSG | FIFOLD_TYPE_LAST1);
1051
1052         /* Write ICV */
1053         append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
1054                          LDST_SRCDST_BYTE_CONTEXT);
1055
1056         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
1057                                               desc_bytes(desc),
1058                                               DMA_TO_DEVICE);
1059         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
1060                 dev_err(jrdev, "unable to map shared descriptor\n");
1061                 return -ENOMEM;
1062         }
1063 #ifdef DEBUG
1064         print_hex_dump(KERN_ERR, "rfc4106 enc shdesc@"__stringify(__LINE__)": ",
1065                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1066                        desc_bytes(desc), 1);
1067 #endif
1068
1069         /*
1070          * Job Descriptor and Shared Descriptors
1071          * must all fit into the 64-word Descriptor h/w Buffer
1072          */
1073         keys_fit_inline = false;
1074         if (DESC_RFC4106_DEC_LEN + DESC_JOB_IO_LEN +
1075             ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
1076                 keys_fit_inline = true;
1077
1078         desc = ctx->sh_desc_dec;
1079
1080         init_sh_desc(desc, HDR_SHARE_SERIAL);
1081
1082         /* Skip key loading if it is loaded due to sharing */
1083         key_jump_cmd = append_jump(desc, JUMP_JSL |
1084                                    JUMP_TEST_ALL | JUMP_COND_SHRD);
1085         if (keys_fit_inline)
1086                 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1087                                   ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1088         else
1089                 append_key(desc, ctx->key_dma, ctx->enckeylen,
1090                            CLASS_1 | KEY_DEST_CLASS_REG);
1091         set_jump_tgt_here(desc, key_jump_cmd);
1092
1093         /* Class 1 operation */
1094         append_operation(desc, ctx->class1_alg_type |
1095                          OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1096
1097         append_math_sub_imm_u32(desc, VARSEQINLEN, REG3, IMM, 8);
1098         append_math_add(desc, VARSEQOUTLEN, ZERO, REG3, CAAM_CMD_SZ);
1099
1100         /* Read assoc data */
1101         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1102                              FIFOLD_TYPE_AAD | FIFOLD_TYPE_FLUSH1);
1103
1104         /* Skip IV */
1105         append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1106
1107         /* Will read cryptlen bytes */
1108         append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG3, CAAM_CMD_SZ);
1109
1110         /* Workaround for erratum A-005473 (simultaneous SEQ FIFO skips) */
1111         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLD_TYPE_MSG);
1112
1113         /* Skip assoc data */
1114         append_seq_fifo_store(desc, 0, FIFOST_TYPE_SKIP | FIFOLDST_VLF);
1115
1116         /* Will write cryptlen bytes */
1117         append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1118
1119         /* Store payload data */
1120         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1121
1122         /* Read encrypted data */
1123         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_CLASS1 | FIFOLDST_VLF |
1124                              FIFOLD_TYPE_MSG | FIFOLD_TYPE_FLUSH1);
1125
1126         /* Read ICV */
1127         append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
1128                              FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1129
1130         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
1131                                               desc_bytes(desc),
1132                                               DMA_TO_DEVICE);
1133         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
1134                 dev_err(jrdev, "unable to map shared descriptor\n");
1135                 return -ENOMEM;
1136         }
1137 #ifdef DEBUG
1138         print_hex_dump(KERN_ERR, "rfc4106 dec shdesc@"__stringify(__LINE__)": ",
1139                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1140                        desc_bytes(desc), 1);
1141 #endif
1142
1143         return 0;
1144 }
1145
1146 static int rfc4106_setauthsize(struct crypto_aead *authenc,
1147                                unsigned int authsize)
1148 {
1149         struct caam_ctx *ctx = crypto_aead_ctx(authenc);
1150
1151         ctx->authsize = authsize;
1152         rfc4106_set_sh_desc(authenc);
1153
1154         return 0;
1155 }
1156
1157 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
1158 {
1159         struct caam_ctx *ctx = crypto_aead_ctx(aead);
1160         struct device *jrdev = ctx->jrdev;
1161         bool keys_fit_inline = false;
1162         u32 *key_jump_cmd;
1163         u32 *read_move_cmd, *write_move_cmd;
1164         u32 *desc;
1165
1166         if (!ctx->enckeylen || !ctx->authsize)
1167                 return 0;
1168
1169         /*
1170          * RFC4543 encrypt shared descriptor
1171          * Job Descriptor and Shared Descriptor
1172          * must fit into the 64-word Descriptor h/w Buffer
1173          */
1174         if (DESC_RFC4543_ENC_LEN + GCM_DESC_JOB_IO_LEN +
1175             ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
1176                 keys_fit_inline = true;
1177
1178         desc = ctx->sh_desc_enc;
1179
1180         init_sh_desc(desc, HDR_SHARE_SERIAL);
1181
1182         /* Skip key loading if it is loaded due to sharing */
1183         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1184                                    JUMP_COND_SHRD);
1185         if (keys_fit_inline)
1186                 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1187                                   ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1188         else
1189                 append_key(desc, ctx->key_dma, ctx->enckeylen,
1190                            CLASS_1 | KEY_DEST_CLASS_REG);
1191         set_jump_tgt_here(desc, key_jump_cmd);
1192
1193         /* Class 1 operation */
1194         append_operation(desc, ctx->class1_alg_type |
1195                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
1196
1197         /* assoclen + cryptlen = seqinlen */
1198         append_math_sub(desc, REG3, SEQINLEN, REG0, CAAM_CMD_SZ);
1199
1200         /*
1201          * MOVE_LEN opcode is not available in all SEC HW revisions,
1202          * thus need to do some magic, i.e. self-patch the descriptor
1203          * buffer.
1204          */
1205         read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1206                                     (0x6 << MOVE_LEN_SHIFT));
1207         write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1208                                      (0x8 << MOVE_LEN_SHIFT));
1209
1210         /* Will read assoclen + cryptlen bytes */
1211         append_math_sub(desc, VARSEQINLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1212
1213         /* Will write assoclen + cryptlen bytes */
1214         append_math_sub(desc, VARSEQOUTLEN, SEQINLEN, REG0, CAAM_CMD_SZ);
1215
1216         /* Read and write assoclen + cryptlen bytes */
1217         aead_append_src_dst(desc, FIFOLD_TYPE_AAD);
1218
1219         set_move_tgt_here(desc, read_move_cmd);
1220         set_move_tgt_here(desc, write_move_cmd);
1221         append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1222         /* Move payload data to OFIFO */
1223         append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1224
1225         /* Write ICV */
1226         append_seq_store(desc, ctx->authsize, LDST_CLASS_1_CCB |
1227                          LDST_SRCDST_BYTE_CONTEXT);
1228
1229         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
1230                                               desc_bytes(desc),
1231                                               DMA_TO_DEVICE);
1232         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
1233                 dev_err(jrdev, "unable to map shared descriptor\n");
1234                 return -ENOMEM;
1235         }
1236 #ifdef DEBUG
1237         print_hex_dump(KERN_ERR, "rfc4543 enc shdesc@"__stringify(__LINE__)": ",
1238                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1239                        desc_bytes(desc), 1);
1240 #endif
1241
1242         /*
1243          * Job Descriptor and Shared Descriptors
1244          * must all fit into the 64-word Descriptor h/w Buffer
1245          */
1246         keys_fit_inline = false;
1247         if (DESC_RFC4543_DEC_LEN + GCM_DESC_JOB_IO_LEN +
1248             ctx->enckeylen <= CAAM_DESC_BYTES_MAX)
1249                 keys_fit_inline = true;
1250
1251         desc = ctx->sh_desc_dec;
1252
1253         init_sh_desc(desc, HDR_SHARE_SERIAL);
1254
1255         /* Skip key loading if it is loaded due to sharing */
1256         key_jump_cmd = append_jump(desc, JUMP_JSL |
1257                                    JUMP_TEST_ALL | JUMP_COND_SHRD);
1258         if (keys_fit_inline)
1259                 append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1260                                   ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1261         else
1262                 append_key(desc, ctx->key_dma, ctx->enckeylen,
1263                            CLASS_1 | KEY_DEST_CLASS_REG);
1264         set_jump_tgt_here(desc, key_jump_cmd);
1265
1266         /* Class 1 operation */
1267         append_operation(desc, ctx->class1_alg_type |
1268                          OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT | OP_ALG_ICV_ON);
1269
1270         /* assoclen + cryptlen = seqoutlen */
1271         append_math_sub(desc, REG3, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1272
1273         /*
1274          * MOVE_LEN opcode is not available in all SEC HW revisions,
1275          * thus need to do some magic, i.e. self-patch the descriptor
1276          * buffer.
1277          */
1278         read_move_cmd = append_move(desc, MOVE_SRC_DESCBUF | MOVE_DEST_MATH3 |
1279                                     (0x6 << MOVE_LEN_SHIFT));
1280         write_move_cmd = append_move(desc, MOVE_SRC_MATH3 | MOVE_DEST_DESCBUF |
1281                                      (0x8 << MOVE_LEN_SHIFT));
1282
1283         /* Will read assoclen + cryptlen bytes */
1284         append_math_sub(desc, VARSEQINLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1285
1286         /* Will write assoclen + cryptlen bytes */
1287         append_math_sub(desc, VARSEQOUTLEN, SEQOUTLEN, REG0, CAAM_CMD_SZ);
1288
1289         /* Store payload data */
1290         append_seq_fifo_store(desc, 0, FIFOST_TYPE_MESSAGE_DATA | FIFOLDST_VLF);
1291
1292         /* In-snoop assoclen + cryptlen data */
1293         append_seq_fifo_load(desc, 0, FIFOLD_CLASS_BOTH | FIFOLDST_VLF |
1294                              FIFOLD_TYPE_AAD | FIFOLD_TYPE_LAST2FLUSH1);
1295
1296         set_move_tgt_here(desc, read_move_cmd);
1297         set_move_tgt_here(desc, write_move_cmd);
1298         append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1299         /* Move payload data to OFIFO */
1300         append_move(desc, MOVE_SRC_INFIFO_CL | MOVE_DEST_OUTFIFO);
1301         append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
1302
1303         /* Read ICV */
1304         append_seq_fifo_load(desc, ctx->authsize, FIFOLD_CLASS_CLASS1 |
1305                              FIFOLD_TYPE_ICV | FIFOLD_TYPE_LAST1);
1306
1307         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
1308                                               desc_bytes(desc),
1309                                               DMA_TO_DEVICE);
1310         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
1311                 dev_err(jrdev, "unable to map shared descriptor\n");
1312                 return -ENOMEM;
1313         }
1314 #ifdef DEBUG
1315         print_hex_dump(KERN_ERR, "rfc4543 dec shdesc@"__stringify(__LINE__)": ",
1316                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1317                        desc_bytes(desc), 1);
1318 #endif
1319
1320         return 0;
1321 }
1322
1323 static int rfc4543_setauthsize(struct crypto_aead *authenc,
1324                                unsigned int authsize)
1325 {
1326         struct caam_ctx *ctx = crypto_aead_ctx(authenc);
1327
1328         ctx->authsize = authsize;
1329         rfc4543_set_sh_desc(authenc);
1330
1331         return 0;
1332 }
1333
1334 static u32 gen_split_aead_key(struct caam_ctx *ctx, const u8 *key_in,
1335                               u32 authkeylen)
1336 {
1337         return gen_split_key(ctx->jrdev, ctx->key, ctx->split_key_len,
1338                                ctx->split_key_pad_len, key_in, authkeylen,
1339                                ctx->alg_op);
1340 }
1341
1342 static int aead_setkey(struct crypto_aead *aead,
1343                                const u8 *key, unsigned int keylen)
1344 {
1345         /* Sizes for MDHA pads (*not* keys): MD5, SHA1, 224, 256, 384, 512 */
1346         static const u8 mdpadlen[] = { 16, 20, 32, 32, 64, 64 };
1347         struct caam_ctx *ctx = crypto_aead_ctx(aead);
1348         struct device *jrdev = ctx->jrdev;
1349         struct crypto_authenc_keys keys;
1350         int ret = 0;
1351
1352         if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
1353                 goto badkey;
1354
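        /*
         * The authentication key is not used directly: it is first "split"
         * into a pair of precomputed MDHA IPAD/OPAD states (mdpadlen[] gives
         * the per-algorithm state size, hence the "* 2" below), and the
         * result is padded up to a 16-byte boundary before the encryption
         * key is copied in after it.
         */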
1355         /* Pick class 2 key length from algorithm submask */
1356         ctx->split_key_len = mdpadlen[(ctx->alg_op & OP_ALG_ALGSEL_SUBMASK) >>
1357                                       OP_ALG_ALGSEL_SHIFT] * 2;
1358         ctx->split_key_pad_len = ALIGN(ctx->split_key_len, 16);
1359
1360         if (ctx->split_key_pad_len + keys.enckeylen > CAAM_MAX_KEY_SIZE)
1361                 goto badkey;
1362
1363 #ifdef DEBUG
1364         printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
1365                keys.authkeylen + keys.enckeylen, keys.enckeylen,
1366                keys.authkeylen);
1367         printk(KERN_ERR "split_key_len %d split_key_pad_len %d\n",
1368                ctx->split_key_len, ctx->split_key_pad_len);
1369         print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1370                        DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1371 #endif
1372
1373         ret = gen_split_aead_key(ctx, keys.authkey, keys.authkeylen);
1374         if (ret) {
1375                 goto badkey;
1376         }
1377
1378         /* append the encryption key after the auth split key */
1379         memcpy(ctx->key + ctx->split_key_pad_len, keys.enckey, keys.enckeylen);
1380
1381         ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->split_key_pad_len +
1382                                       keys.enckeylen, DMA_TO_DEVICE);
1383         if (dma_mapping_error(jrdev, ctx->key_dma)) {
1384                 dev_err(jrdev, "unable to map key i/o memory\n");
1385                 return -ENOMEM;
1386         }
1387 #ifdef DEBUG
1388         print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
1389                        DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
1390                        ctx->split_key_pad_len + keys.enckeylen, 1);
1391 #endif
1392
1393         ctx->enckeylen = keys.enckeylen;
1394
1395         ret = aead_set_sh_desc(aead);
1396         if (ret) {
1397                 dma_unmap_single(jrdev, ctx->key_dma, ctx->split_key_pad_len +
1398                                  keys.enckeylen, DMA_TO_DEVICE);
1399         }
1400
1401         return ret;
1402 badkey:
1403         crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
1404         return -EINVAL;
1405 }
1406
1407 static int gcm_setkey(struct crypto_aead *aead,
1408                       const u8 *key, unsigned int keylen)
1409 {
1410         struct caam_ctx *ctx = crypto_aead_ctx(aead);
1411         struct device *jrdev = ctx->jrdev;
1412         int ret = 0;
1413
1414 #ifdef DEBUG
1415         print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1416                        DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1417 #endif
1418
1419         memcpy(ctx->key, key, keylen);
1420         ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
1421                                       DMA_TO_DEVICE);
1422         if (dma_mapping_error(jrdev, ctx->key_dma)) {
1423                 dev_err(jrdev, "unable to map key i/o memory\n");
1424                 return -ENOMEM;
1425         }
1426         ctx->enckeylen = keylen;
1427
1428         ret = gcm_set_sh_desc(aead);
1429         if (ret) {
1430                 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
1431                                  DMA_TO_DEVICE);
1432         }
1433
1434         return ret;
1435 }
1436
1437 static int rfc4106_setkey(struct crypto_aead *aead,
1438                           const u8 *key, unsigned int keylen)
1439 {
1440         struct caam_ctx *ctx = crypto_aead_ctx(aead);
1441         struct device *jrdev = ctx->jrdev;
1442         int ret = 0;
1443
1444         if (keylen < 4)
1445                 return -EINVAL;
1446
1447 #ifdef DEBUG
1448         print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1449                        DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1450 #endif
1451
1452         memcpy(ctx->key, key, keylen);
1453
1454         /*
1455          * The last four bytes of the key material are used as the salt value
1456          * in the nonce. Update the AES key length.
1457          */
1458         ctx->enckeylen = keylen - 4;
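        /*
         * ctx->key thus holds the AES key (enckeylen bytes) followed by the
         * 4-byte salt; init_gcm_job() prepends that salt to the 8-byte
         * per-request IV to form the 12-byte GCM nonce.
         */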
1459
1460         ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
1461                                       DMA_TO_DEVICE);
1462         if (dma_mapping_error(jrdev, ctx->key_dma)) {
1463                 dev_err(jrdev, "unable to map key i/o memory\n");
1464                 return -ENOMEM;
1465         }
1466
1467         ret = rfc4106_set_sh_desc(aead);
1468         if (ret) {
1469                 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
1470                                  DMA_TO_DEVICE);
1471         }
1472
1473         return ret;
1474 }
1475
1476 static int rfc4543_setkey(struct crypto_aead *aead,
1477                           const u8 *key, unsigned int keylen)
1478 {
1479         struct caam_ctx *ctx = crypto_aead_ctx(aead);
1480         struct device *jrdev = ctx->jrdev;
1481         int ret = 0;
1482
1483         if (keylen < 4)
1484                 return -EINVAL;
1485
1486 #ifdef DEBUG
1487         print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1488                        DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1489 #endif
1490
1491         memcpy(ctx->key, key, keylen);
1492
1493         /*
1494          * The last four bytes of the key material are used as the salt value
1495          * in the nonce. Update the AES key length.
1496          */
1497         ctx->enckeylen = keylen - 4;
1498
1499         ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->enckeylen,
1500                                       DMA_TO_DEVICE);
1501         if (dma_mapping_error(jrdev, ctx->key_dma)) {
1502                 dev_err(jrdev, "unable to map key i/o memory\n");
1503                 return -ENOMEM;
1504         }
1505
1506         ret = rfc4543_set_sh_desc(aead);
1507         if (ret) {
1508                 dma_unmap_single(jrdev, ctx->key_dma, ctx->enckeylen,
1509                                  DMA_TO_DEVICE);
1510         }
1511
1512         return ret;
1513 }
1514
1515 static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
1516                              const u8 *key, unsigned int keylen)
1517 {
1518         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1519         struct ablkcipher_tfm *crt = &ablkcipher->base.crt_ablkcipher;
1520         struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
1521         const char *alg_name = crypto_tfm_alg_name(tfm);
1522         struct device *jrdev = ctx->jrdev;
1523         int ret = 0;
1524         u32 *key_jump_cmd;
1525         u32 *desc;
1526         u8 *nonce;
1527         u32 geniv;
1528         u32 ctx1_iv_off = 0;
1529         const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
1530                                OP_ALG_AAI_CTR_MOD128);
1531         const bool is_rfc3686 = (ctr_mode &&
1532                                  (strstr(alg_name, "rfc3686") != NULL));
1533
1534 #ifdef DEBUG
1535         print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
1536                        DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
1537 #endif
1538         /*
1539          * AES-CTR needs to load IV in CONTEXT1 reg
1540          * at an offset of 128 bits (16 bytes)
1541          * CONTEXT1[255:128] = IV
1542          */
1543         if (ctr_mode)
1544                 ctx1_iv_off = 16;
1545
1546         /*
1547          * RFC3686 specific:
1548          *      CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1549          *      *key = {KEY, NONCE}
1550          */
1551         if (is_rfc3686) {
1552                 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
1553                 keylen -= CTR_RFC3686_NONCE_SIZE;
1554         }
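        /*
         * With the offsets above, CONTEXT1 ends up holding the 4-byte nonce
         * at byte 16, the 8-byte IV at byte 20 and the big-endian counter at
         * byte 28 for RFC3686; plain CTR keeps the IV at byte 16.
         */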
1555
1556         memcpy(ctx->key, key, keylen);
1557         ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
1558                                       DMA_TO_DEVICE);
1559         if (dma_mapping_error(jrdev, ctx->key_dma)) {
1560                 dev_err(jrdev, "unable to map key i/o memory\n");
1561                 return -ENOMEM;
1562         }
1563         ctx->enckeylen = keylen;
1564
1565         /* ablkcipher_encrypt shared descriptor */
1566         desc = ctx->sh_desc_enc;
1567         init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1568         /* Skip if already shared */
1569         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1570                                    JUMP_COND_SHRD);
1571
1572         /* Load class1 key only */
1573         append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1574                           ctx->enckeylen, CLASS_1 |
1575                           KEY_DEST_CLASS_REG);
1576
1577         /* Load nonce into CONTEXT1 reg */
1578         if (is_rfc3686) {
1579                 nonce = (u8 *)key + keylen;
1580                 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1581                                    LDST_CLASS_IND_CCB |
1582                                    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1583                 append_move(desc, MOVE_WAITCOMP |
1584                             MOVE_SRC_OUTFIFO |
1585                             MOVE_DEST_CLASS1CTX |
1586                             (16 << MOVE_OFFSET_SHIFT) |
1587                             (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1588         }
1589
1590         set_jump_tgt_here(desc, key_jump_cmd);
1591
1592         /* Load iv */
1593         append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
1594                         LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
1595
1596         /* Load counter into CONTEXT1 reg */
1597         if (is_rfc3686)
1598                 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1599                                      LDST_SRCDST_BYTE_CONTEXT |
1600                                      ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1601                                       LDST_OFFSET_SHIFT));
1602
1603         /* Load operation */
1604         append_operation(desc, ctx->class1_alg_type |
1605                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
1606
1607         /* Perform operation */
1608         ablkcipher_append_src_dst(desc);
1609
1610         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
1611                                               desc_bytes(desc),
1612                                               DMA_TO_DEVICE);
1613         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
1614                 dev_err(jrdev, "unable to map shared descriptor\n");
1615                 return -ENOMEM;
1616         }
1617 #ifdef DEBUG
1618         print_hex_dump(KERN_ERR,
1619                        "ablkcipher enc shdesc@"__stringify(__LINE__)": ",
1620                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1621                        desc_bytes(desc), 1);
1622 #endif
1623         /* ablkcipher_decrypt shared descriptor */
1624         desc = ctx->sh_desc_dec;
1625
1626         init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1627         /* Skip if already shared */
1628         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1629                                    JUMP_COND_SHRD);
1630
1631         /* Load class1 key only */
1632         append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1633                           ctx->enckeylen, CLASS_1 |
1634                           KEY_DEST_CLASS_REG);
1635
1636         /* Load nonce into CONTEXT1 reg */
1637         if (is_rfc3686) {
1638                 nonce = (u8 *)key + keylen;
1639                 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1640                                    LDST_CLASS_IND_CCB |
1641                                    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1642                 append_move(desc, MOVE_WAITCOMP |
1643                             MOVE_SRC_OUTFIFO |
1644                             MOVE_DEST_CLASS1CTX |
1645                             (16 << MOVE_OFFSET_SHIFT) |
1646                             (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1647         }
1648
1649         set_jump_tgt_here(desc, key_jump_cmd);
1650
1651         /* load IV */
1652         append_seq_load(desc, crt->ivsize, LDST_SRCDST_BYTE_CONTEXT |
1653                         LDST_CLASS_1_CCB | (ctx1_iv_off << LDST_OFFSET_SHIFT));
1654
1655         /* Load counter into CONTEXT1 reg */
1656         if (is_rfc3686)
1657                 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1658                                      LDST_SRCDST_BYTE_CONTEXT |
1659                                      ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1660                                       LDST_OFFSET_SHIFT));
1661
1662         /* Choose operation */
1663         if (ctr_mode)
1664                 append_operation(desc, ctx->class1_alg_type |
1665                                  OP_ALG_AS_INITFINAL | OP_ALG_DECRYPT);
1666         else
1667                 append_dec_op1(desc, ctx->class1_alg_type);
1668
1669         /* Perform operation */
1670         ablkcipher_append_src_dst(desc);
1671
1672         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
1673                                               desc_bytes(desc),
1674                                               DMA_TO_DEVICE);
1675         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
1676                 dev_err(jrdev, "unable to map shared descriptor\n");
1677                 return -ENOMEM;
1678         }
1679
1680 #ifdef DEBUG
1681         print_hex_dump(KERN_ERR,
1682                        "ablkcipher dec shdesc@"__stringify(__LINE__)": ",
1683                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1684                        desc_bytes(desc), 1);
1685 #endif
1686         /* ablkcipher_givencrypt shared descriptor */
1687         desc = ctx->sh_desc_givenc;
1688
1689         init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1690         /* Skip if already shared */
1691         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1692                                    JUMP_COND_SHRD);
1693
1694         /* Load class1 key only */
1695         append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1696                           ctx->enckeylen, CLASS_1 |
1697                           KEY_DEST_CLASS_REG);
1698
1699         /* Load Nonce into CONTEXT1 reg */
1700         if (is_rfc3686) {
1701                 nonce = (u8 *)key + keylen;
1702                 append_load_as_imm(desc, nonce, CTR_RFC3686_NONCE_SIZE,
1703                                    LDST_CLASS_IND_CCB |
1704                                    LDST_SRCDST_BYTE_OUTFIFO | LDST_IMM);
1705                 append_move(desc, MOVE_WAITCOMP |
1706                             MOVE_SRC_OUTFIFO |
1707                             MOVE_DEST_CLASS1CTX |
1708                             (16 << MOVE_OFFSET_SHIFT) |
1709                             (CTR_RFC3686_NONCE_SIZE << MOVE_LEN_SHIFT));
1710         }
1711         set_jump_tgt_here(desc, key_jump_cmd);
1712
1713         /* Generate IV */
1714         geniv = NFIFOENTRY_STYPE_PAD | NFIFOENTRY_DEST_DECO |
1715                 NFIFOENTRY_DTYPE_MSG | NFIFOENTRY_LC1 |
1716                 NFIFOENTRY_PTYPE_RND | (crt->ivsize << NFIFOENTRY_DLEN_SHIFT);
1717         append_load_imm_u32(desc, geniv, LDST_CLASS_IND_CCB |
1718                             LDST_SRCDST_WORD_INFO_FIFO | LDST_IMM);
1719         append_cmd(desc, CMD_LOAD | DISABLE_AUTO_INFO_FIFO);
1720         append_move(desc, MOVE_WAITCOMP |
1721                     MOVE_SRC_INFIFO |
1722                     MOVE_DEST_CLASS1CTX |
1723                     (crt->ivsize << MOVE_LEN_SHIFT) |
1724                     (ctx1_iv_off << MOVE_OFFSET_SHIFT));
1725         append_cmd(desc, CMD_LOAD | ENABLE_AUTO_INFO_FIFO);
1726
1727         /* Copy generated IV to memory */
1728         append_seq_store(desc, crt->ivsize,
1729                          LDST_SRCDST_BYTE_CONTEXT | LDST_CLASS_1_CCB |
1730                          (ctx1_iv_off << LDST_OFFSET_SHIFT));
1731
1732         /* Load Counter into CONTEXT1 reg */
1733         if (is_rfc3686)
1734                 append_load_imm_be32(desc, 1, LDST_IMM | LDST_CLASS_1_CCB |
1735                                      LDST_SRCDST_BYTE_CONTEXT |
1736                                      ((ctx1_iv_off + CTR_RFC3686_IV_SIZE) <<
1737                                       LDST_OFFSET_SHIFT));
1738
1739         if (ctx1_iv_off)
1740                 append_jump(desc, JUMP_JSL | JUMP_TEST_ALL | JUMP_COND_NCP |
1741                             (1 << JUMP_OFFSET_SHIFT));
1742
1743         /* Load operation */
1744         append_operation(desc, ctx->class1_alg_type |
1745                          OP_ALG_AS_INITFINAL | OP_ALG_ENCRYPT);
1746
1747         /* Perform operation */
1748         ablkcipher_append_src_dst(desc);
1749
1750         ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc,
1751                                                  desc_bytes(desc),
1752                                                  DMA_TO_DEVICE);
1753         if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) {
1754                 dev_err(jrdev, "unable to map shared descriptor\n");
1755                 return -ENOMEM;
1756         }
1757 #ifdef DEBUG
1758         print_hex_dump(KERN_ERR,
1759                        "ablkcipher givenc shdesc@" __stringify(__LINE__) ": ",
1760                        DUMP_PREFIX_ADDRESS, 16, 4, desc,
1761                        desc_bytes(desc), 1);
1762 #endif
1763
1764         return ret;
1765 }
1766
1767 static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
1768                                  const u8 *key, unsigned int keylen)
1769 {
1770         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1771         struct device *jrdev = ctx->jrdev;
1772         u32 *key_jump_cmd, *desc;
1773         __be64 sector_size = cpu_to_be64(512);
1774
1775         if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
1776                 crypto_ablkcipher_set_flags(ablkcipher,
1777                                             CRYPTO_TFM_RES_BAD_KEY_LEN);
1778                 dev_err(jrdev, "key size mismatch\n");
1779                 return -EINVAL;
1780         }
1781
1782         memcpy(ctx->key, key, keylen);
1783         ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, DMA_TO_DEVICE);
1784         if (dma_mapping_error(jrdev, ctx->key_dma)) {
1785                 dev_err(jrdev, "unable to map key i/o memory\n");
1786                 return -ENOMEM;
1787         }
1788         ctx->enckeylen = keylen;
1789
1790         /* xts_ablkcipher_encrypt shared descriptor */
1791         desc = ctx->sh_desc_enc;
1792         init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1793         /* Skip if already shared */
1794         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1795                                    JUMP_COND_SHRD);
1796
1797         /* Load class1 keys only */
1798         append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1799                           ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1800
1801         /* Load sector size with index 40 bytes (0x28) */
1802         append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
1803                    LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
1804         append_data(desc, (void *)&sector_size, 8);
1805
1806         set_jump_tgt_here(desc, key_jump_cmd);
1807
1808         /*
1809          * create sequence for loading the sector index
1810          * Upper 8B of IV - will be used as sector index
1811          * Lower 8B of IV - will be discarded
1812          */
1813         append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
1814                    LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
1815         append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
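        /*
         * CONTEXT1 now carries the 8-byte sector index at offset 0x20 and
         * the 8-byte sector size (512) at offset 0x28 for the XTS operation.
         */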
1816
1817         /* Load operation */
1818         append_operation(desc, ctx->class1_alg_type | OP_ALG_AS_INITFINAL |
1819                          OP_ALG_ENCRYPT);
1820
1821         /* Perform operation */
1822         ablkcipher_append_src_dst(desc);
1823
1824         ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
1825                                               DMA_TO_DEVICE);
1826         if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
1827                 dev_err(jrdev, "unable to map shared descriptor\n");
1828                 return -ENOMEM;
1829         }
1830 #ifdef DEBUG
1831         print_hex_dump(KERN_ERR,
1832                        "xts ablkcipher enc shdesc@" __stringify(__LINE__) ": ",
1833                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1834 #endif
1835
1836         /* xts_ablkcipher_decrypt shared descriptor */
1837         desc = ctx->sh_desc_dec;
1838
1839         init_sh_desc(desc, HDR_SHARE_SERIAL | HDR_SAVECTX);
1840         /* Skip if already shared */
1841         key_jump_cmd = append_jump(desc, JUMP_JSL | JUMP_TEST_ALL |
1842                                    JUMP_COND_SHRD);
1843
1844         /* Load class1 key only */
1845         append_key_as_imm(desc, (void *)ctx->key, ctx->enckeylen,
1846                           ctx->enckeylen, CLASS_1 | KEY_DEST_CLASS_REG);
1847
1848         /* Load sector size with index 40 bytes (0x28) */
1849         append_cmd(desc, CMD_LOAD | IMMEDIATE | LDST_SRCDST_BYTE_CONTEXT |
1850                    LDST_CLASS_1_CCB | (0x28 << LDST_OFFSET_SHIFT) | 8);
1851         append_data(desc, (void *)&sector_size, 8);
1852
1853         set_jump_tgt_here(desc, key_jump_cmd);
1854
1855         /*
1856          * create sequence for loading the sector index
1857          * Upper 8B of IV - will be used as sector index
1858          * Lower 8B of IV - will be discarded
1859          */
1860         append_cmd(desc, CMD_SEQ_LOAD | LDST_SRCDST_BYTE_CONTEXT |
1861                    LDST_CLASS_1_CCB | (0x20 << LDST_OFFSET_SHIFT) | 8);
1862         append_seq_fifo_load(desc, 8, FIFOLD_CLASS_SKIP);
1863
1864         /* Load operation */
1865         append_dec_op1(desc, ctx->class1_alg_type);
1866
1867         /* Perform operation */
1868         ablkcipher_append_src_dst(desc);
1869
1870         ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
1871                                               DMA_TO_DEVICE);
1872         if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
1873                 dma_unmap_single(jrdev, ctx->sh_desc_enc_dma,
1874                                  desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
1875                 dev_err(jrdev, "unable to map shared descriptor\n");
1876                 return -ENOMEM;
1877         }
1878 #ifdef DEBUG
1879         print_hex_dump(KERN_ERR,
1880                        "xts ablkcipher dec shdesc@" __stringify(__LINE__) ": ",
1881                        DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc), 1);
1882 #endif
1883
1884         return 0;
1885 }
1886
1887 /*
1888  * aead_edesc - s/w-extended aead descriptor
1889  * @assoc_nents: number of segments in associated data (SPI+Seq) scatterlist
1890  * @src_nents: number of segments in input scatterlist
1891  * @dst_nents: number of segments in output scatterlist
1892  * @iv_dma: dma address of iv for checking continuity and link table
1893  * @sec4_sg_bytes: length of dma mapped sec4_sg space
1894  * @sec4_sg_dma: bus physical mapped address of h/w link table
1895  * @sec4_sg: pointer to h/w link table
1896  * @hw_desc: the h/w job descriptor (must not exceed MAX_CAAM_DESCSIZE), followed by any referenced link tables
1897  */
1898 struct aead_edesc {
1899         int assoc_nents;
1900         int src_nents;
1901         int dst_nents;
1902         dma_addr_t iv_dma;
1903         int sec4_sg_bytes;
1904         dma_addr_t sec4_sg_dma;
1905         struct sec4_sg_entry *sec4_sg;
1906         u32 hw_desc[];
1907 };
1908
1909 /*
1910  * ablkcipher_edesc - s/w-extended ablkcipher descriptor
1911  * @src_nents: number of segments in input scatterlist
1912  * @dst_nents: number of segments in output scatterlist
1913  * @iv_dma: dma address of iv for checking continuity and link table
1914  * @sec4_sg_bytes: length of dma mapped sec4_sg space
1915  * @sec4_sg_dma: bus physical mapped address of h/w link table
1916  * @sec4_sg: pointer to h/w link table
1917  * @hw_desc: the h/w job descriptor (must not exceed MAX_CAAM_DESCSIZE), followed by any referenced link tables
1918  */
1919 struct ablkcipher_edesc {
1920         int src_nents;
1921         int dst_nents;
1922         dma_addr_t iv_dma;
1923         int sec4_sg_bytes;
1924         dma_addr_t sec4_sg_dma;
1925         struct sec4_sg_entry *sec4_sg;
1926         u32 hw_desc[];
1927 };
1928
1929 static void caam_unmap(struct device *dev, struct scatterlist *src,
1930                        struct scatterlist *dst, int src_nents,
1931                        int dst_nents,
1932                        dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
1933                        int sec4_sg_bytes)
1934 {
1935         if (dst != src) {
1936                 dma_unmap_sg(dev, src, src_nents ? : 1, DMA_TO_DEVICE);
1937                 dma_unmap_sg(dev, dst, dst_nents ? : 1, DMA_FROM_DEVICE);
1938         } else {
1939                 dma_unmap_sg(dev, src, src_nents ? : 1, DMA_BIDIRECTIONAL);
1940         }
1941
1942         if (iv_dma)
1943                 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
1944         if (sec4_sg_bytes)
1945                 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
1946                                  DMA_TO_DEVICE);
1947 }
1948
1949 static void aead_unmap(struct device *dev,
1950                        struct aead_edesc *edesc,
1951                        struct aead_request *req)
1952 {
1953         caam_unmap(dev, req->src, req->dst,
1954                    edesc->src_nents, edesc->dst_nents, 0, 0,
1955                    edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
1956 }
1957
1958 static void ablkcipher_unmap(struct device *dev,
1959                              struct ablkcipher_edesc *edesc,
1960                              struct ablkcipher_request *req)
1961 {
1962         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1963         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1964
1965         caam_unmap(dev, req->src, req->dst,
1966                    edesc->src_nents, edesc->dst_nents,
1967                    edesc->iv_dma, ivsize,
1968                    edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
1969 }
1970
1971 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
1972                                    void *context)
1973 {
1974         struct aead_request *req = context;
1975         struct aead_edesc *edesc;
1976
1977 #ifdef DEBUG
1978         dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1979 #endif
1980
1981         edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
1982
1983         if (err)
1984                 caam_jr_strstatus(jrdev, err);
1985
1986         aead_unmap(jrdev, edesc, req);
1987
1988         kfree(edesc);
1989
1990         aead_request_complete(req, err);
1991 }
1992
1993 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
1994                                    void *context)
1995 {
1996         struct aead_request *req = context;
1997         struct aead_edesc *edesc;
1998
1999 #ifdef DEBUG
2000         dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
2001 #endif
2002
2003         edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
2004
2005         if (err)
2006                 caam_jr_strstatus(jrdev, err);
2007
2008         aead_unmap(jrdev, edesc, req);
2009
2010         /*
2011          * A hardware ICV (authentication) check failure is reported to
2012          * the crypto API as -EBADMSG.
2013          */
2013         if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
2014                 err = -EBADMSG;
2015
2016         kfree(edesc);
2017
2018         aead_request_complete(req, err);
2019 }
2020
2021 static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
2022                                    void *context)
2023 {
2024         struct ablkcipher_request *req = context;
2025         struct ablkcipher_edesc *edesc;
2026         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2027         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2028         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2029
2030 #ifdef DEBUG
2031         dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
2032 #endif
2033
2034         edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
2036
2037         if (err)
2038                 caam_jr_strstatus(jrdev, err);
2039
2040 #ifdef DEBUG
2041         print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
2042                        DUMP_PREFIX_ADDRESS, 16, 4, req->info,
2043                        edesc->src_nents > 1 ? 100 : ivsize, 1);
2044         dbg_dump_sg(KERN_ERR, "dst    @"__stringify(__LINE__)": ",
2045                     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
2046                     edesc->dst_nents > 1 ? 100 : req->nbytes, 1, true);
2047 #endif
2048
2049         ablkcipher_unmap(jrdev, edesc, req);
2050
2051         /*
2052          * The crypto API expects us to set the IV (req->info) to the last
2053          * ciphertext block when running in CBC mode.
2054          */
2055         if ((ctx->class1_alg_type & OP_ALG_AAI_MASK) == OP_ALG_AAI_CBC)
2056                 scatterwalk_map_and_copy(req->info, req->dst, req->nbytes -
2057                                          ivsize, ivsize, 0);
2058
2059         kfree(edesc);
2060
2061         ablkcipher_request_complete(req, err);
2062 }
2063
2064 static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
2065                                     void *context)
2066 {
2067         struct ablkcipher_request *req = context;
2068         struct ablkcipher_edesc *edesc;
2069         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2070         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2071         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2072
2073 #ifdef DEBUG
2074         dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
2075 #endif
2076
2077         edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
2079         if (err)
2080                 caam_jr_strstatus(jrdev, err);
2081
2082 #ifdef DEBUG
2083         print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
2084                        DUMP_PREFIX_ADDRESS, 16, 4, req->info,
2085                        ivsize, 1);
2086         dbg_dump_sg(KERN_ERR, "dst    @"__stringify(__LINE__)": ",
2087                     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
2088                     edesc->dst_nents > 1 ? 100 : req->nbytes, 1, true);
2089 #endif
2090
2091         ablkcipher_unmap(jrdev, edesc, req);
2092
2093         /*
2094          * The crypto API expects us to set the IV (req->info) to the last
2095          * ciphertext block when running in CBC mode.
2096          */
2097         if ((ctx->class1_alg_type & OP_ALG_AAI_MASK) == OP_ALG_AAI_CBC)
2098                 scatterwalk_map_and_copy(req->info, req->src, req->nbytes -
2099                                          ivsize, ivsize, 0);
2100
2101         kfree(edesc);
2102
2103         ablkcipher_request_complete(req, err);
2104 }
2105
2106 /*
2107  * Fill in aead job descriptor
2108  */
2109 static void init_aead_job(struct aead_request *req,
2110                           struct aead_edesc *edesc,
2111                           bool all_contig, bool encrypt)
2112 {
2113         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2114         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2115         int authsize = ctx->authsize;
2116         u32 *desc = edesc->hw_desc;
2117         u32 out_options, in_options;
2118         dma_addr_t dst_dma, src_dma;
2119         int len, sec4_sg_index = 0;
2120         dma_addr_t ptr;
2121         u32 *sh_desc;
2122
2123         sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
2124         ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
2125
2126         len = desc_len(sh_desc);
2127         init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
2128
2129         if (all_contig) {
2130                 src_dma = sg_dma_address(req->src);
2131                 in_options = 0;
2132         } else {
2133                 src_dma = edesc->sec4_sg_dma;
2134                 sec4_sg_index += edesc->src_nents;
2135                 in_options = LDST_SGF;
2136         }
2137
2138         append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
2139                           in_options);
2140
2141         dst_dma = src_dma;
2142         out_options = in_options;
2143
2144         if (unlikely(req->src != req->dst)) {
2145                 if (!edesc->dst_nents) {
2146                         dst_dma = sg_dma_address(req->dst);
2147                         out_options = 0;
2148                 } else {
2149                         dst_dma = edesc->sec4_sg_dma +
2150                                   sec4_sg_index *
2151                                   sizeof(struct sec4_sg_entry);
2152                         out_options = LDST_SGF;
2153                 }
2154         }
2155
2156         if (encrypt)
2157                 append_seq_out_ptr(desc, dst_dma,
2158                                    req->assoclen + req->cryptlen + authsize,
2159                                    out_options);
2160         else
2161                 append_seq_out_ptr(desc, dst_dma,
2162                                    req->assoclen + req->cryptlen - authsize,
2163                                    out_options);
2164
2165         /* REG3 = assoclen */
2166         append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
2167 }
2168
2169 static void init_gcm_job(struct aead_request *req,
2170                          struct aead_edesc *edesc,
2171                          bool all_contig, bool encrypt)
2172 {
2173         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2174         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2175         unsigned int ivsize = crypto_aead_ivsize(aead);
2176         u32 *desc = edesc->hw_desc;
2177         bool generic_gcm = (ivsize == 12);
2178         unsigned int last;
2179
2180         init_aead_job(req, edesc, all_contig, encrypt);
2181
2182         /* BUG This should not be specific to generic GCM. */
2183         last = 0;
2184         if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
2185                 last = FIFOLD_TYPE_LAST1;
2186
2187         /* Read GCM IV */
2188         append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
2189                          FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
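        /*
         * The 12 immediate bytes appended below are either the 4-byte salt
         * plus the 8-byte explicit IV (RFC4106/RFC4543) or the full 12-byte
         * IV of generic GCM.
         */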
2190         /* Append Salt */
2191         if (!generic_gcm)
2192                 append_data(desc, ctx->key + ctx->enckeylen, 4);
2193         /* Append IV */
2194         append_data(desc, req->iv, ivsize);
2195         /* End of blank commands */
2196 }
2197
2198 static void init_authenc_job(struct aead_request *req,
2199                              struct aead_edesc *edesc,
2200                              bool all_contig, bool encrypt)
2201 {
2202         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2203         struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
2204                                                  struct caam_aead_alg, aead);
2205         unsigned int ivsize = crypto_aead_ivsize(aead);
2206         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2207         const bool ctr_mode = ((ctx->class1_alg_type & OP_ALG_AAI_MASK) ==
2208                                OP_ALG_AAI_CTR_MOD128);
2209         const bool is_rfc3686 = alg->caam.rfc3686;
2210         u32 *desc = edesc->hw_desc;
2211         u32 ivoffset = 0;
2212
2213         /*
2214          * AES-CTR needs to load IV in CONTEXT1 reg
2215          * at an offset of 128 bits (16 bytes)
2216          * CONTEXT1[255:128] = IV
2217          */
2218         if (ctr_mode)
2219                 ivoffset = 16;
2220
2221         /*
2222          * RFC3686 specific:
2223          *      CONTEXT1[255:128] = {NONCE, IV, COUNTER}
2224          */
2225         if (is_rfc3686)
2226                 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
2227
2228         init_aead_job(req, edesc, all_contig, encrypt);
2229
2230         if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
2231                 append_load_as_imm(desc, req->iv, ivsize,
2232                                    LDST_CLASS_1_CCB |
2233                                    LDST_SRCDST_BYTE_CONTEXT |
2234                                    (ivoffset << LDST_OFFSET_SHIFT));
2235 }
2236
2237 /*
2238  * Fill in ablkcipher job descriptor
2239  */
2240 static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
2241                                 struct ablkcipher_edesc *edesc,
2242                                 struct ablkcipher_request *req,
2243                                 bool iv_contig)
2244 {
2245         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2246         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2247         u32 *desc = edesc->hw_desc;
2248         u32 out_options = 0, in_options;
2249         dma_addr_t dst_dma, src_dma;
2250         int len, sec4_sg_index = 0;
2251
2252 #ifdef DEBUG
2253         bool may_sleep = ((req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2254                                               CRYPTO_TFM_REQ_MAY_SLEEP)) != 0);
2255         print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
2256                        DUMP_PREFIX_ADDRESS, 16, 4, req->info,
2257                        ivsize, 1);
2258         printk(KERN_ERR "asked=%d, nbytes=%d\n", (int)edesc->src_nents ? 100 : req->nbytes, req->nbytes);
2259         dbg_dump_sg(KERN_ERR, "src    @"__stringify(__LINE__)": ",
2260                     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
2261                     edesc->src_nents ? 100 : req->nbytes, 1, may_sleep);
2262 #endif
2263
2264         len = desc_len(sh_desc);
2265         init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
2266
2267         if (iv_contig) {
2268                 src_dma = edesc->iv_dma;
2269                 in_options = 0;
2270         } else {
2271                 src_dma = edesc->sec4_sg_dma;
2272                 sec4_sg_index += edesc->src_nents + 1;
2273                 in_options = LDST_SGF;
2274         }
2275         append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);
2276
2277         if (likely(req->src == req->dst)) {
2278                 if (!edesc->src_nents && iv_contig) {
2279                         dst_dma = sg_dma_address(req->src);
2280                 } else {
2281                         dst_dma = edesc->sec4_sg_dma +
2282                                 sizeof(struct sec4_sg_entry);
2283                         out_options = LDST_SGF;
2284                 }
2285         } else {
2286                 if (!edesc->dst_nents) {
2287                         dst_dma = sg_dma_address(req->dst);
2288                 } else {
2289                         dst_dma = edesc->sec4_sg_dma +
2290                                 sec4_sg_index * sizeof(struct sec4_sg_entry);
2291                         out_options = LDST_SGF;
2292                 }
2293         }
2294         append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
2295 }
2296
2297 /*
2298  * Fill in ablkcipher givencrypt job descriptor
2299  */
2300 static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
2301                                     struct ablkcipher_edesc *edesc,
2302                                     struct ablkcipher_request *req,
2303                                     bool iv_contig)
2304 {
2305         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2306         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2307         u32 *desc = edesc->hw_desc;
2308         u32 out_options, in_options;
2309         dma_addr_t dst_dma, src_dma;
2310         int len, sec4_sg_index = 0;
2311
2312 #ifdef DEBUG
2313         bool may_sleep = ((req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2314                                               CRYPTO_TFM_REQ_MAY_SLEEP)) != 0);
2315         print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
2316                        DUMP_PREFIX_ADDRESS, 16, 4, req->info,
2317                        ivsize, 1);
2318         dbg_dump_sg(KERN_ERR, "src    @" __stringify(__LINE__) ": ",
2319                     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
2320                     edesc->src_nents ? 100 : req->nbytes, 1, may_sleep);
2321 #endif
2322
2323         len = desc_len(sh_desc);
2324         init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
2325
2326         if (!edesc->src_nents) {
2327                 src_dma = sg_dma_address(req->src);
2328                 in_options = 0;
2329         } else {
2330                 src_dma = edesc->sec4_sg_dma;
2331                 sec4_sg_index += edesc->src_nents;
2332                 in_options = LDST_SGF;
2333         }
2334         append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);
2335
2336         if (iv_contig) {
2337                 dst_dma = edesc->iv_dma;
2338                 out_options = 0;
2339         } else {
2340                 dst_dma = edesc->sec4_sg_dma +
2341                           sec4_sg_index * sizeof(struct sec4_sg_entry);
2342                 out_options = LDST_SGF;
2343         }
2344         append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
2345 }
2346
2347 /*
2348  * allocate and map the aead extended descriptor
2349  */
2350 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
2351                                            int desc_bytes, bool *all_contig_ptr,
2352                                            bool encrypt)
2353 {
2354         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2355         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2356         struct device *jrdev = ctx->jrdev;
2357         gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2358                        CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
2359         int src_nents, dst_nents = 0;
2360         struct aead_edesc *edesc;
2361         int sgc;
2362         bool all_contig = true;
2363         int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes;
2364         unsigned int authsize = ctx->authsize;
2365
2366         if (unlikely(req->dst != req->src)) {
2367                 src_nents = sg_count(req->src, req->assoclen + req->cryptlen);
2368                 dst_nents = sg_count(req->dst,
2369                                      req->assoclen + req->cryptlen +
2370                                         (encrypt ? authsize : (-authsize)));
2371         } else {
2372                 src_nents = sg_count(req->src,
2373                                      req->assoclen + req->cryptlen +
2374                                         (encrypt ? authsize : 0));
2375         }
2376
2377         /* Check if data are contiguous. */
2378         all_contig = !src_nents;
2379         if (!all_contig) {
2380                 src_nents = src_nents ? : 1;
2381                 sec4_sg_len = src_nents;
2382         }
2383
2384         sec4_sg_len += dst_nents;
2385
2386         sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
2387
2388         /* allocate space for base edesc and hw desc commands, link tables */
2389         edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
2390                         GFP_DMA | flags);
2391         if (!edesc) {
2392                 dev_err(jrdev, "could not allocate extended descriptor\n");
2393                 return ERR_PTR(-ENOMEM);
2394         }
2395
2396         if (likely(req->src == req->dst)) {
2397                 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2398                                  DMA_BIDIRECTIONAL);
2399                 if (unlikely(!sgc)) {
2400                         dev_err(jrdev, "unable to map source\n");
2401                         kfree(edesc);
2402                         return ERR_PTR(-ENOMEM);
2403                 }
2404         } else {
2405                 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2406                                  DMA_TO_DEVICE);
2407                 if (unlikely(!sgc)) {
2408                         dev_err(jrdev, "unable to map source\n");
2409                         kfree(edesc);
2410                         return ERR_PTR(-ENOMEM);
2411                 }
2412
2413                 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
2414                                  DMA_FROM_DEVICE);
2415                 if (unlikely(!sgc)) {
2416                         dev_err(jrdev, "unable to map destination\n");
2417                         dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
2418                                      DMA_TO_DEVICE);
2419                         kfree(edesc);
2420                         return ERR_PTR(-ENOMEM);
2421                 }
2422         }
2423
2424         edesc->src_nents = src_nents;
2425         edesc->dst_nents = dst_nents;
2426         edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
2427                          desc_bytes;
2428         *all_contig_ptr = all_contig;
2429
2430         sec4_sg_index = 0;
2431         if (!all_contig) {
2432                 sg_to_sec4_sg_last(req->src, src_nents,
2433                               edesc->sec4_sg + sec4_sg_index, 0);
2434                 sec4_sg_index += src_nents;
2435         }
2436         if (dst_nents) {
2437                 sg_to_sec4_sg_last(req->dst, dst_nents,
2438                                    edesc->sec4_sg + sec4_sg_index, 0);
2439         }
2440
2441         if (!sec4_sg_bytes)
2442                 return edesc;
2443
2444         edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
2445                                             sec4_sg_bytes, DMA_TO_DEVICE);
2446         if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
2447                 dev_err(jrdev, "unable to map S/G table\n");
2448                 aead_unmap(jrdev, edesc, req);
2449                 kfree(edesc);
2450                 return ERR_PTR(-ENOMEM);
2451         }
2452
2453         edesc->sec4_sg_bytes = sec4_sg_bytes;
2454
2455         return edesc;
2456 }
2457
2458 static int gcm_encrypt(struct aead_request *req)
2459 {
2460         struct aead_edesc *edesc;
2461         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2462         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2463         struct device *jrdev = ctx->jrdev;
2464         bool all_contig;
2465         u32 *desc;
2466         int ret = 0;
2467
2468         /* allocate extended descriptor */
2469         edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
2470         if (IS_ERR(edesc))
2471                 return PTR_ERR(edesc);
2472
2473         /* Create and submit job descriptor */
2474         init_gcm_job(req, edesc, all_contig, true);
2475 #ifdef DEBUG
2476         print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2477                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2478                        desc_bytes(edesc->hw_desc), 1);
2479 #endif
2480
2481         desc = edesc->hw_desc;
2482         ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
2483         if (!ret) {
2484                 ret = -EINPROGRESS;
2485         } else {
2486                 aead_unmap(jrdev, edesc, req);
2487                 kfree(edesc);
2488         }
2489
2490         return ret;
2491 }
2492
2493 static int ipsec_gcm_encrypt(struct aead_request *req)
2494 {
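        /*
         * GCM in IPsec ESP: the associated data must at least cover the
         * 8-byte SPI and sequence number.
         */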
2495         if (req->assoclen < 8)
2496                 return -EINVAL;
2497
2498         return gcm_encrypt(req);
2499 }
2500
2501 static int aead_encrypt(struct aead_request *req)
2502 {
2503         struct aead_edesc *edesc;
2504         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2505         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2506         struct device *jrdev = ctx->jrdev;
2507         bool all_contig;
2508         u32 *desc;
2509         int ret = 0;
2510
2511         /* allocate extended descriptor */
2512         edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
2513                                  &all_contig, true);
2514         if (IS_ERR(edesc))
2515                 return PTR_ERR(edesc);
2516
2517         /* Create and submit job descriptor */
2518         init_authenc_job(req, edesc, all_contig, true);
2519 #ifdef DEBUG
2520         print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2521                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2522                        desc_bytes(edesc->hw_desc), 1);
2523 #endif
2524
2525         desc = edesc->hw_desc;
2526         ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
2527         if (!ret) {
2528                 ret = -EINPROGRESS;
2529         } else {
2530                 aead_unmap(jrdev, edesc, req);
2531                 kfree(edesc);
2532         }
2533
2534         return ret;
2535 }
2536
2537 static int gcm_decrypt(struct aead_request *req)
2538 {
2539         struct aead_edesc *edesc;
2540         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2541         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2542         struct device *jrdev = ctx->jrdev;
2543         bool all_contig;
2544         u32 *desc;
2545         int ret = 0;
2546
2547         /* allocate extended descriptor */
2548         edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
2549         if (IS_ERR(edesc))
2550                 return PTR_ERR(edesc);
2551
2552         /* Create and submit job descriptor */
2553         init_gcm_job(req, edesc, all_contig, false);
2554 #ifdef DEBUG
2555         print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2556                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2557                        desc_bytes(edesc->hw_desc), 1);
2558 #endif
2559
2560         desc = edesc->hw_desc;
2561         ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
2562         if (!ret) {
2563                 ret = -EINPROGRESS;
2564         } else {
2565                 aead_unmap(jrdev, edesc, req);
2566                 kfree(edesc);
2567         }
2568
2569         return ret;
2570 }
2571
2572 static int ipsec_gcm_decrypt(struct aead_request *req)
2573 {
2574         if (req->assoclen < 8)
2575                 return -EINVAL;
2576
2577         return gcm_decrypt(req);
2578 }
2579
2580 static int aead_decrypt(struct aead_request *req)
2581 {
2582         struct aead_edesc *edesc;
2583         struct crypto_aead *aead = crypto_aead_reqtfm(req);
2584         struct caam_ctx *ctx = crypto_aead_ctx(aead);
2585         struct device *jrdev = ctx->jrdev;
2586         bool all_contig;
2587         u32 *desc;
2588         int ret = 0;
2589
2590 #ifdef DEBUG
2591         bool may_sleep = ((req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2592                                               CRYPTO_TFM_REQ_MAY_SLEEP)) != 0);
2593         dbg_dump_sg(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
2594                     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
2595                     req->assoclen + req->cryptlen, 1, may_sleep);
2596 #endif
2597
2598         /* allocate extended descriptor */
2599         edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
2600                                  &all_contig, false);
2601         if (IS_ERR(edesc))
2602                 return PTR_ERR(edesc);
2603
2604         /* Create and submit job descriptor */
2605         init_authenc_job(req, edesc, all_contig, false);
2606 #ifdef DEBUG
2607         print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
2608                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2609                        desc_bytes(edesc->hw_desc), 1);
2610 #endif
2611
2612         desc = edesc->hw_desc;
2613         ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
2614         if (!ret) {
2615                 ret = -EINPROGRESS;
2616         } else {
2617                 aead_unmap(jrdev, edesc, req);
2618                 kfree(edesc);
2619         }
2620
2621         return ret;
2622 }
2623
2624 /*
2625  * allocate and map the ablkcipher extended descriptor
2626  */
2627 static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
2628                                                        *req, int desc_bytes,
2629                                                        bool *iv_contig_out)
2630 {
2631         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2632         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2633         struct device *jrdev = ctx->jrdev;
2634         gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
2635                        GFP_KERNEL : GFP_ATOMIC;
2636         int src_nents, dst_nents = 0, sec4_sg_bytes;
2637         struct ablkcipher_edesc *edesc;
2638         dma_addr_t iv_dma = 0;
2639         bool iv_contig = false;
2640         int sgc;
2641         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2642         int sec4_sg_index;
2643
2644         src_nents = sg_count(req->src, req->nbytes);
2645
2646         if (req->dst != req->src)
2647                 dst_nents = sg_count(req->dst, req->nbytes);
2648
2649         if (likely(req->src == req->dst)) {
2650                 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2651                                  DMA_BIDIRECTIONAL);
2652         } else {
2653                 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2654                                  DMA_TO_DEVICE);
2655                 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
2656                                  DMA_FROM_DEVICE);
2657         }
2658
2659         iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
2660         if (dma_mapping_error(jrdev, iv_dma)) {
2661                 dev_err(jrdev, "unable to map IV\n");
2662                 return ERR_PTR(-ENOMEM);
2663         }
2664
2665         /*
2666          * Check if iv can be contiguous with source and destination.
2667          * If so, include it. If not, create scatterlist.
2668          */
2669         if (!src_nents && iv_dma + ivsize == sg_dma_address(req->src))
2670                 iv_contig = true;
2671         else
2672                 src_nents = src_nents ? : 1;
2673         sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
2674                         sizeof(struct sec4_sg_entry);
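        /*
         * When a link table is needed it is laid out as [IV] [src entries]
         * [dst entries], matching the sec4_sg fills below.
         */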
2675
2676         /* allocate space for base edesc and hw desc commands, link tables */
2677         edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
2678                         GFP_DMA | flags);
2679         if (!edesc) {
2680                 dev_err(jrdev, "could not allocate extended descriptor\n");
2681                 return ERR_PTR(-ENOMEM);
2682         }
2683
2684         edesc->src_nents = src_nents;
2685         edesc->dst_nents = dst_nents;
2686         edesc->sec4_sg_bytes = sec4_sg_bytes;
2687         edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
2688                          desc_bytes;
2689
2690         sec4_sg_index = 0;
2691         if (!iv_contig) {
2692                 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
2693                 sg_to_sec4_sg_last(req->src, src_nents,
2694                                    edesc->sec4_sg + 1, 0);
2695                 sec4_sg_index += 1 + src_nents;
2696         }
2697
2698         if (dst_nents) {
2699                 sg_to_sec4_sg_last(req->dst, dst_nents,
2700                                    edesc->sec4_sg + sec4_sg_index, 0);
2701         }
2702
2703         edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
2704                                             sec4_sg_bytes, DMA_TO_DEVICE);
2705         if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
2706                 dev_err(jrdev, "unable to map S/G table\n");
2707                 return ERR_PTR(-ENOMEM);
2708         }
2709
2710         edesc->iv_dma = iv_dma;
2711
2712 #ifdef DEBUG
2713         print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
2714                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
2715                        sec4_sg_bytes, 1);
2716 #endif
2717
2718         *iv_contig_out = iv_contig;
2719         return edesc;
2720 }
2721
2722 static int ablkcipher_encrypt(struct ablkcipher_request *req)
2723 {
2724         struct ablkcipher_edesc *edesc;
2725         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2726         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2727         struct device *jrdev = ctx->jrdev;
2728         bool iv_contig;
2729         u32 *desc;
2730         int ret = 0;
2731
2732         /* allocate extended descriptor */
2733         edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
2734                                        CAAM_CMD_SZ, &iv_contig);
2735         if (IS_ERR(edesc))
2736                 return PTR_ERR(edesc);
2737
2738         /* Create and submit job descriptor */
2739         init_ablkcipher_job(ctx->sh_desc_enc, ctx->sh_desc_enc_dma,
2740                             edesc, req, iv_contig);
2741 #ifdef DEBUG
2742         print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
2743                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2744                        desc_bytes(edesc->hw_desc), 1);
2745 #endif
2746         desc = edesc->hw_desc;
2747         ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
2748
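        /*
         * A zero return from caam_jr_enqueue() means the job ring accepted
         * the descriptor and ablkcipher_encrypt_done() will run at
         * completion, so -EINPROGRESS is reported to the crypto API; any
         * error means the job was never queued, so the request is unmapped
         * and the descriptor freed right here.
         */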
2749         if (!ret) {
2750                 ret = -EINPROGRESS;
2751         } else {
2752                 ablkcipher_unmap(jrdev, edesc, req);
2753                 kfree(edesc);
2754         }
2755
2756         return ret;
2757 }
2758
2759 static int ablkcipher_decrypt(struct ablkcipher_request *req)
2760 {
2761         struct ablkcipher_edesc *edesc;
2762         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2763         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2764         struct device *jrdev = ctx->jrdev;
2765         bool iv_contig;
2766         u32 *desc;
2767         int ret = 0;
2768
2769         /* allocate extended descriptor */
2770         edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
2771                                        CAAM_CMD_SZ, &iv_contig);
2772         if (IS_ERR(edesc))
2773                 return PTR_ERR(edesc);
2774
2775         /* Create and submit job descriptor */
2776         init_ablkcipher_job(ctx->sh_desc_dec, ctx->sh_desc_dec_dma,
2777                             edesc, req, iv_contig);
2778         desc = edesc->hw_desc;
2779 #ifdef DEBUG
2780         print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
2781                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2782                        desc_bytes(edesc->hw_desc), 1);
2783 #endif
2784
2785         ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
2786         if (!ret) {
2787                 ret = -EINPROGRESS;
2788         } else {
2789                 ablkcipher_unmap(jrdev, edesc, req);
2790                 kfree(edesc);
2791         }
2792
2793         return ret;
2794 }
2795
2796 /*
2797  * allocate and DMA-map the extended descriptor for an ablkcipher
2798  * givencrypt request
2799  */
2800 static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
2801                                 struct skcipher_givcrypt_request *greq,
2802                                 int desc_bytes,
2803                                 bool *iv_contig_out)
2804 {
2805         struct ablkcipher_request *req = &greq->creq;
2806         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2807         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2808         struct device *jrdev = ctx->jrdev;
2809         gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
2810                                           CRYPTO_TFM_REQ_MAY_SLEEP)) ?
2811                        GFP_KERNEL : GFP_ATOMIC;
2812         int src_nents, dst_nents = 0, sec4_sg_bytes;
2813         struct ablkcipher_edesc *edesc;
2814         dma_addr_t iv_dma = 0;
2815         bool iv_contig = false;
2816         int sgc;
2817         int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
2818         int sec4_sg_index;
2819
2820         src_nents = sg_count(req->src, req->nbytes);
2821
2822         if (unlikely(req->dst != req->src))
2823                 dst_nents = sg_count(req->dst, req->nbytes);
2824
2825         if (likely(req->src == req->dst)) {
2826                 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2827                                  DMA_BIDIRECTIONAL);
2828         } else {
2829                 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
2830                                  DMA_TO_DEVICE);
2831                 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
2832                                  DMA_FROM_DEVICE);
2833         }
2834
2835         /*
2836          * Check if the generated IV sits contiguously ahead of a single-segment
2837          * destination. If so, the output is one buffer; if not, build an S/G table.
2838          */
2839         iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
2840         if (dma_mapping_error(jrdev, iv_dma)) {
2841                 dev_err(jrdev, "unable to map IV\n");
2842                 return ERR_PTR(-ENOMEM);
2843         }
2844
2845         if (!dst_nents && iv_dma + ivsize == sg_dma_address(req->dst))
2846                 iv_contig = true;
2847         else
2848                 dst_nents = dst_nents ? : 1;
2849         sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
2850                         sizeof(struct sec4_sg_entry);
2851
2852         /* allocate space for base edesc and hw desc commands, link tables */
2853         edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
2854                         GFP_DMA | flags);
2855         if (!edesc) {
2856                 dev_err(jrdev, "could not allocate extended descriptor\n");
2857                 return ERR_PTR(-ENOMEM);
2858         }
2859
2860         edesc->src_nents = src_nents;
2861         edesc->dst_nents = dst_nents;
2862         edesc->sec4_sg_bytes = sec4_sg_bytes;
2863         edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
2864                          desc_bytes;
2865
2866         sec4_sg_index = 0;
2867         if (src_nents) {
2868                 sg_to_sec4_sg_last(req->src, src_nents, edesc->sec4_sg, 0);
2869                 sec4_sg_index += src_nents;
2870         }
2871
2872         if (!iv_contig) {
2873                 dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
2874                                    iv_dma, ivsize, 0);
2875                 sec4_sg_index += 1;
2876                 sg_to_sec4_sg_last(req->dst, dst_nents,
2877                                    edesc->sec4_sg + sec4_sg_index, 0);
2878         }
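        /*
         * Editorial note: a single table carries both the input (source)
         * entries and the output (generated IV followed by destination)
         * entries; the job descriptor built later references it at the
         * appropriate offsets, so the IV handed back to the caller sits
         * immediately ahead of the ciphertext in the output sequence.
         */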
2879
2880         edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
2881                                             sec4_sg_bytes, DMA_TO_DEVICE);
2882         if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
2883                 dev_err(jrdev, "unable to map S/G table\n");
2884                 return ERR_PTR(-ENOMEM);
2885         }
2886         edesc->iv_dma = iv_dma;
2887
2888 #ifdef DEBUG
2889         print_hex_dump(KERN_ERR,
2890                        "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
2891                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
2892                        sec4_sg_bytes, 1);
2893 #endif
2894
2895         *iv_contig_out = iv_contig;
2896         return edesc;
2897 }
2898
2899 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
2900 {
2901         struct ablkcipher_request *req = &creq->creq;
2902         struct ablkcipher_edesc *edesc;
2903         struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
2904         struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
2905         struct device *jrdev = ctx->jrdev;
2906         bool iv_contig;
2907         u32 *desc;
2908         int ret = 0;
2909
2910         /* allocate extended descriptor */
2911         edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
2912                                            CAAM_CMD_SZ, &iv_contig);
2913         if (IS_ERR(edesc))
2914                 return PTR_ERR(edesc);
2915
2916         /* Create and submit job descriptor */
2917         init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
2918                                 edesc, req, iv_contig);
2919 #ifdef DEBUG
2920         print_hex_dump(KERN_ERR,
2921                        "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
2922                        DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
2923                        desc_bytes(edesc->hw_desc), 1);
2924 #endif
2925         desc = edesc->hw_desc;
2926         ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
2927
2928         if (!ret) {
2929                 ret = -EINPROGRESS;
2930         } else {
2931                 ablkcipher_unmap(jrdev, edesc, req);
2932                 kfree(edesc);
2933         }
2934
2935         return ret;
2936 }
2937
2938 #define template_aead           template_u.aead
2939 #define template_ablkcipher     template_u.ablkcipher
2940 struct caam_alg_template {
2941         char name[CRYPTO_MAX_ALG_NAME];
2942         char driver_name[CRYPTO_MAX_ALG_NAME];
2943         unsigned int blocksize;
2944         u32 type;
2945         union {
2946                 struct ablkcipher_alg ablkcipher;
2947         } template_u;
2948         u32 class1_alg_type;
2949         u32 class2_alg_type;
2950         u32 alg_op;
2951 };
2952
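/*
 * Editorial sketch of how each template below is consumed at registration
 * time (an approximation of the helper later in this file; exact flags and
 * field names may differ):
 *
 *	alg->cra_name        = template->name;
 *	alg->cra_driver_name = template->driver_name;
 *	alg->cra_priority    = CAAM_CRA_PRIORITY;
 *	alg->cra_blocksize   = template->blocksize;
 *	alg->cra_flags       = CRYPTO_ALG_ASYNC | template->type;
 *	alg->cra_ablkcipher  = template->template_ablkcipher;
 *
 * The class1_alg_type/class2_alg_type/alg_op values feed the OPERATION
 * command fields when the shared descriptors are constructed.
 */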
2953 static struct caam_alg_template driver_algs[] = {
2954         /* ablkcipher descriptor */
2955         {
2956                 .name = "cbc(aes)",
2957                 .driver_name = "cbc-aes-caam",
2958                 .blocksize = AES_BLOCK_SIZE,
2959                 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2960                 .template_ablkcipher = {
2961                         .setkey = ablkcipher_setkey,
2962                         .encrypt = ablkcipher_encrypt,
2963                         .decrypt = ablkcipher_decrypt,
2964                         .givencrypt = ablkcipher_givencrypt,
2965                         .geniv = "<built-in>",
2966                         .min_keysize = AES_MIN_KEY_SIZE,
2967                         .max_keysize = AES_MAX_KEY_SIZE,
2968                         .ivsize = AES_BLOCK_SIZE,
2969                         },
2970                 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2971         },
2972         {
2973                 .name = "cbc(des3_ede)",
2974                 .driver_name = "cbc-3des-caam",
2975                 .blocksize = DES3_EDE_BLOCK_SIZE,
2976                 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2977                 .template_ablkcipher = {
2978                         .setkey = ablkcipher_setkey,
2979                         .encrypt = ablkcipher_encrypt,
2980                         .decrypt = ablkcipher_decrypt,
2981                         .givencrypt = ablkcipher_givencrypt,
2982                         .geniv = "<built-in>",
2983                         .min_keysize = DES3_EDE_KEY_SIZE,
2984                         .max_keysize = DES3_EDE_KEY_SIZE,
2985                         .ivsize = DES3_EDE_BLOCK_SIZE,
2986                         },
2987                 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2988         },
2989         {
2990                 .name = "cbc(des)",
2991                 .driver_name = "cbc-des-caam",
2992                 .blocksize = DES_BLOCK_SIZE,
2993                 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2994                 .template_ablkcipher = {
2995                         .setkey = ablkcipher_setkey,
2996                         .encrypt = ablkcipher_encrypt,
2997                         .decrypt = ablkcipher_decrypt,
2998                         .givencrypt = ablkcipher_givencrypt,
2999                         .geniv = "<built-in>",
3000                         .min_keysize = DES_KEY_SIZE,
3001                         .max_keysize = DES_KEY_SIZE,
3002                         .ivsize = DES_BLOCK_SIZE,
3003                         },
3004                 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3005         },
3006         {
3007                 .name = "ctr(aes)",
3008                 .driver_name = "ctr-aes-caam",
3009                 .blocksize = 1,
3010                 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
3011                 .template_ablkcipher = {
3012                         .setkey = ablkcipher_setkey,
3013                         .encrypt = ablkcipher_encrypt,
3014                         .decrypt = ablkcipher_decrypt,
3015                         .geniv = "chainiv",
3016                         .min_keysize = AES_MIN_KEY_SIZE,
3017                         .max_keysize = AES_MAX_KEY_SIZE,
3018                         .ivsize = AES_BLOCK_SIZE,
3019                         },
3020                 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
3021         },
3022         {
3023                 .name = "rfc3686(ctr(aes))",
3024                 .driver_name = "rfc3686-ctr-aes-caam",
3025                 .blocksize = 1,
3026                 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
3027                 .template_ablkcipher = {
3028                         .setkey = ablkcipher_setkey,
3029                         .encrypt = ablkcipher_encrypt,
3030                         .decrypt = ablkcipher_decrypt,
3031                         .givencrypt = ablkcipher_givencrypt,
3032                         .geniv = "<built-in>",
3033                         .min_keysize = AES_MIN_KEY_SIZE +
3034                                        CTR_RFC3686_NONCE_SIZE,
3035                         .max_keysize = AES_MAX_KEY_SIZE +
3036                                        CTR_RFC3686_NONCE_SIZE,
3037                         .ivsize = CTR_RFC3686_IV_SIZE,
3038                         },
3039                 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
3040         },
3041         {
3042                 .name = "xts(aes)",
3043                 .driver_name = "xts-aes-caam",
3044                 .blocksize = AES_BLOCK_SIZE,
3045                 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
3046                 .template_ablkcipher = {
3047                         .setkey = xts_ablkcipher_setkey,
3048                         .encrypt = ablkcipher_encrypt,
3049                         .decrypt = ablkcipher_decrypt,
3050                         .geniv = "eseqiv",
3051                         .min_keysize = 2 * AES_MIN_KEY_SIZE,
3052                         .max_keysize = 2 * AES_MAX_KEY_SIZE,
3053                         .ivsize = AES_BLOCK_SIZE,
3054                         },
3055                 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
3056         },
3057 };
3058
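/*
 * AEAD algorithms register through the aead_alg interface rather than the
 * template mechanism above. In each entry the .caam part supplies the
 * class 1/class 2 algorithm selectors (.alg_op is used when the split
 * authentication key is generated), while .geniv marks the IV-generating
 * (echainiv) variants.
 */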
3059 static struct caam_aead_alg driver_aeads[] = {
3060         {
3061                 .aead = {
3062                         .base = {
3063                                 .cra_name = "rfc4106(gcm(aes))",
3064                                 .cra_driver_name = "rfc4106-gcm-aes-caam",
3065                                 .cra_blocksize = 1,
3066                         },
3067                         .setkey = rfc4106_setkey,
3068                         .setauthsize = rfc4106_setauthsize,
3069                         .encrypt = ipsec_gcm_encrypt,
3070                         .decrypt = ipsec_gcm_decrypt,
3071                         .ivsize = 8,
3072                         .maxauthsize = AES_BLOCK_SIZE,
3073                 },
3074                 .caam = {
3075                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3076                 },
3077         },
3078         {
3079                 .aead = {
3080                         .base = {
3081                                 .cra_name = "rfc4543(gcm(aes))",
3082                                 .cra_driver_name = "rfc4543-gcm-aes-caam",
3083                                 .cra_blocksize = 1,
3084                         },
3085                         .setkey = rfc4543_setkey,
3086                         .setauthsize = rfc4543_setauthsize,
3087                         .encrypt = ipsec_gcm_encrypt,
3088                         .decrypt = ipsec_gcm_decrypt,
3089                         .ivsize = 8,
3090                         .maxauthsize = AES_BLOCK_SIZE,
3091                 },
3092                 .caam = {
3093                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3094                 },
3095         },
3096         /* Galois Counter Mode */
3097         {
3098                 .aead = {
3099                         .base = {
3100                                 .cra_name = "gcm(aes)",
3101                                 .cra_driver_name = "gcm-aes-caam",
3102                                 .cra_blocksize = 1,
3103                         },
3104                         .setkey = gcm_setkey,
3105                         .setauthsize = gcm_setauthsize,
3106                         .encrypt = gcm_encrypt,
3107                         .decrypt = gcm_decrypt,
3108                         .ivsize = 12,
3109                         .maxauthsize = AES_BLOCK_SIZE,
3110                 },
3111                 .caam = {
3112                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
3113                 },
3114         },
3115         /* single-pass ipsec_esp descriptor */
3116         {
3117                 .aead = {
3118                         .base = {
3119                                 .cra_name = "authenc(hmac(md5),"
3120                                             "ecb(cipher_null))",
3121                                 .cra_driver_name = "authenc-hmac-md5-"
3122                                                    "ecb-cipher_null-caam",
3123                                 .cra_blocksize = NULL_BLOCK_SIZE,
3124                         },
3125                         .setkey = aead_setkey,
3126                         .setauthsize = aead_setauthsize,
3127                         .encrypt = aead_encrypt,
3128                         .decrypt = aead_decrypt,
3129                         .ivsize = NULL_IV_SIZE,
3130                         .maxauthsize = MD5_DIGEST_SIZE,
3131                 },
3132                 .caam = {
3133                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3134                                            OP_ALG_AAI_HMAC_PRECOMP,
3135                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3136                 },
3137         },
3138         {
3139                 .aead = {
3140                         .base = {
3141                                 .cra_name = "authenc(hmac(sha1),"
3142                                             "ecb(cipher_null))",
3143                                 .cra_driver_name = "authenc-hmac-sha1-"
3144                                                    "ecb-cipher_null-caam",
3145                                 .cra_blocksize = NULL_BLOCK_SIZE,
3146                         },
3147                         .setkey = aead_setkey,
3148                         .setauthsize = aead_setauthsize,
3149                         .encrypt = aead_encrypt,
3150                         .decrypt = aead_decrypt,
3151                         .ivsize = NULL_IV_SIZE,
3152                         .maxauthsize = SHA1_DIGEST_SIZE,
3153                 },
3154                 .caam = {
3155                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3156                                            OP_ALG_AAI_HMAC_PRECOMP,
3157                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3158                 },
3159         },
3160         {
3161                 .aead = {
3162                         .base = {
3163                                 .cra_name = "authenc(hmac(sha224),"
3164                                             "ecb(cipher_null))",
3165                                 .cra_driver_name = "authenc-hmac-sha224-"
3166                                                    "ecb-cipher_null-caam",
3167                                 .cra_blocksize = NULL_BLOCK_SIZE,
3168                         },
3169                         .setkey = aead_setkey,
3170                         .setauthsize = aead_setauthsize,
3171                         .encrypt = aead_encrypt,
3172                         .decrypt = aead_decrypt,
3173                         .ivsize = NULL_IV_SIZE,
3174                         .maxauthsize = SHA224_DIGEST_SIZE,
3175                 },
3176                 .caam = {
3177                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3178                                            OP_ALG_AAI_HMAC_PRECOMP,
3179                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3180                 },
3181         },
3182         {
3183                 .aead = {
3184                         .base = {
3185                                 .cra_name = "authenc(hmac(sha256),"
3186                                             "ecb(cipher_null))",
3187                                 .cra_driver_name = "authenc-hmac-sha256-"
3188                                                    "ecb-cipher_null-caam",
3189                                 .cra_blocksize = NULL_BLOCK_SIZE,
3190                         },
3191                         .setkey = aead_setkey,
3192                         .setauthsize = aead_setauthsize,
3193                         .encrypt = aead_encrypt,
3194                         .decrypt = aead_decrypt,
3195                         .ivsize = NULL_IV_SIZE,
3196                         .maxauthsize = SHA256_DIGEST_SIZE,
3197                 },
3198                 .caam = {
3199                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3200                                            OP_ALG_AAI_HMAC_PRECOMP,
3201                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3202                 },
3203         },
3204         {
3205                 .aead = {
3206                         .base = {
3207                                 .cra_name = "authenc(hmac(sha384),"
3208                                             "ecb(cipher_null))",
3209                                 .cra_driver_name = "authenc-hmac-sha384-"
3210                                                    "ecb-cipher_null-caam",
3211                                 .cra_blocksize = NULL_BLOCK_SIZE,
3212                         },
3213                         .setkey = aead_setkey,
3214                         .setauthsize = aead_setauthsize,
3215                         .encrypt = aead_encrypt,
3216                         .decrypt = aead_decrypt,
3217                         .ivsize = NULL_IV_SIZE,
3218                         .maxauthsize = SHA384_DIGEST_SIZE,
3219                 },
3220                 .caam = {
3221                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3222                                            OP_ALG_AAI_HMAC_PRECOMP,
3223                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3224                 },
3225         },
3226         {
3227                 .aead = {
3228                         .base = {
3229                                 .cra_name = "authenc(hmac(sha512),"
3230                                             "ecb(cipher_null))",
3231                                 .cra_driver_name = "authenc-hmac-sha512-"
3232                                                    "ecb-cipher_null-caam",
3233                                 .cra_blocksize = NULL_BLOCK_SIZE,
3234                         },
3235                         .setkey = aead_setkey,
3236                         .setauthsize = aead_setauthsize,
3237                         .encrypt = aead_encrypt,
3238                         .decrypt = aead_decrypt,
3239                         .ivsize = NULL_IV_SIZE,
3240                         .maxauthsize = SHA512_DIGEST_SIZE,
3241                 },
3242                 .caam = {
3243                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3244                                            OP_ALG_AAI_HMAC_PRECOMP,
3245                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3246                 },
3247         },
3248         {
3249                 .aead = {
3250                         .base = {
3251                                 .cra_name = "authenc(hmac(md5),cbc(aes))",
3252                                 .cra_driver_name = "authenc-hmac-md5-"
3253                                                    "cbc-aes-caam",
3254                                 .cra_blocksize = AES_BLOCK_SIZE,
3255                         },
3256                         .setkey = aead_setkey,
3257                         .setauthsize = aead_setauthsize,
3258                         .encrypt = aead_encrypt,
3259                         .decrypt = aead_decrypt,
3260                         .ivsize = AES_BLOCK_SIZE,
3261                         .maxauthsize = MD5_DIGEST_SIZE,
3262                 },
3263                 .caam = {
3264                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3265                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3266                                            OP_ALG_AAI_HMAC_PRECOMP,
3267                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3268                 },
3269         },
3270         {
3271                 .aead = {
3272                         .base = {
3273                                 .cra_name = "echainiv(authenc(hmac(md5),"
3274                                             "cbc(aes)))",
3275                                 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3276                                                    "cbc-aes-caam",
3277                                 .cra_blocksize = AES_BLOCK_SIZE,
3278                         },
3279                         .setkey = aead_setkey,
3280                         .setauthsize = aead_setauthsize,
3281                         .encrypt = aead_encrypt,
3282                         .decrypt = aead_decrypt,
3283                         .ivsize = AES_BLOCK_SIZE,
3284                         .maxauthsize = MD5_DIGEST_SIZE,
3285                 },
3286                 .caam = {
3287                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3288                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3289                                            OP_ALG_AAI_HMAC_PRECOMP,
3290                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3291                         .geniv = true,
3292                 },
3293         },
3294         {
3295                 .aead = {
3296                         .base = {
3297                                 .cra_name = "authenc(hmac(sha1),cbc(aes))",
3298                                 .cra_driver_name = "authenc-hmac-sha1-"
3299                                                    "cbc-aes-caam",
3300                                 .cra_blocksize = AES_BLOCK_SIZE,
3301                         },
3302                         .setkey = aead_setkey,
3303                         .setauthsize = aead_setauthsize,
3304                         .encrypt = aead_encrypt,
3305                         .decrypt = aead_decrypt,
3306                         .ivsize = AES_BLOCK_SIZE,
3307                         .maxauthsize = SHA1_DIGEST_SIZE,
3308                 },
3309                 .caam = {
3310                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3311                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3312                                            OP_ALG_AAI_HMAC_PRECOMP,
3313                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3314                 },
3315         },
3316         {
3317                 .aead = {
3318                         .base = {
3319                                 .cra_name = "echainiv(authenc(hmac(sha1),"
3320                                             "cbc(aes)))",
3321                                 .cra_driver_name = "echainiv-authenc-"
3322                                                    "hmac-sha1-cbc-aes-caam",
3323                                 .cra_blocksize = AES_BLOCK_SIZE,
3324                         },
3325                         .setkey = aead_setkey,
3326                         .setauthsize = aead_setauthsize,
3327                         .encrypt = aead_encrypt,
3328                         .decrypt = aead_decrypt,
3329                         .ivsize = AES_BLOCK_SIZE,
3330                         .maxauthsize = SHA1_DIGEST_SIZE,
3331                 },
3332                 .caam = {
3333                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3334                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3335                                            OP_ALG_AAI_HMAC_PRECOMP,
3336                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3337                         .geniv = true,
3338                 },
3339         },
3340         {
3341                 .aead = {
3342                         .base = {
3343                                 .cra_name = "authenc(hmac(sha224),cbc(aes))",
3344                                 .cra_driver_name = "authenc-hmac-sha224-"
3345                                                    "cbc-aes-caam",
3346                                 .cra_blocksize = AES_BLOCK_SIZE,
3347                         },
3348                         .setkey = aead_setkey,
3349                         .setauthsize = aead_setauthsize,
3350                         .encrypt = aead_encrypt,
3351                         .decrypt = aead_decrypt,
3352                         .ivsize = AES_BLOCK_SIZE,
3353                         .maxauthsize = SHA224_DIGEST_SIZE,
3354                 },
3355                 .caam = {
3356                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3357                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3358                                            OP_ALG_AAI_HMAC_PRECOMP,
3359                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3360                 },
3361         },
3362         {
3363                 .aead = {
3364                         .base = {
3365                                 .cra_name = "echainiv(authenc(hmac(sha224),"
3366                                             "cbc(aes)))",
3367                                 .cra_driver_name = "echainiv-authenc-"
3368                                                    "hmac-sha224-cbc-aes-caam",
3369                                 .cra_blocksize = AES_BLOCK_SIZE,
3370                         },
3371                         .setkey = aead_setkey,
3372                         .setauthsize = aead_setauthsize,
3373                         .encrypt = aead_encrypt,
3374                         .decrypt = aead_decrypt,
3375                         .ivsize = AES_BLOCK_SIZE,
3376                         .maxauthsize = SHA224_DIGEST_SIZE,
3377                 },
3378                 .caam = {
3379                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3380                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3381                                            OP_ALG_AAI_HMAC_PRECOMP,
3382                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3383                         .geniv = true,
3384                 },
3385         },
3386         {
3387                 .aead = {
3388                         .base = {
3389                                 .cra_name = "authenc(hmac(sha256),cbc(aes))",
3390                                 .cra_driver_name = "authenc-hmac-sha256-"
3391                                                    "cbc-aes-caam",
3392                                 .cra_blocksize = AES_BLOCK_SIZE,
3393                         },
3394                         .setkey = aead_setkey,
3395                         .setauthsize = aead_setauthsize,
3396                         .encrypt = aead_encrypt,
3397                         .decrypt = aead_decrypt,
3398                         .ivsize = AES_BLOCK_SIZE,
3399                         .maxauthsize = SHA256_DIGEST_SIZE,
3400                 },
3401                 .caam = {
3402                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3403                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3404                                            OP_ALG_AAI_HMAC_PRECOMP,
3405                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3406                 },
3407         },
3408         {
3409                 .aead = {
3410                         .base = {
3411                                 .cra_name = "echainiv(authenc(hmac(sha256),"
3412                                             "cbc(aes)))",
3413                                 .cra_driver_name = "echainiv-authenc-"
3414                                                    "hmac-sha256-cbc-aes-caam",
3415                                 .cra_blocksize = AES_BLOCK_SIZE,
3416                         },
3417                         .setkey = aead_setkey,
3418                         .setauthsize = aead_setauthsize,
3419                         .encrypt = aead_encrypt,
3420                         .decrypt = aead_decrypt,
3421                         .ivsize = AES_BLOCK_SIZE,
3422                         .maxauthsize = SHA256_DIGEST_SIZE,
3423                 },
3424                 .caam = {
3425                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3426                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3427                                            OP_ALG_AAI_HMAC_PRECOMP,
3428                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3429                         .geniv = true,
3430                 },
3431         },
3432         {
3433                 .aead = {
3434                         .base = {
3435                                 .cra_name = "authenc(hmac(sha384),cbc(aes))",
3436                                 .cra_driver_name = "authenc-hmac-sha384-"
3437                                                    "cbc-aes-caam",
3438                                 .cra_blocksize = AES_BLOCK_SIZE,
3439                         },
3440                         .setkey = aead_setkey,
3441                         .setauthsize = aead_setauthsize,
3442                         .encrypt = aead_encrypt,
3443                         .decrypt = aead_decrypt,
3444                         .ivsize = AES_BLOCK_SIZE,
3445                         .maxauthsize = SHA384_DIGEST_SIZE,
3446                 },
3447                 .caam = {
3448                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3449                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3450                                            OP_ALG_AAI_HMAC_PRECOMP,
3451                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3452                 },
3453         },
3454         {
3455                 .aead = {
3456                         .base = {
3457                                 .cra_name = "echainiv(authenc(hmac(sha384),"
3458                                             "cbc(aes)))",
3459                                 .cra_driver_name = "echainiv-authenc-"
3460                                                    "hmac-sha384-cbc-aes-caam",
3461                                 .cra_blocksize = AES_BLOCK_SIZE,
3462                         },
3463                         .setkey = aead_setkey,
3464                         .setauthsize = aead_setauthsize,
3465                         .encrypt = aead_encrypt,
3466                         .decrypt = aead_decrypt,
3467                         .ivsize = AES_BLOCK_SIZE,
3468                         .maxauthsize = SHA384_DIGEST_SIZE,
3469                 },
3470                 .caam = {
3471                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3472                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3473                                            OP_ALG_AAI_HMAC_PRECOMP,
3474                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3475                         .geniv = true,
3476                 },
3477         },
3478         {
3479                 .aead = {
3480                         .base = {
3481                                 .cra_name = "authenc(hmac(sha512),cbc(aes))",
3482                                 .cra_driver_name = "authenc-hmac-sha512-"
3483                                                    "cbc-aes-caam",
3484                                 .cra_blocksize = AES_BLOCK_SIZE,
3485                         },
3486                         .setkey = aead_setkey,
3487                         .setauthsize = aead_setauthsize,
3488                         .encrypt = aead_encrypt,
3489                         .decrypt = aead_decrypt,
3490                         .ivsize = AES_BLOCK_SIZE,
3491                         .maxauthsize = SHA512_DIGEST_SIZE,
3492                 },
3493                 .caam = {
3494                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3495                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3496                                            OP_ALG_AAI_HMAC_PRECOMP,
3497                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3498                 },
3499         },
3500         {
3501                 .aead = {
3502                         .base = {
3503                                 .cra_name = "echainiv(authenc(hmac(sha512),"
3504                                             "cbc(aes)))",
3505                                 .cra_driver_name = "echainiv-authenc-"
3506                                                    "hmac-sha512-cbc-aes-caam",
3507                                 .cra_blocksize = AES_BLOCK_SIZE,
3508                         },
3509                         .setkey = aead_setkey,
3510                         .setauthsize = aead_setauthsize,
3511                         .encrypt = aead_encrypt,
3512                         .decrypt = aead_decrypt,
3513                         .ivsize = AES_BLOCK_SIZE,
3514                         .maxauthsize = SHA512_DIGEST_SIZE,
3515                 },
3516                 .caam = {
3517                         .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
3518                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3519                                            OP_ALG_AAI_HMAC_PRECOMP,
3520                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3521                         .geniv = true,
3522                 },
3523         },
3524         {
3525                 .aead = {
3526                         .base = {
3527                                 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
3528                                 .cra_driver_name = "authenc-hmac-md5-"
3529                                                    "cbc-des3_ede-caam",
3530                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3531                         },
3532                         .setkey = aead_setkey,
3533                         .setauthsize = aead_setauthsize,
3534                         .encrypt = aead_encrypt,
3535                         .decrypt = aead_decrypt,
3536                         .ivsize = DES3_EDE_BLOCK_SIZE,
3537                         .maxauthsize = MD5_DIGEST_SIZE,
3538                 },
3539                 .caam = {
3540                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3541                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3542                                            OP_ALG_AAI_HMAC_PRECOMP,
3543                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3544                 },
3545         },
3546         {
3547                 .aead = {
3548                         .base = {
3549                                 .cra_name = "echainiv(authenc(hmac(md5),"
3550                                             "cbc(des3_ede)))",
3551                                 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3552                                                    "cbc-des3_ede-caam",
3553                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3554                         },
3555                         .setkey = aead_setkey,
3556                         .setauthsize = aead_setauthsize,
3557                         .encrypt = aead_encrypt,
3558                         .decrypt = aead_decrypt,
3559                         .ivsize = DES3_EDE_BLOCK_SIZE,
3560                         .maxauthsize = MD5_DIGEST_SIZE,
3561                 },
3562                 .caam = {
3563                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3564                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3565                                            OP_ALG_AAI_HMAC_PRECOMP,
3566                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3567                         .geniv = true,
3568                 },
3569         },
3570         {
3571                 .aead = {
3572                         .base = {
3573                                 .cra_name = "authenc(hmac(sha1),"
3574                                             "cbc(des3_ede))",
3575                                 .cra_driver_name = "authenc-hmac-sha1-"
3576                                                    "cbc-des3_ede-caam",
3577                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3578                         },
3579                         .setkey = aead_setkey,
3580                         .setauthsize = aead_setauthsize,
3581                         .encrypt = aead_encrypt,
3582                         .decrypt = aead_decrypt,
3583                         .ivsize = DES3_EDE_BLOCK_SIZE,
3584                         .maxauthsize = SHA1_DIGEST_SIZE,
3585                 },
3586                 .caam = {
3587                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3588                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3589                                            OP_ALG_AAI_HMAC_PRECOMP,
3590                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3591                 },
3592         },
3593         {
3594                 .aead = {
3595                         .base = {
3596                                 .cra_name = "echainiv(authenc(hmac(sha1),"
3597                                             "cbc(des3_ede)))",
3598                                 .cra_driver_name = "echainiv-authenc-"
3599                                                    "hmac-sha1-"
3600                                                    "cbc-des3_ede-caam",
3601                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3602                         },
3603                         .setkey = aead_setkey,
3604                         .setauthsize = aead_setauthsize,
3605                         .encrypt = aead_encrypt,
3606                         .decrypt = aead_decrypt,
3607                         .ivsize = DES3_EDE_BLOCK_SIZE,
3608                         .maxauthsize = SHA1_DIGEST_SIZE,
3609                 },
3610                 .caam = {
3611                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3612                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3613                                            OP_ALG_AAI_HMAC_PRECOMP,
3614                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3615                         .geniv = true,
3616                 },
3617         },
3618         {
3619                 .aead = {
3620                         .base = {
3621                                 .cra_name = "authenc(hmac(sha224),"
3622                                             "cbc(des3_ede))",
3623                                 .cra_driver_name = "authenc-hmac-sha224-"
3624                                                    "cbc-des3_ede-caam",
3625                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3626                         },
3627                         .setkey = aead_setkey,
3628                         .setauthsize = aead_setauthsize,
3629                         .encrypt = aead_encrypt,
3630                         .decrypt = aead_decrypt,
3631                         .ivsize = DES3_EDE_BLOCK_SIZE,
3632                         .maxauthsize = SHA224_DIGEST_SIZE,
3633                 },
3634                 .caam = {
3635                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3636                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3637                                            OP_ALG_AAI_HMAC_PRECOMP,
3638                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3639                 },
3640         },
3641         {
3642                 .aead = {
3643                         .base = {
3644                                 .cra_name = "echainiv(authenc(hmac(sha224),"
3645                                             "cbc(des3_ede)))",
3646                                 .cra_driver_name = "echainiv-authenc-"
3647                                                    "hmac-sha224-"
3648                                                    "cbc-des3_ede-caam",
3649                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3650                         },
3651                         .setkey = aead_setkey,
3652                         .setauthsize = aead_setauthsize,
3653                         .encrypt = aead_encrypt,
3654                         .decrypt = aead_decrypt,
3655                         .ivsize = DES3_EDE_BLOCK_SIZE,
3656                         .maxauthsize = SHA224_DIGEST_SIZE,
3657                 },
3658                 .caam = {
3659                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3660                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3661                                            OP_ALG_AAI_HMAC_PRECOMP,
3662                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3663                         .geniv = true,
3664                 },
3665         },
3666         {
3667                 .aead = {
3668                         .base = {
3669                                 .cra_name = "authenc(hmac(sha256),"
3670                                             "cbc(des3_ede))",
3671                                 .cra_driver_name = "authenc-hmac-sha256-"
3672                                                    "cbc-des3_ede-caam",
3673                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3674                         },
3675                         .setkey = aead_setkey,
3676                         .setauthsize = aead_setauthsize,
3677                         .encrypt = aead_encrypt,
3678                         .decrypt = aead_decrypt,
3679                         .ivsize = DES3_EDE_BLOCK_SIZE,
3680                         .maxauthsize = SHA256_DIGEST_SIZE,
3681                 },
3682                 .caam = {
3683                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3684                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3685                                            OP_ALG_AAI_HMAC_PRECOMP,
3686                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3687                 },
3688         },
3689         {
3690                 .aead = {
3691                         .base = {
3692                                 .cra_name = "echainiv(authenc(hmac(sha256),"
3693                                             "cbc(des3_ede)))",
3694                                 .cra_driver_name = "echainiv-authenc-"
3695                                                    "hmac-sha256-"
3696                                                    "cbc-des3_ede-caam",
3697                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3698                         },
3699                         .setkey = aead_setkey,
3700                         .setauthsize = aead_setauthsize,
3701                         .encrypt = aead_encrypt,
3702                         .decrypt = aead_decrypt,
3703                         .ivsize = DES3_EDE_BLOCK_SIZE,
3704                         .maxauthsize = SHA256_DIGEST_SIZE,
3705                 },
3706                 .caam = {
3707                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3708                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3709                                            OP_ALG_AAI_HMAC_PRECOMP,
3710                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3711                         .geniv = true,
3712                 },
3713         },
3714         {
3715                 .aead = {
3716                         .base = {
3717                                 .cra_name = "authenc(hmac(sha384),"
3718                                             "cbc(des3_ede))",
3719                                 .cra_driver_name = "authenc-hmac-sha384-"
3720                                                    "cbc-des3_ede-caam",
3721                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3722                         },
3723                         .setkey = aead_setkey,
3724                         .setauthsize = aead_setauthsize,
3725                         .encrypt = aead_encrypt,
3726                         .decrypt = aead_decrypt,
3727                         .ivsize = DES3_EDE_BLOCK_SIZE,
3728                         .maxauthsize = SHA384_DIGEST_SIZE,
3729                 },
3730                 .caam = {
3731                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3732                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3733                                            OP_ALG_AAI_HMAC_PRECOMP,
3734                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3735                 },
3736         },
3737         {
3738                 .aead = {
3739                         .base = {
3740                                 .cra_name = "echainiv(authenc(hmac(sha384),"
3741                                             "cbc(des3_ede)))",
3742                                 .cra_driver_name = "echainiv-authenc-"
3743                                                    "hmac-sha384-"
3744                                                    "cbc-des3_ede-caam",
3745                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3746                         },
3747                         .setkey = aead_setkey,
3748                         .setauthsize = aead_setauthsize,
3749                         .encrypt = aead_encrypt,
3750                         .decrypt = aead_decrypt,
3751                         .ivsize = DES3_EDE_BLOCK_SIZE,
3752                         .maxauthsize = SHA384_DIGEST_SIZE,
3753                 },
3754                 .caam = {
3755                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3756                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3757                                            OP_ALG_AAI_HMAC_PRECOMP,
3758                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
3759                         .geniv = true,
3760                 },
3761         },
3762         {
3763                 .aead = {
3764                         .base = {
3765                                 .cra_name = "authenc(hmac(sha512),"
3766                                             "cbc(des3_ede))",
3767                                 .cra_driver_name = "authenc-hmac-sha512-"
3768                                                    "cbc-des3_ede-caam",
3769                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3770                         },
3771                         .setkey = aead_setkey,
3772                         .setauthsize = aead_setauthsize,
3773                         .encrypt = aead_encrypt,
3774                         .decrypt = aead_decrypt,
3775                         .ivsize = DES3_EDE_BLOCK_SIZE,
3776                         .maxauthsize = SHA512_DIGEST_SIZE,
3777                 },
3778                 .caam = {
3779                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3780                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3781                                            OP_ALG_AAI_HMAC_PRECOMP,
3782                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3783                 },
3784         },
3785         {
3786                 .aead = {
3787                         .base = {
3788                                 .cra_name = "echainiv(authenc(hmac(sha512),"
3789                                             "cbc(des3_ede)))",
3790                                 .cra_driver_name = "echainiv-authenc-"
3791                                                    "hmac-sha512-"
3792                                                    "cbc-des3_ede-caam",
3793                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
3794                         },
3795                         .setkey = aead_setkey,
3796                         .setauthsize = aead_setauthsize,
3797                         .encrypt = aead_encrypt,
3798                         .decrypt = aead_decrypt,
3799                         .ivsize = DES3_EDE_BLOCK_SIZE,
3800                         .maxauthsize = SHA512_DIGEST_SIZE,
3801                 },
3802                 .caam = {
3803                         .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
3804                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3805                                            OP_ALG_AAI_HMAC_PRECOMP,
3806                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
3807                         .geniv = true,
3808                 },
3809         },
3810         {
3811                 .aead = {
3812                         .base = {
3813                                 .cra_name = "authenc(hmac(md5),cbc(des))",
3814                                 .cra_driver_name = "authenc-hmac-md5-"
3815                                                    "cbc-des-caam",
3816                                 .cra_blocksize = DES_BLOCK_SIZE,
3817                         },
3818                         .setkey = aead_setkey,
3819                         .setauthsize = aead_setauthsize,
3820                         .encrypt = aead_encrypt,
3821                         .decrypt = aead_decrypt,
3822                         .ivsize = DES_BLOCK_SIZE,
3823                         .maxauthsize = MD5_DIGEST_SIZE,
3824                 },
3825                 .caam = {
3826                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3827                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3828                                            OP_ALG_AAI_HMAC_PRECOMP,
3829                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3830                 },
3831         },
3832         {
3833                 .aead = {
3834                         .base = {
3835                                 .cra_name = "echainiv(authenc(hmac(md5),"
3836                                             "cbc(des)))",
3837                                 .cra_driver_name = "echainiv-authenc-hmac-md5-"
3838                                                    "cbc-des-caam",
3839                                 .cra_blocksize = DES_BLOCK_SIZE,
3840                         },
3841                         .setkey = aead_setkey,
3842                         .setauthsize = aead_setauthsize,
3843                         .encrypt = aead_encrypt,
3844                         .decrypt = aead_decrypt,
3845                         .ivsize = DES_BLOCK_SIZE,
3846                         .maxauthsize = MD5_DIGEST_SIZE,
3847                 },
3848                 .caam = {
3849                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3850                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3851                                            OP_ALG_AAI_HMAC_PRECOMP,
3852                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
3853                         .geniv = true,
3854                 },
3855         },
3856         {
3857                 .aead = {
3858                         .base = {
3859                                 .cra_name = "authenc(hmac(sha1),cbc(des))",
3860                                 .cra_driver_name = "authenc-hmac-sha1-"
3861                                                    "cbc-des-caam",
3862                                 .cra_blocksize = DES_BLOCK_SIZE,
3863                         },
3864                         .setkey = aead_setkey,
3865                         .setauthsize = aead_setauthsize,
3866                         .encrypt = aead_encrypt,
3867                         .decrypt = aead_decrypt,
3868                         .ivsize = DES_BLOCK_SIZE,
3869                         .maxauthsize = SHA1_DIGEST_SIZE,
3870                 },
3871                 .caam = {
3872                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3873                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3874                                            OP_ALG_AAI_HMAC_PRECOMP,
3875                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3876                 },
3877         },
3878         {
3879                 .aead = {
3880                         .base = {
3881                                 .cra_name = "echainiv(authenc(hmac(sha1),"
3882                                             "cbc(des)))",
3883                                 .cra_driver_name = "echainiv-authenc-"
3884                                                    "hmac-sha1-cbc-des-caam",
3885                                 .cra_blocksize = DES_BLOCK_SIZE,
3886                         },
3887                         .setkey = aead_setkey,
3888                         .setauthsize = aead_setauthsize,
3889                         .encrypt = aead_encrypt,
3890                         .decrypt = aead_decrypt,
3891                         .ivsize = DES_BLOCK_SIZE,
3892                         .maxauthsize = SHA1_DIGEST_SIZE,
3893                 },
3894                 .caam = {
3895                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3896                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3897                                            OP_ALG_AAI_HMAC_PRECOMP,
3898                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
3899                         .geniv = true,
3900                 },
3901         },
3902         {
3903                 .aead = {
3904                         .base = {
3905                                 .cra_name = "authenc(hmac(sha224),cbc(des))",
3906                                 .cra_driver_name = "authenc-hmac-sha224-"
3907                                                    "cbc-des-caam",
3908                                 .cra_blocksize = DES_BLOCK_SIZE,
3909                         },
3910                         .setkey = aead_setkey,
3911                         .setauthsize = aead_setauthsize,
3912                         .encrypt = aead_encrypt,
3913                         .decrypt = aead_decrypt,
3914                         .ivsize = DES_BLOCK_SIZE,
3915                         .maxauthsize = SHA224_DIGEST_SIZE,
3916                 },
3917                 .caam = {
3918                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3919                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3920                                            OP_ALG_AAI_HMAC_PRECOMP,
3921                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3922                 },
3923         },
3924         {
3925                 .aead = {
3926                         .base = {
3927                                 .cra_name = "echainiv(authenc(hmac(sha224),"
3928                                             "cbc(des)))",
3929                                 .cra_driver_name = "echainiv-authenc-"
3930                                                    "hmac-sha224-cbc-des-caam",
3931                                 .cra_blocksize = DES_BLOCK_SIZE,
3932                         },
3933                         .setkey = aead_setkey,
3934                         .setauthsize = aead_setauthsize,
3935                         .encrypt = aead_encrypt,
3936                         .decrypt = aead_decrypt,
3937                         .ivsize = DES_BLOCK_SIZE,
3938                         .maxauthsize = SHA224_DIGEST_SIZE,
3939                 },
3940                 .caam = {
3941                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3942                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3943                                            OP_ALG_AAI_HMAC_PRECOMP,
3944                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
3945                         .geniv = true,
3946                 },
3947         },
3948         {
3949                 .aead = {
3950                         .base = {
3951                                 .cra_name = "authenc(hmac(sha256),cbc(des))",
3952                                 .cra_driver_name = "authenc-hmac-sha256-"
3953                                                    "cbc-des-caam",
3954                                 .cra_blocksize = DES_BLOCK_SIZE,
3955                         },
3956                         .setkey = aead_setkey,
3957                         .setauthsize = aead_setauthsize,
3958                         .encrypt = aead_encrypt,
3959                         .decrypt = aead_decrypt,
3960                         .ivsize = DES_BLOCK_SIZE,
3961                         .maxauthsize = SHA256_DIGEST_SIZE,
3962                 },
3963                 .caam = {
3964                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3965                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3966                                            OP_ALG_AAI_HMAC_PRECOMP,
3967                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3968                 },
3969         },
3970         {
3971                 .aead = {
3972                         .base = {
3973                                 .cra_name = "echainiv(authenc(hmac(sha256),"
3974                                             "cbc(des)))",
3975                                 .cra_driver_name = "echainiv-authenc-"
3976                                                    "hmac-sha256-cbc-des-caam",
3977                                 .cra_blocksize = DES_BLOCK_SIZE,
3978                         },
3979                         .setkey = aead_setkey,
3980                         .setauthsize = aead_setauthsize,
3981                         .encrypt = aead_encrypt,
3982                         .decrypt = aead_decrypt,
3983                         .ivsize = DES_BLOCK_SIZE,
3984                         .maxauthsize = SHA256_DIGEST_SIZE,
3985                 },
3986                 .caam = {
3987                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3988                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3989                                            OP_ALG_AAI_HMAC_PRECOMP,
3990                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
3991                         .geniv = true,
3992                 },
3993         },
3994         {
3995                 .aead = {
3996                         .base = {
3997                                 .cra_name = "authenc(hmac(sha384),cbc(des))",
3998                                 .cra_driver_name = "authenc-hmac-sha384-"
3999                                                    "cbc-des-caam",
4000                                 .cra_blocksize = DES_BLOCK_SIZE,
4001                         },
4002                         .setkey = aead_setkey,
4003                         .setauthsize = aead_setauthsize,
4004                         .encrypt = aead_encrypt,
4005                         .decrypt = aead_decrypt,
4006                         .ivsize = DES_BLOCK_SIZE,
4007                         .maxauthsize = SHA384_DIGEST_SIZE,
4008                 },
4009                 .caam = {
4010                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
4011                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4012                                            OP_ALG_AAI_HMAC_PRECOMP,
4013                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4014                 },
4015         },
4016         {
4017                 .aead = {
4018                         .base = {
4019                                 .cra_name = "echainiv(authenc(hmac(sha384),"
4020                                             "cbc(des)))",
4021                                 .cra_driver_name = "echainiv-authenc-"
4022                                                    "hmac-sha384-cbc-des-caam",
4023                                 .cra_blocksize = DES_BLOCK_SIZE,
4024                         },
4025                         .setkey = aead_setkey,
4026                         .setauthsize = aead_setauthsize,
4027                         .encrypt = aead_encrypt,
4028                         .decrypt = aead_decrypt,
4029                         .ivsize = DES_BLOCK_SIZE,
4030                         .maxauthsize = SHA384_DIGEST_SIZE,
4031                 },
4032                 .caam = {
4033                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
4034                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4035                                            OP_ALG_AAI_HMAC_PRECOMP,
4036                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4037                         .geniv = true,
4038                 },
4039         },
4040         {
4041                 .aead = {
4042                         .base = {
4043                                 .cra_name = "authenc(hmac(sha512),cbc(des))",
4044                                 .cra_driver_name = "authenc-hmac-sha512-"
4045                                                    "cbc-des-caam",
4046                                 .cra_blocksize = DES_BLOCK_SIZE,
4047                         },
4048                         .setkey = aead_setkey,
4049                         .setauthsize = aead_setauthsize,
4050                         .encrypt = aead_encrypt,
4051                         .decrypt = aead_decrypt,
4052                         .ivsize = DES_BLOCK_SIZE,
4053                         .maxauthsize = SHA512_DIGEST_SIZE,
4054                 },
4055                 .caam = {
4056                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
4057                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4058                                            OP_ALG_AAI_HMAC_PRECOMP,
4059                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4060                 },
4061         },
4062         {
4063                 .aead = {
4064                         .base = {
4065                                 .cra_name = "echainiv(authenc(hmac(sha512),"
4066                                             "cbc(des)))",
4067                                 .cra_driver_name = "echainiv-authenc-"
4068                                                    "hmac-sha512-cbc-des-caam",
4069                                 .cra_blocksize = DES_BLOCK_SIZE,
4070                         },
4071                         .setkey = aead_setkey,
4072                         .setauthsize = aead_setauthsize,
4073                         .encrypt = aead_encrypt,
4074                         .decrypt = aead_decrypt,
4075                         .ivsize = DES_BLOCK_SIZE,
4076                         .maxauthsize = SHA512_DIGEST_SIZE,
4077                 },
4078                 .caam = {
4079                         .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
4080                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4081                                            OP_ALG_AAI_HMAC_PRECOMP,
4082                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4083                         .geniv = true,
4084                 },
4085         },
4086         {
4087                 .aead = {
4088                         .base = {
4089                                 .cra_name = "authenc(hmac(md5),"
4090                                             "rfc3686(ctr(aes)))",
4091                                 .cra_driver_name = "authenc-hmac-md5-"
4092                                                    "rfc3686-ctr-aes-caam",
4093                                 .cra_blocksize = 1,
4094                         },
4095                         .setkey = aead_setkey,
4096                         .setauthsize = aead_setauthsize,
4097                         .encrypt = aead_encrypt,
4098                         .decrypt = aead_decrypt,
4099                         .ivsize = CTR_RFC3686_IV_SIZE,
4100                         .maxauthsize = MD5_DIGEST_SIZE,
4101                 },
4102                 .caam = {
4103                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4104                                            OP_ALG_AAI_CTR_MOD128,
4105                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
4106                                            OP_ALG_AAI_HMAC_PRECOMP,
4107                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
4108                         .rfc3686 = true,
4109                 },
4110         },
4111         {
4112                 .aead = {
4113                         .base = {
4114                                 .cra_name = "seqiv(authenc("
4115                                             "hmac(md5),rfc3686(ctr(aes))))",
4116                                 .cra_driver_name = "seqiv-authenc-hmac-md5-"
4117                                                    "rfc3686-ctr-aes-caam",
4118                                 .cra_blocksize = 1,
4119                         },
4120                         .setkey = aead_setkey,
4121                         .setauthsize = aead_setauthsize,
4122                         .encrypt = aead_encrypt,
4123                         .decrypt = aead_decrypt,
4124                         .ivsize = CTR_RFC3686_IV_SIZE,
4125                         .maxauthsize = MD5_DIGEST_SIZE,
4126                 },
4127                 .caam = {
4128                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4129                                            OP_ALG_AAI_CTR_MOD128,
4130                         .class2_alg_type = OP_ALG_ALGSEL_MD5 |
4131                                            OP_ALG_AAI_HMAC_PRECOMP,
4132                         .alg_op = OP_ALG_ALGSEL_MD5 | OP_ALG_AAI_HMAC,
4133                         .rfc3686 = true,
4134                         .geniv = true,
4135                 },
4136         },
4137         {
4138                 .aead = {
4139                         .base = {
4140                                 .cra_name = "authenc(hmac(sha1),"
4141                                             "rfc3686(ctr(aes)))",
4142                                 .cra_driver_name = "authenc-hmac-sha1-"
4143                                                    "rfc3686-ctr-aes-caam",
4144                                 .cra_blocksize = 1,
4145                         },
4146                         .setkey = aead_setkey,
4147                         .setauthsize = aead_setauthsize,
4148                         .encrypt = aead_encrypt,
4149                         .decrypt = aead_decrypt,
4150                         .ivsize = CTR_RFC3686_IV_SIZE,
4151                         .maxauthsize = SHA1_DIGEST_SIZE,
4152                 },
4153                 .caam = {
4154                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4155                                            OP_ALG_AAI_CTR_MOD128,
4156                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
4157                                            OP_ALG_AAI_HMAC_PRECOMP,
4158                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
4159                         .rfc3686 = true,
4160                 },
4161         },
4162         {
4163                 .aead = {
4164                         .base = {
4165                                 .cra_name = "seqiv(authenc("
4166                                             "hmac(sha1),rfc3686(ctr(aes))))",
4167                                 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
4168                                                    "rfc3686-ctr-aes-caam",
4169                                 .cra_blocksize = 1,
4170                         },
4171                         .setkey = aead_setkey,
4172                         .setauthsize = aead_setauthsize,
4173                         .encrypt = aead_encrypt,
4174                         .decrypt = aead_decrypt,
4175                         .ivsize = CTR_RFC3686_IV_SIZE,
4176                         .maxauthsize = SHA1_DIGEST_SIZE,
4177                 },
4178                 .caam = {
4179                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4180                                            OP_ALG_AAI_CTR_MOD128,
4181                         .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
4182                                            OP_ALG_AAI_HMAC_PRECOMP,
4183                         .alg_op = OP_ALG_ALGSEL_SHA1 | OP_ALG_AAI_HMAC,
4184                         .rfc3686 = true,
4185                         .geniv = true,
4186                 },
4187         },
4188         {
4189                 .aead = {
4190                         .base = {
4191                                 .cra_name = "authenc(hmac(sha224),"
4192                                             "rfc3686(ctr(aes)))",
4193                                 .cra_driver_name = "authenc-hmac-sha224-"
4194                                                    "rfc3686-ctr-aes-caam",
4195                                 .cra_blocksize = 1,
4196                         },
4197                         .setkey = aead_setkey,
4198                         .setauthsize = aead_setauthsize,
4199                         .encrypt = aead_encrypt,
4200                         .decrypt = aead_decrypt,
4201                         .ivsize = CTR_RFC3686_IV_SIZE,
4202                         .maxauthsize = SHA224_DIGEST_SIZE,
4203                 },
4204                 .caam = {
4205                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4206                                            OP_ALG_AAI_CTR_MOD128,
4207                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
4208                                            OP_ALG_AAI_HMAC_PRECOMP,
4209                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
4210                         .rfc3686 = true,
4211                 },
4212         },
4213         {
4214                 .aead = {
4215                         .base = {
4216                                 .cra_name = "seqiv(authenc("
4217                                             "hmac(sha224),rfc3686(ctr(aes))))",
4218                                 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
4219                                                    "rfc3686-ctr-aes-caam",
4220                                 .cra_blocksize = 1,
4221                         },
4222                         .setkey = aead_setkey,
4223                         .setauthsize = aead_setauthsize,
4224                         .encrypt = aead_encrypt,
4225                         .decrypt = aead_decrypt,
4226                         .ivsize = CTR_RFC3686_IV_SIZE,
4227                         .maxauthsize = SHA224_DIGEST_SIZE,
4228                 },
4229                 .caam = {
4230                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4231                                            OP_ALG_AAI_CTR_MOD128,
4232                         .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
4233                                            OP_ALG_AAI_HMAC_PRECOMP,
4234                         .alg_op = OP_ALG_ALGSEL_SHA224 | OP_ALG_AAI_HMAC,
4235                         .rfc3686 = true,
4236                         .geniv = true,
4237                 },
4238         },
4239         {
4240                 .aead = {
4241                         .base = {
4242                                 .cra_name = "authenc(hmac(sha256),"
4243                                             "rfc3686(ctr(aes)))",
4244                                 .cra_driver_name = "authenc-hmac-sha256-"
4245                                                    "rfc3686-ctr-aes-caam",
4246                                 .cra_blocksize = 1,
4247                         },
4248                         .setkey = aead_setkey,
4249                         .setauthsize = aead_setauthsize,
4250                         .encrypt = aead_encrypt,
4251                         .decrypt = aead_decrypt,
4252                         .ivsize = CTR_RFC3686_IV_SIZE,
4253                         .maxauthsize = SHA256_DIGEST_SIZE,
4254                 },
4255                 .caam = {
4256                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4257                                            OP_ALG_AAI_CTR_MOD128,
4258                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
4259                                            OP_ALG_AAI_HMAC_PRECOMP,
4260                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
4261                         .rfc3686 = true,
4262                 },
4263         },
4264         {
4265                 .aead = {
4266                         .base = {
4267                                 .cra_name = "seqiv(authenc(hmac(sha256),"
4268                                             "rfc3686(ctr(aes))))",
4269                                 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
4270                                                    "rfc3686-ctr-aes-caam",
4271                                 .cra_blocksize = 1,
4272                         },
4273                         .setkey = aead_setkey,
4274                         .setauthsize = aead_setauthsize,
4275                         .encrypt = aead_encrypt,
4276                         .decrypt = aead_decrypt,
4277                         .ivsize = CTR_RFC3686_IV_SIZE,
4278                         .maxauthsize = SHA256_DIGEST_SIZE,
4279                 },
4280                 .caam = {
4281                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4282                                            OP_ALG_AAI_CTR_MOD128,
4283                         .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
4284                                            OP_ALG_AAI_HMAC_PRECOMP,
4285                         .alg_op = OP_ALG_ALGSEL_SHA256 | OP_ALG_AAI_HMAC,
4286                         .rfc3686 = true,
4287                         .geniv = true,
4288                 },
4289         },
4290         {
4291                 .aead = {
4292                         .base = {
4293                                 .cra_name = "authenc(hmac(sha384),"
4294                                             "rfc3686(ctr(aes)))",
4295                                 .cra_driver_name = "authenc-hmac-sha384-"
4296                                                    "rfc3686-ctr-aes-caam",
4297                                 .cra_blocksize = 1,
4298                         },
4299                         .setkey = aead_setkey,
4300                         .setauthsize = aead_setauthsize,
4301                         .encrypt = aead_encrypt,
4302                         .decrypt = aead_decrypt,
4303                         .ivsize = CTR_RFC3686_IV_SIZE,
4304                         .maxauthsize = SHA384_DIGEST_SIZE,
4305                 },
4306                 .caam = {
4307                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4308                                            OP_ALG_AAI_CTR_MOD128,
4309                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4310                                            OP_ALG_AAI_HMAC_PRECOMP,
4311                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4312                         .rfc3686 = true,
4313                 },
4314         },
4315         {
4316                 .aead = {
4317                         .base = {
4318                                 .cra_name = "seqiv(authenc(hmac(sha384),"
4319                                             "rfc3686(ctr(aes))))",
4320                                 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
4321                                                    "rfc3686-ctr-aes-caam",
4322                                 .cra_blocksize = 1,
4323                         },
4324                         .setkey = aead_setkey,
4325                         .setauthsize = aead_setauthsize,
4326                         .encrypt = aead_encrypt,
4327                         .decrypt = aead_decrypt,
4328                         .ivsize = CTR_RFC3686_IV_SIZE,
4329                         .maxauthsize = SHA384_DIGEST_SIZE,
4330                 },
4331                 .caam = {
4332                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4333                                            OP_ALG_AAI_CTR_MOD128,
4334                         .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
4335                                            OP_ALG_AAI_HMAC_PRECOMP,
4336                         .alg_op = OP_ALG_ALGSEL_SHA384 | OP_ALG_AAI_HMAC,
4337                         .rfc3686 = true,
4338                         .geniv = true,
4339                 },
4340         },
4341         {
4342                 .aead = {
4343                         .base = {
4344                                 .cra_name = "authenc(hmac(sha512),"
4345                                             "rfc3686(ctr(aes)))",
4346                                 .cra_driver_name = "authenc-hmac-sha512-"
4347                                                    "rfc3686-ctr-aes-caam",
4348                                 .cra_blocksize = 1,
4349                         },
4350                         .setkey = aead_setkey,
4351                         .setauthsize = aead_setauthsize,
4352                         .encrypt = aead_encrypt,
4353                         .decrypt = aead_decrypt,
4354                         .ivsize = CTR_RFC3686_IV_SIZE,
4355                         .maxauthsize = SHA512_DIGEST_SIZE,
4356                 },
4357                 .caam = {
4358                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4359                                            OP_ALG_AAI_CTR_MOD128,
4360                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4361                                            OP_ALG_AAI_HMAC_PRECOMP,
4362                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4363                         .rfc3686 = true,
4364                 },
4365         },
4366         {
4367                 .aead = {
4368                         .base = {
4369                                 .cra_name = "seqiv(authenc(hmac(sha512),"
4370                                             "rfc3686(ctr(aes))))",
4371                                 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
4372                                                    "rfc3686-ctr-aes-caam",
4373                                 .cra_blocksize = 1,
4374                         },
4375                         .setkey = aead_setkey,
4376                         .setauthsize = aead_setauthsize,
4377                         .encrypt = aead_encrypt,
4378                         .decrypt = aead_decrypt,
4379                         .ivsize = CTR_RFC3686_IV_SIZE,
4380                         .maxauthsize = SHA512_DIGEST_SIZE,
4381                 },
4382                 .caam = {
4383                         .class1_alg_type = OP_ALG_ALGSEL_AES |
4384                                            OP_ALG_AAI_CTR_MOD128,
4385                         .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
4386                                            OP_ALG_AAI_HMAC_PRECOMP,
4387                         .alg_op = OP_ALG_ALGSEL_SHA512 | OP_ALG_AAI_HMAC,
4388                         .rfc3686 = true,
4389                         .geniv = true,
4390                 },
4391         },
4392 };
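
/*
 * driver_aeads[] ends here. Each entry pairs a generic struct aead_alg
 * (names, blocksize, ivsize, maxauthsize and the setkey/setauthsize/
 * encrypt/decrypt hooks) with the CAAM-specific OPERATION templates in
 * .caam, plus the geniv/rfc3686 flags consumed when the shared
 * descriptors are constructed.
 *
 * Illustrative sketch only (not part of the driver): once registered,
 * these transforms are reached through the normal kernel AEAD API, e.g.
 *
 *	tfm = crypto_alloc_aead("authenc(hmac(sha1),cbc(des))", 0, 0);
 *	crypto_aead_setauthsize(tfm, authsize);
 *	crypto_aead_setkey(tfm, key, keylen);	(authenc-formatted key blob)
 *	...submit an aead_request via crypto_aead_encrypt()/decrypt()...
 *
 * The CAAM implementation is normally selected over software fallbacks
 * because of the high cra_priority assigned below.
 */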
4393
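/*
 * caam_crypto_alg - per-algorithm wrapper for the legacy (ablkcipher /
 * givcipher) entries built from driver_algs[] by caam_alg_alloc() below;
 * it carries the generic crypto_alg, the CAAM template bits and the
 * alg_list linkage used for unregistration.
 */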
4394 struct caam_crypto_alg {
4395         struct crypto_alg crypto_alg;
4396         struct list_head entry;
4397         struct caam_alg_entry caam;
4398 };
4399
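/*
 * Per-transform initialization shared by the ablkcipher and AEAD paths:
 * reserve a job ring for this tfm and cache the OPERATION command header
 * templates (class1_alg_type, class2_alg_type, alg_op) taken from the
 * matching caam_alg_entry.
 */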
4400 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
4401 {
4402         ctx->jrdev = caam_jr_alloc();
4403         if (IS_ERR(ctx->jrdev)) {
4404                 pr_err("Job Ring Device allocation for transform failed\n");
4405                 return PTR_ERR(ctx->jrdev);
4406         }
4407
4408         /* copy descriptor header template value */
4409         ctx->class1_alg_type = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
4410         ctx->class2_alg_type = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
4411         ctx->alg_op = OP_TYPE_CLASS2_ALG | caam->alg_op;
4412
4413         return 0;
4414 }
4415
4416 static int caam_cra_init(struct crypto_tfm *tfm)
4417 {
4418         struct crypto_alg *alg = tfm->__crt_alg;
4419         struct caam_crypto_alg *caam_alg =
4420                  container_of(alg, struct caam_crypto_alg, crypto_alg);
4421         struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
4422
4423         return caam_init_common(ctx, &caam_alg->caam);
4424 }
4425
4426 static int caam_aead_init(struct crypto_aead *tfm)
4427 {
4428         struct aead_alg *alg = crypto_aead_alg(tfm);
4429         struct caam_aead_alg *caam_alg =
4430                  container_of(alg, struct caam_aead_alg, aead);
4431         struct caam_ctx *ctx = crypto_aead_ctx(tfm);
4432
4433         return caam_init_common(ctx, &caam_alg->caam);
4434 }
4435
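/*
 * Per-transform teardown: undo the DMA mappings created for the shared
 * descriptors and the key material (if any), then return the job ring
 * reserved in caam_init_common().
 */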
4436 static void caam_exit_common(struct caam_ctx *ctx)
4437 {
4438         if (ctx->sh_desc_enc_dma &&
4439             !dma_mapping_error(ctx->jrdev, ctx->sh_desc_enc_dma))
4440                 dma_unmap_single(ctx->jrdev, ctx->sh_desc_enc_dma,
4441                                  desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
4442         if (ctx->sh_desc_dec_dma &&
4443             !dma_mapping_error(ctx->jrdev, ctx->sh_desc_dec_dma))
4444                 dma_unmap_single(ctx->jrdev, ctx->sh_desc_dec_dma,
4445                                  desc_bytes(ctx->sh_desc_dec), DMA_TO_DEVICE);
4446         if (ctx->sh_desc_givenc_dma &&
4447             !dma_mapping_error(ctx->jrdev, ctx->sh_desc_givenc_dma))
4448                 dma_unmap_single(ctx->jrdev, ctx->sh_desc_givenc_dma,
4449                                  desc_bytes(ctx->sh_desc_givenc),
4450                                  DMA_TO_DEVICE);
4451         if (ctx->key_dma &&
4452             !dma_mapping_error(ctx->jrdev, ctx->key_dma))
4453                 dma_unmap_single(ctx->jrdev, ctx->key_dma,
4454                                  ctx->enckeylen + ctx->split_key_pad_len,
4455                                  DMA_TO_DEVICE);
4456
4457         caam_jr_free(ctx->jrdev);
4458 }
4459
4460 static void caam_cra_exit(struct crypto_tfm *tfm)
4461 {
4462         caam_exit_common(crypto_tfm_ctx(tfm));
4463 }
4464
4465 static void caam_aead_exit(struct crypto_aead *tfm)
4466 {
4467         caam_exit_common(crypto_aead_ctx(tfm));
4468 }
4469
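/*
 * Module exit: unregister every AEAD that was successfully registered and
 * tear down the legacy algorithms accumulated on alg_list (the alg_list.next
 * check covers the case where init bailed out before the list was set up).
 */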
4470 static void __exit caam_algapi_exit(void)
4471 {
4472
4473         struct caam_crypto_alg *t_alg, *n;
4474         int i;
4475
4476         for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
4477                 struct caam_aead_alg *t_alg = driver_aeads + i;
4478
4479                 if (t_alg->registered)
4480                         crypto_unregister_aead(&t_alg->aead);
4481         }
4482
4483         if (!alg_list.next)
4484                 return;
4485
4486         list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
4487                 crypto_unregister_alg(&t_alg->crypto_alg);
4488                 list_del(&t_alg->entry);
4489                 kfree(t_alg);
4490         }
4491 }
4492
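/*
 * Build a struct caam_crypto_alg from a driver_algs[] template: fill in the
 * generic crypto_alg fields, hook up caam_cra_init/caam_cra_exit, select the
 * givcipher or ablkcipher type requested by the template and copy the CAAM
 * descriptor template bits for later use by the tfm init path.
 */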
4493 static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
4494                                               *template)
4495 {
4496         struct caam_crypto_alg *t_alg;
4497         struct crypto_alg *alg;
4498
4499         t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
4500         if (!t_alg) {
4501                 pr_err("failed to allocate t_alg\n");
4502                 return ERR_PTR(-ENOMEM);
4503         }
4504
4505         alg = &t_alg->crypto_alg;
4506
4507         snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
4508         snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
4509                  template->driver_name);
4510         alg->cra_module = THIS_MODULE;
4511         alg->cra_init = caam_cra_init;
4512         alg->cra_exit = caam_cra_exit;
4513         alg->cra_priority = CAAM_CRA_PRIORITY;
4514         alg->cra_blocksize = template->blocksize;
4515         alg->cra_alignmask = 0;
4516         alg->cra_ctxsize = sizeof(struct caam_ctx);
4517         alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
4518                          template->type;
4519         switch (template->type) {
4520         case CRYPTO_ALG_TYPE_GIVCIPHER:
4521                 alg->cra_type = &crypto_givcipher_type;
4522                 alg->cra_ablkcipher = template->template_ablkcipher;
4523                 break;
4524         case CRYPTO_ALG_TYPE_ABLKCIPHER:
4525                 alg->cra_type = &crypto_ablkcipher_type;
4526                 alg->cra_ablkcipher = template->template_ablkcipher;
4527                 break;
4528         }
4529
4530         t_alg->caam.class1_alg_type = template->class1_alg_type;
4531         t_alg->caam.class2_alg_type = template->class2_alg_type;
4532         t_alg->caam.alg_op = template->alg_op;
4533
4534         return t_alg;
4535 }
4536
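/*
 * Fill in the boilerplate fields shared by every driver_aeads[] entry
 * (module, priority, context size, async/HW-only flags) and hook up the
 * AEAD init/exit callbacks before registration.
 */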
4537 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
4538 {
4539         struct aead_alg *alg = &t_alg->aead;
4540
4541         alg->base.cra_module = THIS_MODULE;
4542         alg->base.cra_priority = CAAM_CRA_PRIORITY;
4543         alg->base.cra_ctxsize = sizeof(struct caam_ctx);
4544         alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
4545
4546         alg->init = caam_aead_init;
4547         alg->exit = caam_aead_exit;
4548 }
4549
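/*
 * Module init: locate the CAAM controller node, read the CHA instantiation
 * and version registers to learn which DES/AES/MD engines (and which AES
 * modes and digest sizes) are actually present, and register only the
 * algorithms the hardware can back.
 */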
4550 static int __init caam_algapi_init(void)
4551 {
4552         struct device_node *dev_node;
4553         struct platform_device *pdev;
4554         struct device *ctrldev;
4555         struct caam_drv_private *priv;
4556         int i = 0, err = 0;
4557         u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
4558         unsigned int md_limit = SHA512_DIGEST_SIZE;
4559         bool registered = false;
4560
4561         dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
4562         if (!dev_node) {
4563                 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
4564                 if (!dev_node)
4565                         return -ENODEV;
4566         }
4567
4568         pdev = of_find_device_by_node(dev_node);
4569         if (!pdev) {
4570                 of_node_put(dev_node);
4571                 return -ENODEV;
4572         }
4573
4574         ctrldev = &pdev->dev;
4575         priv = dev_get_drvdata(ctrldev);
4576         of_node_put(dev_node);
4577
4578         /*
4579          * If priv is NULL, it's probably because the caam driver wasn't
4580          * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
4581          */
4582         if (!priv)
4583                 return -ENODEV;
4584
4585
4586         INIT_LIST_HEAD(&alg_list);
4587
4588         /*
4589          * Register crypto algorithms the device supports.
4590          * First, detect presence and attributes of DES, AES, and MD blocks.
4591          */
4592         cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
4593         cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
4594         des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
4595         aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
4596         md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
4597
4598         /* If MD is present, limit digest size based on LP256 */
4599         if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
4600                 md_limit = SHA256_DIGEST_SIZE;
4601
4602         for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
4603                 struct caam_crypto_alg *t_alg;
4604                 struct caam_alg_template *alg = driver_algs + i;
4605                 u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;
4606
4607                 /* Skip DES algorithms if not supported by device */
4608                 if (!des_inst &&
4609                     ((alg_sel == OP_ALG_ALGSEL_3DES) ||
4610                      (alg_sel == OP_ALG_ALGSEL_DES)))
4611                         continue;
4612
4613                 /* Skip AES algorithms if not supported by device */
4614                 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
4615                         continue;
4616
4617                 /*
4618                  * Check support for AES modes not available
4619                  * on LP devices.
4620                  */
4621                 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
4622                         if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
4623                              OP_ALG_AAI_XTS)
4624                                 continue;
4625
4626                 t_alg = caam_alg_alloc(alg);
4627                 if (IS_ERR(t_alg)) {
4628                         err = PTR_ERR(t_alg);
4629                         pr_warn("%s alg allocation failed\n", alg->driver_name);
4630                         continue;
4631                 }
4632
4633                 err = crypto_register_alg(&t_alg->crypto_alg);
4634                 if (err) {
4635                         pr_warn("%s alg registration failed\n",
4636                                 t_alg->crypto_alg.cra_driver_name);
4637                         kfree(t_alg);
4638                         continue;
4639                 }
4640
4641                 list_add_tail(&t_alg->entry, &alg_list);
4642                 registered = true;
4643         }
4644
4645         for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
4646                 struct caam_aead_alg *t_alg = driver_aeads + i;
4647                 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
4648                                  OP_ALG_ALGSEL_MASK;
4649                 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
4650                                  OP_ALG_ALGSEL_MASK;
4651                 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
4652
4653                 /* Skip DES algorithms if not supported by device */
4654                 if (!des_inst &&
4655                     ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
4656                      (c1_alg_sel == OP_ALG_ALGSEL_DES)))
4657                         continue;
4658
4659                 /* Skip AES algorithms if not supported by device */
4660                 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
4661                         continue;
4662
4663                 /*
4664                  * Check support for AES modes not available
4665                  * on LP devices.
4666                  */
4667                 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
4668                         if (alg_aai == OP_ALG_AAI_GCM)
4669                                 continue;
4670
4671                 /*
4672                  * Skip algorithms requiring message digests
4673                  * if MD or MD size is not supported by device.
4674                  */
4675                 if (c2_alg_sel &&
4676                     (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
4677                         continue;
4678
4679                 caam_aead_alg_init(t_alg);
4680
4681                 err = crypto_register_aead(&t_alg->aead);
4682                 if (err) {
4683                         pr_warn("%s alg registration failed\n",
4684                                 t_alg->aead.base.cra_driver_name);
4685                         continue;
4686                 }
4687
4688                 t_alg->registered = true;
4689                 registered = true;
4690         }
4691
4692         if (registered)
4693                 pr_info("caam algorithms registered in /proc/crypto\n");
4694
4695         return err;
4696 }
4697
4698 module_init(caam_algapi_init);
4699 module_exit(caam_algapi_exit);
4700
4701 MODULE_LICENSE("GPL");
4702 MODULE_DESCRIPTION("FSL CAAM support for crypto API");
4703 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");