/*
 * AMCC SoC PPC4xx Crypto Driver
 *
 * Copyright (c) 2008 Applied Micro Circuits Corporation.
 * All rights reserved. James Hsiao <jhsiao@amcc.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * This file implements the Linux crypto algorithms for the AMCC SoC
 * PPC4xx crypto accelerator.
 */

#include <linux/kernel.h>
#include <linux/interrupt.h>
#include <linux/spinlock_types.h>
#include <linux/scatterlist.h>
#include <linux/crypto.h>
#include <linux/hash.h>
#include <crypto/internal/hash.h>
#include <linux/dma-mapping.h>
#include <crypto/algapi.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/gcm.h>
#include <crypto/sha.h>
#include <crypto/ctr.h>
#include <crypto/skcipher.h>
#include "crypto4xx_reg_def.h"
#include "crypto4xx_core.h"
#include "crypto4xx_sa.h"

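/*
 * The security association (SA) carries two 32-bit command words that tell
 * the packet engine how to process a request. These helpers assemble the
 * individual bitfields of each command word.
 */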
static void set_dynamic_sa_command_0(struct dynamic_sa_ctl *sa, u32 save_h,
                                     u32 save_iv, u32 ld_h, u32 ld_iv,
                                     u32 hdr_proc, u32 h, u32 c, u32 pad_type,
                                     u32 op_grp, u32 op, u32 dir)
{
        sa->sa_command_0.w = 0;
        sa->sa_command_0.bf.save_hash_state = save_h;
        sa->sa_command_0.bf.save_iv = save_iv;
        sa->sa_command_0.bf.load_hash_state = ld_h;
        sa->sa_command_0.bf.load_iv = ld_iv;
        sa->sa_command_0.bf.hdr_proc = hdr_proc;
        sa->sa_command_0.bf.hash_alg = h;
        sa->sa_command_0.bf.cipher_alg = c;
        sa->sa_command_0.bf.pad_type = pad_type & 3;
        sa->sa_command_0.bf.extend_pad = pad_type >> 2;
        sa->sa_command_0.bf.op_group = op_grp;
        sa->sa_command_0.bf.opcode = op;
        sa->sa_command_0.bf.dir = dir;
}

static void set_dynamic_sa_command_1(struct dynamic_sa_ctl *sa, u32 cm,
                                     u32 hmac_mc, u32 cfb, u32 esn,
                                     u32 sn_mask, u32 mute, u32 cp_pad,
                                     u32 cp_pay, u32 cp_hdr)
{
        sa->sa_command_1.w = 0;
        sa->sa_command_1.bf.crypto_mode31 = (cm & 4) >> 2;
        sa->sa_command_1.bf.crypto_mode9_8 = cm & 3;
        sa->sa_command_1.bf.feedback_mode = cfb;
        sa->sa_command_1.bf.sa_rev = 1;
        sa->sa_command_1.bf.hmac_muting = hmac_mc;
        sa->sa_command_1.bf.extended_seq_num = esn;
        sa->sa_command_1.bf.seq_num_mask = sn_mask;
        sa->sa_command_1.bf.mutable_bit_proc = mute;
        sa->sa_command_1.bf.copy_pad = cp_pad;
        sa->sa_command_1.bf.copy_payload = cp_pay;
        sa->sa_command_1.bf.copy_hdr = cp_hdr;
}

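/*
 * Common skcipher entry point: optionally enforces AES block alignment,
 * converts the request IV to the engine's little-endian word layout and
 * queues a packet descriptor using the inbound (decrypt) or outbound
 * (encrypt) SA.
 */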
static inline int crypto4xx_crypt(struct skcipher_request *req,
                                  const unsigned int ivlen, bool decrypt,
                                  bool check_blocksize)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        __le32 iv[AES_IV_SIZE];

        if (check_blocksize && !IS_ALIGNED(req->cryptlen, AES_BLOCK_SIZE))
                return -EINVAL;

        if (ivlen)
                crypto4xx_memcpy_to_le32(iv, req->iv, ivlen);

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                req->cryptlen, iv, ivlen, decrypt ? ctx->sa_in : ctx->sa_out,
                ctx->sa_len, 0, NULL);
}

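/*
 * Thin wrappers binding the (ivlen, decrypt, check_blocksize) combinations
 * used by the registered block and stream cipher modes.
 */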
int crypto4xx_encrypt_noiv_block(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, 0, false, true);
}

int crypto4xx_encrypt_iv_stream(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, AES_IV_SIZE, false, false);
}

int crypto4xx_decrypt_noiv_block(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, 0, true, true);
}

int crypto4xx_decrypt_iv_stream(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, AES_IV_SIZE, true, false);
}

int crypto4xx_encrypt_iv_block(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, AES_IV_SIZE, false, true);
}

int crypto4xx_decrypt_iv_block(struct skcipher_request *req)
{
        return crypto4xx_crypt(req, AES_IV_SIZE, true, true);
}

/*
 * AES Functions
 */
static int crypto4xx_setkey_aes(struct crypto_skcipher *cipher,
                                const u8 *key,
                                unsigned int keylen,
                                unsigned char cm,
                                u8 fb)
{
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        struct dynamic_sa_ctl *sa;
        int    rc;

        if (keylen != AES_KEYSIZE_256 &&
                keylen != AES_KEYSIZE_192 && keylen != AES_KEYSIZE_128) {
                crypto_skcipher_set_flags(cipher,
                                CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        /* Create SA */
        if (ctx->sa_in || ctx->sa_out)
                crypto4xx_free_sa(ctx);

        rc = crypto4xx_alloc_sa(ctx, SA_AES128_LEN + (keylen-16) / 4);
        if (rc)
                return rc;

        /* Setup SA */
        sa = ctx->sa_in;

        set_dynamic_sa_command_0(sa, SA_NOT_SAVE_HASH, (cm == CRYPTO_MODE_ECB ?
                                 SA_NOT_SAVE_IV : SA_SAVE_IV),
                                 SA_NOT_LOAD_HASH, (cm == CRYPTO_MODE_ECB ?
                                 SA_LOAD_IV_FROM_SA : SA_LOAD_IV_FROM_STATE),
                                 SA_NO_HEADER_PROC, SA_HASH_ALG_NULL,
                                 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
                                 SA_OP_GROUP_BASIC, SA_OPCODE_DECRYPT,
                                 DIR_INBOUND);

        set_dynamic_sa_command_1(sa, cm, SA_HASH_MODE_HASH,
                                 fb, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
                                 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);
        crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
                                 key, keylen);
        sa->sa_contents.w = SA_AES_CONTENTS | (keylen << 2);
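        /* key_len is in 64-bit units: 2, 3 or 4 for AES-128/192/256 */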
        sa->sa_command_1.bf.key_len = keylen >> 3;

        memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
        sa = ctx->sa_out;
        sa->sa_command_0.bf.dir = DIR_OUTBOUND;
        /*
         * SA_OPCODE_ENCRYPT is the same value as SA_OPCODE_DECRYPT;
         * it's the DIR_(IN|OUT)BOUND that matters.
         */
        sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT;

        return 0;
}

int crypto4xx_setkey_aes_cbc(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CBC,
                                    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_cfb(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_CFB,
                                    CRYPTO_FEEDBACK_MODE_128BIT_CFB);
}

int crypto4xx_setkey_aes_ecb(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_ECB,
                                    CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_setkey_aes_ofb(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        return crypto4xx_setkey_aes(cipher, key, keylen, CRYPTO_MODE_OFB,
                                    CRYPTO_FEEDBACK_MODE_64BIT_OFB);
}

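/*
 * RFC 3686 keys carry a 4-byte nonce appended to the AES key proper; strip
 * it off for the engine key and keep it for building the counter block.
 */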
int crypto4xx_setkey_rfc3686(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        int rc;

        rc = crypto4xx_setkey_aes(cipher, key, keylen - CTR_RFC3686_NONCE_SIZE,
                CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
        if (rc)
                return rc;

        ctx->iv_nonce = cpu_to_le32p((u32 *)&key[keylen -
                                                 CTR_RFC3686_NONCE_SIZE]);

        return 0;
}

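/*
 * Per RFC 3686 the counter block is nonce (4 bytes) || per-request IV
 * (8 bytes) || block counter starting at 1, assembled below as four
 * little-endian words for the engine.
 */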
int crypto4xx_rfc3686_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        __le32 iv[AES_IV_SIZE / 4] = {
                ctx->iv_nonce,
                cpu_to_le32p((u32 *) req->iv),
                cpu_to_le32p((u32 *) (req->iv + 4)),
                cpu_to_le32(1) };

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                                  req->cryptlen, iv, AES_IV_SIZE,
                                  ctx->sa_out, ctx->sa_len, 0, NULL);
}

int crypto4xx_rfc3686_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        __le32 iv[AES_IV_SIZE / 4] = {
                ctx->iv_nonce,
                cpu_to_le32p((u32 *) req->iv),
                cpu_to_le32p((u32 *) (req->iv + 4)),
                cpu_to_le32(1) };

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                                  req->cryptlen, iv, AES_IV_SIZE,
                                  ctx->sa_out, ctx->sa_len, 0, NULL);
}

static int
crypto4xx_ctr_crypt(struct skcipher_request *req, bool encrypt)
{
        struct crypto_skcipher *cipher = crypto_skcipher_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        size_t iv_len = crypto_skcipher_ivsize(cipher);
        unsigned int counter = be32_to_cpup((__be32 *)(req->iv + iv_len - 4));
        unsigned int nblks = ALIGN(req->cryptlen, AES_BLOCK_SIZE) /
                        AES_BLOCK_SIZE;

        /*
         * The hardware uses only the last 32 bits of the IV as a counter,
         * while the kernel tests (aes_ctr_enc_tv_template[4] for example)
         * expect the whole IV to act as one. So fall back to software when
         * the 32-bit counter is going to overflow, e.g. a four-block request
         * starting at a counter of 0xfffffffe.
         */
        if (counter + nblks < counter) {
                struct skcipher_request *subreq = skcipher_request_ctx(req);
                int ret;

                skcipher_request_set_tfm(subreq, ctx->sw_cipher.cipher);
                skcipher_request_set_callback(subreq, req->base.flags,
                        NULL, NULL);
                skcipher_request_set_crypt(subreq, req->src, req->dst,
                        req->cryptlen, req->iv);
                ret = encrypt ? crypto_skcipher_encrypt(subreq)
                        : crypto_skcipher_decrypt(subreq);
                skcipher_request_zero(subreq);
                return ret;
        }

        return encrypt ? crypto4xx_encrypt_iv_stream(req)
                       : crypto4xx_decrypt_iv_stream(req);
}

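/*
 * Mirror the request flags into the software fallback tfm and copy its
 * result flags back to the caller, as the fallback may reject the key.
 */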
static int crypto4xx_sk_setup_fallback(struct crypto4xx_ctx *ctx,
                                       struct crypto_skcipher *cipher,
                                       const u8 *key,
                                       unsigned int keylen)
{
        int rc;

        crypto_skcipher_clear_flags(ctx->sw_cipher.cipher,
                                    CRYPTO_TFM_REQ_MASK);
        crypto_skcipher_set_flags(ctx->sw_cipher.cipher,
                crypto_skcipher_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
        rc = crypto_skcipher_setkey(ctx->sw_cipher.cipher, key, keylen);
        crypto_skcipher_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
        crypto_skcipher_set_flags(cipher,
                crypto_skcipher_get_flags(ctx->sw_cipher.cipher) &
                        CRYPTO_TFM_RES_MASK);

        return rc;
}

int crypto4xx_setkey_aes_ctr(struct crypto_skcipher *cipher,
                             const u8 *key, unsigned int keylen)
{
        struct crypto4xx_ctx *ctx = crypto_skcipher_ctx(cipher);
        int rc;

        rc = crypto4xx_sk_setup_fallback(ctx, cipher, key, keylen);
        if (rc)
                return rc;

        return crypto4xx_setkey_aes(cipher, key, keylen,
                CRYPTO_MODE_CTR, CRYPTO_FEEDBACK_MODE_NO_FB);
}

int crypto4xx_encrypt_ctr(struct skcipher_request *req)
{
        return crypto4xx_ctr_crypt(req, true);
}

int crypto4xx_decrypt_ctr(struct skcipher_request *req)
{
        return crypto4xx_ctr_crypt(req, false);
}

static inline bool crypto4xx_aead_need_fallback(struct aead_request *req,
                                                unsigned int len,
                                                bool is_ccm, bool decrypt)
{
        struct crypto_aead *aead = crypto_aead_reqtfm(req);

        /* authsize has to be a multiple of 4 */
        if (aead->authsize & 3)
                return true;

        /*
         * The hardware does not handle cases where the plaintext
         * is shorter than a block.
         */
        if (len < AES_BLOCK_SIZE)
                return true;

        /* assoc len needs to be a multiple of 4 and <= 1020 */
        if (req->assoclen & 0x3 || req->assoclen > 1020)
                return true;

        /* CCM supports only counter field lengths of 2 and 4 bytes */
        if (is_ccm && !(req->iv[0] == 1 || req->iv[0] == 3))
                return true;

        return false;
}

static int crypto4xx_aead_fallback(struct aead_request *req,
        struct crypto4xx_ctx *ctx, bool do_decrypt)
{
        struct aead_request *subreq = aead_request_ctx(req);

        aead_request_set_tfm(subreq, ctx->sw_cipher.aead);
        aead_request_set_callback(subreq, req->base.flags,
                                  req->base.complete, req->base.data);
        aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
                               req->iv);
        aead_request_set_ad(subreq, req->assoclen);
        return do_decrypt ? crypto_aead_decrypt(subreq) :
                            crypto_aead_encrypt(subreq);
}

static int crypto4xx_aead_setup_fallback(struct crypto4xx_ctx *ctx,
                                         struct crypto_aead *cipher,
                                         const u8 *key,
                                         unsigned int keylen)
{
        int rc;

        crypto_aead_clear_flags(ctx->sw_cipher.aead, CRYPTO_TFM_REQ_MASK);
        crypto_aead_set_flags(ctx->sw_cipher.aead,
                crypto_aead_get_flags(cipher) & CRYPTO_TFM_REQ_MASK);
        rc = crypto_aead_setkey(ctx->sw_cipher.aead, key, keylen);
        crypto_aead_clear_flags(cipher, CRYPTO_TFM_RES_MASK);
        crypto_aead_set_flags(cipher,
                crypto_aead_get_flags(ctx->sw_cipher.aead) &
                        CRYPTO_TFM_RES_MASK);

        return rc;
}

/*
 * AES-CCM Functions
 */

int crypto4xx_setkey_aes_ccm(struct crypto_aead *cipher, const u8 *key,
                             unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
        struct dynamic_sa_ctl *sa;
        int rc = 0;

        rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
        if (rc)
                return rc;

        if (ctx->sa_in || ctx->sa_out)
                crypto4xx_free_sa(ctx);

        rc = crypto4xx_alloc_sa(ctx, SA_AES128_CCM_LEN + (keylen - 16) / 4);
        if (rc)
                return rc;

        /* Setup SA */
        sa = (struct dynamic_sa_ctl *) ctx->sa_in;
        sa->sa_contents.w = SA_AES_CCM_CONTENTS | (keylen << 2);

        set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
                                 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
                                 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
                                 SA_CIPHER_ALG_AES,
                                 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
                                 SA_OPCODE_HASH_DECRYPT, DIR_INBOUND);

        set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
                                 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
                                 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);

        sa->sa_command_1.bf.key_len = keylen >> 3;

        crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa), key, keylen);

        memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
        sa = (struct dynamic_sa_ctl *) ctx->sa_out;

        set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
                                 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
                                 SA_NO_HEADER_PROC, SA_HASH_ALG_CBC_MAC,
                                 SA_CIPHER_ALG_AES,
                                 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
                                 SA_OPCODE_ENCRYPT_HASH, DIR_OUTBOUND);

        set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
                                 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
                                 SA_COPY_PAD, SA_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);

        sa->sa_command_1.bf.key_len = keylen >> 3;
        return 0;
}

static int crypto4xx_crypt_aes_ccm(struct aead_request *req, bool decrypt)
{
        struct crypto4xx_ctx *ctx  = crypto_tfm_ctx(req->base.tfm);
        struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
        struct crypto_aead *aead = crypto_aead_reqtfm(req);
        __le32 iv[16];
        u32 tmp_sa[SA_AES128_CCM_LEN + 4];
        struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *)tmp_sa;
        unsigned int len = req->cryptlen;

        if (decrypt)
                len -= crypto_aead_authsize(aead);

        if (crypto4xx_aead_need_fallback(req, len, true, decrypt))
                return crypto4xx_aead_fallback(req, ctx, decrypt);

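        /*
         * Work on an on-stack copy of the SA so the per-request digest
         * length and counter mode can be set without touching the session
         * SA shared with other requests.
         */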
        memcpy(tmp_sa, decrypt ? ctx->sa_in : ctx->sa_out, ctx->sa_len * 4);
        sa->sa_command_0.bf.digest_len = crypto_aead_authsize(aead) >> 2;

        if (req->iv[0] == 1) {
                /* CRYPTO_MODE_AES_ICM */
                sa->sa_command_1.bf.crypto_mode9_8 = 1;
        }

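        /*
         * Build the CCM counter block A0: req->iv[0] holds L - 1, so the
         * last req->iv[0] + 1 bytes form the counter field. Copy only the
         * flags byte and nonce and leave the counter itself zeroed.
         */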
        iv[3] = cpu_to_le32(0);
        crypto4xx_memcpy_to_le32(iv, req->iv, 16 - (req->iv[0] + 1));

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                                  len, iv, sizeof(iv),
                                  sa, ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_ccm(struct aead_request *req)
{
        return crypto4xx_crypt_aes_ccm(req, false);
}

int crypto4xx_decrypt_aes_ccm(struct aead_request *req)
{
        return crypto4xx_crypt_aes_ccm(req, true);
}

int crypto4xx_setauthsize_aead(struct crypto_aead *cipher,
                               unsigned int authsize)
{
        struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);

        return crypto_aead_setauthsize(ctx->sw_cipher.aead, authsize);
}

/*
 * AES-GCM Functions
 */

static int crypto4xx_aes_gcm_validate_keylen(unsigned int keylen)
{
        switch (keylen) {
        case 16:
        case 24:
        case 32:
                return 0;
        default:
                return -EINVAL;
        }
}

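/*
 * The engine expects the GHASH hash key H = E_K(0^128) precomputed in the
 * SA's inner digest, so derive it here with a temporary software AES cipher.
 */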
static int crypto4xx_compute_gcm_hash_key_sw(__le32 *hash_start, const u8 *key,
                                             unsigned int keylen)
{
        struct crypto_cipher *aes_tfm = NULL;
        uint8_t src[16] = { 0 };
        int rc = 0;

        aes_tfm = crypto_alloc_cipher("aes", 0, CRYPTO_ALG_ASYNC |
                                      CRYPTO_ALG_NEED_FALLBACK);
        if (IS_ERR(aes_tfm)) {
                rc = PTR_ERR(aes_tfm);
                pr_warn("could not load aes cipher driver: %d\n", rc);
                return rc;
        }

        rc = crypto_cipher_setkey(aes_tfm, key, keylen);
        if (rc) {
                pr_err("setkey() failed: %d\n", rc);
                goto out;
        }

        crypto_cipher_encrypt_one(aes_tfm, src, src);
        crypto4xx_memcpy_to_le32(hash_start, src, 16);
out:
        crypto_free_cipher(aes_tfm);
        return rc;
}

int crypto4xx_setkey_aes_gcm(struct crypto_aead *cipher,
                             const u8 *key, unsigned int keylen)
{
        struct crypto_tfm *tfm = crypto_aead_tfm(cipher);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
        struct dynamic_sa_ctl *sa;
        int    rc = 0;

        if (crypto4xx_aes_gcm_validate_keylen(keylen) != 0) {
                crypto_aead_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
                return -EINVAL;
        }

        rc = crypto4xx_aead_setup_fallback(ctx, cipher, key, keylen);
        if (rc)
                return rc;

        if (ctx->sa_in || ctx->sa_out)
                crypto4xx_free_sa(ctx);

        rc = crypto4xx_alloc_sa(ctx, SA_AES128_GCM_LEN + (keylen - 16) / 4);
        if (rc)
                return rc;

        sa  = (struct dynamic_sa_ctl *) ctx->sa_in;

        sa->sa_contents.w = SA_AES_GCM_CONTENTS | (keylen << 2);
        set_dynamic_sa_command_0(sa, SA_SAVE_HASH, SA_NOT_SAVE_IV,
                                 SA_LOAD_HASH_FROM_SA, SA_LOAD_IV_FROM_STATE,
                                 SA_NO_HEADER_PROC, SA_HASH_ALG_GHASH,
                                 SA_CIPHER_ALG_AES, SA_PAD_TYPE_ZERO,
                                 SA_OP_GROUP_BASIC, SA_OPCODE_HASH_DECRYPT,
                                 DIR_INBOUND);
        set_dynamic_sa_command_1(sa, CRYPTO_MODE_CTR, SA_HASH_MODE_HASH,
                                 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_ON, SA_MC_DISABLE,
                                 SA_NOT_COPY_PAD, SA_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);

        sa->sa_command_1.bf.key_len = keylen >> 3;

        crypto4xx_memcpy_to_le32(get_dynamic_sa_key_field(sa),
                                 key, keylen);

        rc = crypto4xx_compute_gcm_hash_key_sw(get_dynamic_sa_inner_digest(sa),
                key, keylen);
        if (rc) {
                pr_err("GCM hash key setting failed = %d\n", rc);
                goto err;
        }

        memcpy(ctx->sa_out, ctx->sa_in, ctx->sa_len * 4);
        sa = (struct dynamic_sa_ctl *) ctx->sa_out;
        sa->sa_command_0.bf.dir = DIR_OUTBOUND;
        sa->sa_command_0.bf.opcode = SA_OPCODE_ENCRYPT_HASH;

        return 0;
err:
        crypto4xx_free_sa(ctx);
        return rc;
}

static inline int crypto4xx_crypt_aes_gcm(struct aead_request *req,
                                          bool decrypt)
{
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
        __le32 iv[4];
        unsigned int len = req->cryptlen;

        if (decrypt)
                len -= crypto_aead_authsize(crypto_aead_reqtfm(req));

        if (crypto4xx_aead_need_fallback(req, len, false, decrypt))
                return crypto4xx_aead_fallback(req, ctx, decrypt);

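        /*
         * With a 96-bit IV, GCM defines the initial counter block J0 as
         * IV || 0^31 || 1, i.e. the block counter starts at 1.
         */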
        crypto4xx_memcpy_to_le32(iv, req->iv, GCM_AES_IV_SIZE);
        iv[3] = cpu_to_le32(1);

        return crypto4xx_build_pd(&req->base, ctx, req->src, req->dst,
                                  len, iv, sizeof(iv),
                                  decrypt ? ctx->sa_in : ctx->sa_out,
                                  ctx->sa_len, req->assoclen, rctx->dst);
}

int crypto4xx_encrypt_aes_gcm(struct aead_request *req)
{
        return crypto4xx_crypt_aes_gcm(req, false);
}

int crypto4xx_decrypt_aes_gcm(struct aead_request *req)
{
        return crypto4xx_crypt_aes_gcm(req, true);
}

/*
 * HASH SHA1 Functions
 */
static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm,
                                   unsigned int sa_len,
                                   unsigned char ha,
                                   unsigned char hm)
{
        struct crypto_alg *alg = tfm->__crt_alg;
        struct crypto4xx_alg *my_alg;
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm);
        struct dynamic_sa_hash160 *sa;
        int rc;

        my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg,
                              alg.u.hash);
        ctx->dev   = my_alg->dev;

        /* Create SA */
        if (ctx->sa_in || ctx->sa_out)
                crypto4xx_free_sa(ctx);

        rc = crypto4xx_alloc_sa(ctx, sa_len);
        if (rc)
                return rc;

        crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
                                 sizeof(struct crypto4xx_ctx));
        sa = (struct dynamic_sa_hash160 *)ctx->sa_in;
        set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV,
                                 SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA,
                                 SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL,
                                 SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC,
                                 SA_OPCODE_HASH, DIR_INBOUND);
        set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH,
                                 CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF,
                                 SA_SEQ_MASK_OFF, SA_MC_ENABLE,
                                 SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD,
                                 SA_NOT_COPY_HDR);
        /* Need to zero hash digest in SA */
        memset(sa->inner_digest, 0, sizeof(sa->inner_digest));
        memset(sa->outer_digest, 0, sizeof(sa->outer_digest));

        return 0;
}

int crypto4xx_hash_init(struct ahash_request *req)
{
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        int ds;
        struct dynamic_sa_ctl *sa;

        sa = ctx->sa_in;
        ds = crypto_ahash_digestsize(
                        __crypto_ahash_cast(req->base.tfm));
        sa->sa_command_0.bf.digest_len = ds >> 2;
        sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA;

        return 0;
}
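/*
 * Each ->update() submits the request's scatterlist to the engine with the
 * digest written straight to req->result, so ->final() below has nothing
 * left to do.
 */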
int crypto4xx_hash_update(struct ahash_request *req)
{
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct scatterlist dst;
        unsigned int ds = crypto_ahash_digestsize(ahash);

        sg_init_one(&dst, req->result, ds);

        return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
                                  req->nbytes, NULL, 0, ctx->sa_in,
                                  ctx->sa_len, 0, NULL);
}

int crypto4xx_hash_final(struct ahash_request *req)
{
        return 0;
}

int crypto4xx_hash_digest(struct ahash_request *req)
{
        struct crypto_ahash *ahash = crypto_ahash_reqtfm(req);
        struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct scatterlist dst;
        unsigned int ds = crypto_ahash_digestsize(ahash);

        sg_init_one(&dst, req->result, ds);

        return crypto4xx_build_pd(&req->base, ctx, req->src, &dst,
                                  req->nbytes, NULL, 0, ctx->sa_in,
                                  ctx->sa_len, 0, NULL);
}

/*
 * SHA1 Algorithm
 */
int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm)
{
        return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1,
                                       SA_HASH_MODE_HASH);