GNU Linux-libre 4.19.286-gnu1
[releases.git] / drivers / crypto / ccree / cc_cipher.c
1 // SPDX-License-Identifier: GPL-2.0
2 /* Copyright (C) 2012-2018 ARM Limited or its affiliates. */
3
4 #include <linux/kernel.h>
5 #include <linux/module.h>
6 #include <crypto/algapi.h>
7 #include <crypto/internal/skcipher.h>
8 #include <crypto/des.h>
9 #include <crypto/xts.h>
10 #include <crypto/scatterwalk.h>
11
12 #include "cc_driver.h"
13 #include "cc_lli_defs.h"
14 #include "cc_buffer_mgr.h"
15 #include "cc_cipher.h"
16 #include "cc_request_mgr.h"
17
18 #define MAX_ABLKCIPHER_SEQ_LEN 6
19
20 #define template_skcipher       template_u.skcipher
21
/* Driver-global cipher state: the list of registered cipher algorithms. */
struct cc_cipher_handle {
	struct list_head alg_list;	/* cc_crypto_alg entries, see cc_driver.h */
};
25
/* Kernel-held copy of a user-provided key plus its DMA mapping. */
struct cc_user_key_info {
	u8 *key;			/* key material, kmalloc'ed in cc_cipher_init() */
	dma_addr_t key_dma_addr;	/* DMA address of @key (DMA_TO_DEVICE) */
};
30
/* On-chip key-slot assignments used when the key never leaves the HW. */
struct cc_hw_key_info {
	enum cc_hw_crypto_key key1_slot;	/* primary key slot */
	enum cc_hw_crypto_key key2_slot;	/* second (XEX/tweak) slot, two-key modes only */
};
35
/* Per-tfm cipher context. */
struct cc_cipher_ctx {
	struct cc_drvdata *drvdata;	/* owning driver instance */
	int keylen;			/* current key length in bytes */
	int key_round_number;		/* NOTE(review): not referenced in this file */
	int cipher_mode;		/* DRV_CIPHER_* mode of the registered alg */
	int flow_mode;			/* S_DIN_to_AES or S_DIN_to_DES */
	unsigned int flags;
	bool hw_key;			/* true when keyed via cc_cipher_sethkey() */
	struct cc_user_key_info user;	/* DMA-able key copy (SW keys) */
	struct cc_hw_key_info hw;	/* key slot numbers (HW keys) */
	struct crypto_shash *shash_tfm;	/* sha256 tfm, allocated for ESSIV only */
};
48
49 static void cc_cipher_complete(struct device *dev, void *cc_req, int err);
50
51 static inline bool cc_is_hw_key(struct crypto_tfm *tfm)
52 {
53         struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
54
55         return ctx_p->hw_key;
56 }
57
58 static int validate_keys_sizes(struct cc_cipher_ctx *ctx_p, u32 size)
59 {
60         switch (ctx_p->flow_mode) {
61         case S_DIN_to_AES:
62                 switch (size) {
63                 case CC_AES_128_BIT_KEY_SIZE:
64                 case CC_AES_192_BIT_KEY_SIZE:
65                         if (ctx_p->cipher_mode != DRV_CIPHER_XTS &&
66                             ctx_p->cipher_mode != DRV_CIPHER_ESSIV &&
67                             ctx_p->cipher_mode != DRV_CIPHER_BITLOCKER)
68                                 return 0;
69                         break;
70                 case CC_AES_256_BIT_KEY_SIZE:
71                         return 0;
72                 case (CC_AES_192_BIT_KEY_SIZE * 2):
73                 case (CC_AES_256_BIT_KEY_SIZE * 2):
74                         if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
75                             ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
76                             ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER)
77                                 return 0;
78                         break;
79                 default:
80                         break;
81                 }
82                 break;
83         case S_DIN_to_DES:
84                 if (size == DES3_EDE_KEY_SIZE || size == DES_KEY_SIZE)
85                         return 0;
86                 break;
87         default:
88                 break;
89         }
90         return -EINVAL;
91 }
92
93 static int validate_data_size(struct cc_cipher_ctx *ctx_p,
94                               unsigned int size)
95 {
96         switch (ctx_p->flow_mode) {
97         case S_DIN_to_AES:
98                 switch (ctx_p->cipher_mode) {
99                 case DRV_CIPHER_XTS:
100                         if (size >= AES_BLOCK_SIZE &&
101                             IS_ALIGNED(size, AES_BLOCK_SIZE))
102                                 return 0;
103                         break;
104                 case DRV_CIPHER_CBC_CTS:
105                         if (size >= AES_BLOCK_SIZE)
106                                 return 0;
107                         break;
108                 case DRV_CIPHER_OFB:
109                 case DRV_CIPHER_CTR:
110                                 return 0;
111                 case DRV_CIPHER_ECB:
112                 case DRV_CIPHER_CBC:
113                 case DRV_CIPHER_ESSIV:
114                 case DRV_CIPHER_BITLOCKER:
115                         if (IS_ALIGNED(size, AES_BLOCK_SIZE))
116                                 return 0;
117                         break;
118                 default:
119                         break;
120                 }
121                 break;
122         case S_DIN_to_DES:
123                 if (IS_ALIGNED(size, DES_BLOCK_SIZE))
124                         return 0;
125                 break;
126         default:
127                 break;
128         }
129         return -EINVAL;
130 }
131
/*
 * Init an skcipher tfm: allocate the DMA-mapped key buffer and, for
 * ESSIV mode, the software sha256 used to derive the second key half.
 * Returns 0 on success or a negative error code.
 */
static int cc_cipher_init(struct crypto_tfm *tfm)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct cc_crypto_alg *cc_alg =
			container_of(tfm->__crt_alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	struct device *dev = drvdata_to_dev(cc_alg->drvdata);
	/* Buffer must hold the largest key this alg accepts. */
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;

	dev_dbg(dev, "Initializing context @%p for %s\n", ctx_p,
		crypto_tfm_alg_name(tfm));

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct cipher_req_ctx));

	/* Cache the registered alg's parameters in the tfm context. */
	ctx_p->cipher_mode = cc_alg->cipher_mode;
	ctx_p->flow_mode = cc_alg->flow_mode;
	ctx_p->drvdata = cc_alg->drvdata;

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Alloc hash tfm for essiv */
		ctx_p->shash_tfm = crypto_alloc_shash("sha256-generic", 0, 0);
		if (IS_ERR(ctx_p->shash_tfm)) {
			dev_err(dev, "Error allocating hash tfm for ESSIV.\n");
			return PTR_ERR(ctx_p->shash_tfm);
		}
	}

	/* Allocate key buffer, cache line aligned */
	ctx_p->user.key = kmalloc(max_key_buf_size, GFP_KERNEL);
	if (!ctx_p->user.key)
		goto free_shash;

	dev_dbg(dev, "Allocated key buffer in context. key=@%p\n",
		ctx_p->user.key);

	/* Map key buffer */
	ctx_p->user.key_dma_addr = dma_map_single(dev, (void *)ctx_p->user.key,
						  max_key_buf_size,
						  DMA_TO_DEVICE);
	if (dma_mapping_error(dev, ctx_p->user.key_dma_addr)) {
		dev_err(dev, "Mapping Key %u B at va=%pK for DMA failed\n",
			max_key_buf_size, ctx_p->user.key);
		goto free_key;
	}
	dev_dbg(dev, "Mapped key %u B at va=%pK to dma=%pad\n",
		max_key_buf_size, ctx_p->user.key, &ctx_p->user.key_dma_addr);

	return 0;

free_key:
	kfree(ctx_p->user.key);
free_shash:
	/*
	 * NOTE(review): for non-ESSIV modes shash_tfm was never allocated;
	 * this relies on the tfm context being zero-initialized so that
	 * crypto_free_shash(NULL) is a no-op — confirm for this kernel.
	 */
	crypto_free_shash(ctx_p->shash_tfm);

	return -ENOMEM;
}
189
/*
 * Destroy an skcipher tfm: release the ESSIV hash (if any), then unmap
 * and free the key buffer. Mirrors cc_cipher_init() in reverse order.
 */
static void cc_cipher_exit(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct cc_crypto_alg *cc_alg =
			container_of(alg, struct cc_crypto_alg,
				     skcipher_alg.base);
	unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);

	dev_dbg(dev, "Clearing context @%p for %s\n",
		crypto_tfm_ctx(tfm), crypto_tfm_alg_name(tfm));

	if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
		/* Free hash tfm for essiv */
		crypto_free_shash(ctx_p->shash_tfm);
		ctx_p->shash_tfm = NULL;
	}

	/* Unmap key buffer */
	dma_unmap_single(dev, ctx_p->user.key_dma_addr, max_key_buf_size,
			 DMA_TO_DEVICE);
	dev_dbg(dev, "Unmapped key buffer key_dma_addr=%pad\n",
		&ctx_p->user.key_dma_addr);

	/* Free key buffer in context (kzfree zeroizes the key material) */
	kzfree(ctx_p->user.key);
	dev_dbg(dev, "Free key buffer in context. key=@%p\n", ctx_p->user.key);
}
219
/* Layout of a triple-DES key as its three single-DES components. */
struct tdes_keys {
	u8	key1[DES_KEY_SIZE];
	u8	key2[DES_KEY_SIZE];
	u8	key3[DES_KEY_SIZE];
};
225
226 static enum cc_hw_crypto_key cc_slot_to_hw_key(int slot_num)
227 {
228         switch (slot_num) {
229         case 0:
230                 return KFDE0_KEY;
231         case 1:
232                 return KFDE1_KEY;
233         case 2:
234                 return KFDE2_KEY;
235         case 3:
236                 return KFDE3_KEY;
237         }
238         return END_OF_KEYS;
239 }
240
/*
 * Set a "hardware key": @key does not carry key material but a
 * struct cc_hkey_info token naming one (or, for two-key modes, two)
 * on-chip key slots plus the effective key length. AES flows only.
 * Returns 0 on success or -EINVAL for a malformed token.
 */
static int cc_cipher_sethkey(struct crypto_skcipher *sktfm, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hkey_info hki;

	dev_dbg(dev, "Setting HW key in context @%p for %s. keylen=%u\n",
		ctx_p, crypto_tfm_alg_name(tfm), keylen);
	dump_byte_array("key", (u8 *)key, keylen);

	/* STAT_PHASE_0: Init and sanity checks */

	/* This check the size of the hardware key token */
	if (keylen != sizeof(hki)) {
		dev_err(dev, "Unsupported HW key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	if (ctx_p->flow_mode != S_DIN_to_AES) {
		dev_err(dev, "HW key not supported for non-AES flows\n");
		return -EINVAL;
	}

	memcpy(&hki, key, keylen);

	/* The real key len for crypto op is the size of the HW key
	 * referenced by the HW key slot, not the hardware key token
	 */
	keylen = hki.keylen;

	if (validate_keys_sizes(ctx_p, keylen)) {
		dev_err(dev, "Unsupported key size %d.\n", keylen);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ctx_p->hw.key1_slot = cc_slot_to_hw_key(hki.hw_key1);
	if (ctx_p->hw.key1_slot == END_OF_KEYS) {
		dev_err(dev, "Unsupported hw key1 number (%d)\n", hki.hw_key1);
		return -EINVAL;
	}

	/* Two-key modes additionally need a distinct second slot. */
	if (ctx_p->cipher_mode == DRV_CIPHER_XTS ||
	    ctx_p->cipher_mode == DRV_CIPHER_ESSIV ||
	    ctx_p->cipher_mode == DRV_CIPHER_BITLOCKER) {
		if (hki.hw_key1 == hki.hw_key2) {
			dev_err(dev, "Illegal hw key numbers (%d,%d)\n",
				hki.hw_key1, hki.hw_key2);
			return -EINVAL;
		}
		ctx_p->hw.key2_slot = cc_slot_to_hw_key(hki.hw_key2);
		if (ctx_p->hw.key2_slot == END_OF_KEYS) {
			dev_err(dev, "Unsupported hw key2 number (%d)\n",
				hki.hw_key2);
			return -EINVAL;
		}
	}

	/* Commit: descriptors will program slots instead of key DMA. */
	ctx_p->keylen = keylen;
	ctx_p->hw_key = true;
	dev_dbg(dev, "cc_is_hw_key ret 0");

	return 0;
}
308
309 static int cc_cipher_setkey(struct crypto_skcipher *sktfm, const u8 *key,
310                             unsigned int keylen)
311 {
312         struct crypto_tfm *tfm = crypto_skcipher_tfm(sktfm);
313         struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
314         struct device *dev = drvdata_to_dev(ctx_p->drvdata);
315         struct cc_crypto_alg *cc_alg =
316                         container_of(tfm->__crt_alg, struct cc_crypto_alg,
317                                      skcipher_alg.base);
318         unsigned int max_key_buf_size = cc_alg->skcipher_alg.max_keysize;
319
320         dev_dbg(dev, "Setting key in context @%p for %s. keylen=%u\n",
321                 ctx_p, crypto_tfm_alg_name(tfm), keylen);
322         dump_byte_array("key", (u8 *)key, keylen);
323
324         /* STAT_PHASE_0: Init and sanity checks */
325
326         if (validate_keys_sizes(ctx_p, keylen)) {
327                 dev_err(dev, "Unsupported key size %d.\n", keylen);
328                 crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
329                 return -EINVAL;
330         }
331
332         ctx_p->hw_key = false;
333
334         /*
335          * Verify DES weak keys
336          * Note that we're dropping the expanded key since the
337          * HW does the expansion on its own.
338          */
339         if (ctx_p->flow_mode == S_DIN_to_DES) {
340                 u32 tmp[DES3_EDE_EXPKEY_WORDS];
341                 if (keylen == DES3_EDE_KEY_SIZE &&
342                     __des3_ede_setkey(tmp, &tfm->crt_flags, key,
343                                       DES3_EDE_KEY_SIZE)) {
344                         dev_dbg(dev, "weak 3DES key");
345                         return -EINVAL;
346                 } else if (!des_ekey(tmp, key) &&
347                     (crypto_tfm_get_flags(tfm) & CRYPTO_TFM_REQ_WEAK_KEY)) {
348                         tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
349                         dev_dbg(dev, "weak DES key");
350                         return -EINVAL;
351                 }
352         }
353
354         if (ctx_p->cipher_mode == DRV_CIPHER_XTS &&
355             xts_check_key(tfm, key, keylen)) {
356                 dev_dbg(dev, "weak XTS key");
357                 return -EINVAL;
358         }
359
360         /* STAT_PHASE_1: Copy key to ctx */
361         dma_sync_single_for_cpu(dev, ctx_p->user.key_dma_addr,
362                                 max_key_buf_size, DMA_TO_DEVICE);
363
364         memcpy(ctx_p->user.key, key, keylen);
365         if (keylen == 24)
366                 memset(ctx_p->user.key + 24, 0, CC_AES_KEY_SIZE_MAX - 24);
367
368         if (ctx_p->cipher_mode == DRV_CIPHER_ESSIV) {
369                 /* sha256 for key2 - use sw implementation */
370                 int key_len = keylen >> 1;
371                 int err;
372
373                 SHASH_DESC_ON_STACK(desc, ctx_p->shash_tfm);
374
375                 desc->tfm = ctx_p->shash_tfm;
376
377                 err = crypto_shash_digest(desc, ctx_p->user.key, key_len,
378                                           ctx_p->user.key + key_len);
379                 if (err) {
380                         dev_err(dev, "Failed to hash ESSIV key.\n");
381                         return err;
382                 }
383         }
384         dma_sync_single_for_device(dev, ctx_p->user.key_dma_addr,
385                                    max_key_buf_size, DMA_TO_DEVICE);
386         ctx_p->keylen = keylen;
387
388         dev_dbg(dev, "return safely");
389         return 0;
390 }
391
/*
 * Append the HW descriptors that load the cipher key and IV/state for
 * this request into desc[], advancing *seq_size. Data-movement
 * descriptors are added separately by cc_setup_cipher_data().
 */
static void cc_setup_cipher_desc(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 unsigned int ivsize, unsigned int nbytes,
				 struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	int cipher_mode = ctx_p->cipher_mode;
	int flow_mode = ctx_p->flow_mode;
	int direction = req_ctx->gen_ctx.op_type;
	dma_addr_t key_dma_addr = ctx_p->user.key_dma_addr;
	unsigned int key_len = ctx_p->keylen;
	dma_addr_t iv_dma_addr = req_ctx->gen_ctx.iv_dma_addr;
	unsigned int du_size = nbytes;

	struct cc_crypto_alg *cc_alg =
		container_of(tfm->__crt_alg, struct cc_crypto_alg,
			     skcipher_alg.base);

	/* Fixed data-unit algs (XTS/ESSIV/BITLOCKER variants) override
	 * the XEX data unit size; otherwise the whole request is one unit.
	 */
	if (cc_alg->data_unit)
		du_size = cc_alg->data_unit;

	switch (cipher_mode) {
	case DRV_CIPHER_CBC:
	case DRV_CIPHER_CBC_CTS:
	case DRV_CIPHER_CTR:
	case DRV_CIPHER_OFB:
		/* Load cipher state */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr, ivsize,
			     NS_BIT);
		set_cipher_config0(&desc[*seq_size], direction);
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		/* CTR/OFB keep their counter/feedback in STATE1;
		 * CBC/CTS keep the chaining value in STATE0.
		 */
		if (cipher_mode == DRV_CIPHER_CTR ||
		    cipher_mode == DRV_CIPHER_OFB) {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		} else {
			set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE0);
		}
		(*seq_size)++;
		/*FALLTHROUGH*/
	case DRV_CIPHER_ECB:
		/* Load key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (flow_mode == S_DIN_to_AES) {
			if (cc_is_hw_key(tfm)) {
				/* Key stays on-chip; program its slot. */
				set_hw_crypto_key(&desc[*seq_size],
						  ctx_p->hw.key1_slot);
			} else {
				/* AES-192 keys are zero-padded in the ctx
				 * buffer; the HW reads AES_MAX_KEY_SIZE.
				 */
				set_din_type(&desc[*seq_size], DMA_DLLI,
					     key_dma_addr, ((key_len == 24) ?
							    AES_MAX_KEY_SIZE :
							    key_len), NS_BIT);
			}
			set_key_size_aes(&desc[*seq_size], key_len);
		} else {
			/*des*/
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     key_len, NS_BIT);
			set_key_size_des(&desc[*seq_size], key_len);
		}
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;
		break;
	case DRV_CIPHER_XTS:
	case DRV_CIPHER_ESSIV:
	case DRV_CIPHER_BITLOCKER:
		/* Two-key modes: first half of the ctx key is the data key. */
		/* Load AES key */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key1_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI, key_dma_addr,
				     (key_len / 2), NS_BIT);
		}
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_KEY0);
		(*seq_size)++;

		/* load XEX key (second half / second slot: the tweak key) */
		hw_desc_init(&desc[*seq_size]);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		if (cc_is_hw_key(tfm)) {
			set_hw_crypto_key(&desc[*seq_size],
					  ctx_p->hw.key2_slot);
		} else {
			set_din_type(&desc[*seq_size], DMA_DLLI,
				     (key_dma_addr + (key_len / 2)),
				     (key_len / 2), NS_BIT);
		}
		set_xex_data_unit_size(&desc[*seq_size], du_size);
		set_flow_mode(&desc[*seq_size], S_DIN_to_AES2);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_XEX_KEY);
		(*seq_size)++;

		/* Set state (the tweak/sector IV) */
		hw_desc_init(&desc[*seq_size]);
		set_setup_mode(&desc[*seq_size], SETUP_LOAD_STATE1);
		set_cipher_mode(&desc[*seq_size], cipher_mode);
		set_cipher_config0(&desc[*seq_size], direction);
		set_key_size_aes(&desc[*seq_size], (key_len / 2));
		set_flow_mode(&desc[*seq_size], flow_mode);
		set_din_type(&desc[*seq_size], DMA_DLLI, iv_dma_addr,
			     CC_AES_BLOCK_SIZE, NS_BIT);
		(*seq_size)++;
		break;
	default:
		dev_err(dev, "Unsupported cipher mode (%d)\n", cipher_mode);
	}
}
513
/*
 * Append the data-movement descriptors for this request: a single DLLI
 * descriptor when src and dst each map to one contiguous DMA address,
 * otherwise a BYPASS descriptor that copies the MLLI table into SRAM
 * followed by an MLLI-driven processing descriptor.
 */
static void cc_setup_cipher_data(struct crypto_tfm *tfm,
				 struct cipher_req_ctx *req_ctx,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes,
				 void *areq, struct cc_hw_desc desc[],
				 unsigned int *seq_size)
{
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	unsigned int flow_mode = ctx_p->flow_mode;

	/* Translate the setup flow mode into the DIN->DOUT flow. */
	switch (ctx_p->flow_mode) {
	case S_DIN_to_AES:
		flow_mode = DIN_AES_DOUT;
		break;
	case S_DIN_to_DES:
		flow_mode = DIN_DES_DOUT;
		break;
	default:
		dev_err(dev, "invalid flow mode, flow_mode = %d\n", flow_mode);
		return;
	}
	/* Process */
	if (req_ctx->dma_buf_type == CC_DMA_BUF_DLLI) {
		/* Contiguous case: one descriptor moves all the data. */
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(src), nbytes);
		dev_dbg(dev, " data params addr %pad length 0x%X\n",
			&sg_dma_address(dst), nbytes);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI, sg_dma_address(src),
			     nbytes, NS_BIT);
		set_dout_dlli(&desc[*seq_size], sg_dma_address(dst),
			      nbytes, NS_BIT, (!areq ? 0 : 1));
		/* Async requests interrupt on the last descriptor. */
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	} else {
		/* bypass: first stage the MLLI table into engine SRAM */
		dev_dbg(dev, " bypass params addr %pad length 0x%X addr 0x%08X\n",
			&req_ctx->mlli_params.mlli_dma_addr,
			req_ctx->mlli_params.mlli_len,
			(unsigned int)ctx_p->drvdata->mlli_sram_addr);
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_DLLI,
			     req_ctx->mlli_params.mlli_dma_addr,
			     req_ctx->mlli_params.mlli_len, NS_BIT);
		set_dout_sram(&desc[*seq_size],
			      ctx_p->drvdata->mlli_sram_addr,
			      req_ctx->mlli_params.mlli_len);
		set_flow_mode(&desc[*seq_size], BYPASS);
		(*seq_size)++;

		/* Then process via the SRAM-resident MLLI table. */
		hw_desc_init(&desc[*seq_size]);
		set_din_type(&desc[*seq_size], DMA_MLLI,
			     ctx_p->drvdata->mlli_sram_addr,
			     req_ctx->in_mlli_nents, NS_BIT);
		if (req_ctx->out_nents == 0) {
			/* In-place: input and output share one MLLI table. */
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr);
			set_dout_mlli(&desc[*seq_size],
				      ctx_p->drvdata->mlli_sram_addr,
				      req_ctx->in_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		} else {
			/* Distinct dst: its MLLI entries follow the input's. */
			dev_dbg(dev, " din/dout params addr 0x%08X addr 0x%08X\n",
				(unsigned int)ctx_p->drvdata->mlli_sram_addr,
				(unsigned int)ctx_p->drvdata->mlli_sram_addr +
				(u32)LLI_ENTRY_BYTE_SIZE * req_ctx->in_nents);
			set_dout_mlli(&desc[*seq_size],
				      (ctx_p->drvdata->mlli_sram_addr +
				       (LLI_ENTRY_BYTE_SIZE *
					req_ctx->in_mlli_nents)),
				      req_ctx->out_mlli_nents, NS_BIT,
				      (!areq ? 0 : 1));
		}
		if (areq)
			set_queue_last_ind(ctx_p->drvdata, &desc[*seq_size]);

		set_flow_mode(&desc[*seq_size], flow_mode);
		(*seq_size)++;
	}
}
599
600 /*
601  * Update a CTR-AES 128 bit counter
602  */
603 static void cc_update_ctr(u8 *ctr, unsigned int increment)
604 {
605         if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
606             IS_ALIGNED((unsigned long)ctr, 8)) {
607
608                 __be64 *high_be = (__be64 *)ctr;
609                 __be64 *low_be = high_be + 1;
610                 u64 orig_low = __be64_to_cpu(*low_be);
611                 u64 new_low = orig_low + (u64)increment;
612
613                 *low_be = __cpu_to_be64(new_low);
614
615                 if (new_low < orig_low)
616                         *high_be = __cpu_to_be64(__be64_to_cpu(*high_be) + 1);
617         } else {
618                 u8 *pos = (ctr + AES_BLOCK_SIZE);
619                 u8 val;
620                 unsigned int size;
621
622                 for (; increment; increment--)
623                         for (size = AES_BLOCK_SIZE; size; size--) {
624                                 val = *--pos + 1;
625                                 *pos = val;
626                                 if (val)
627                                         break;
628                         }
629         }
630 }
631
/*
 * Completion callback invoked by the request manager when a cipher
 * request finishes. Unmaps the request's DMA buffers, updates req->iv
 * to the value the crypto API expects for chaining, frees per-request
 * buffers and completes the skcipher request with @err.
 */
static void cc_cipher_complete(struct device *dev, void *cc_req, int err)
{
	struct skcipher_request *req = (struct skcipher_request *)cc_req;
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	unsigned int len;

	cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);

	switch (ctx_p->cipher_mode) {
	case DRV_CIPHER_CBC:
		/*
		 * The crypto API expects us to set the req->iv to the last
		 * ciphertext block. For encrypt, simply copy from the result.
		 * For decrypt, we must copy from a saved buffer since this
		 * could be an in-place decryption operation and the src is
		 * lost by this point.
		 */
		if (req_ctx->gen_ctx.op_type == DRV_CRYPTO_DIRECTION_DECRYPT)  {
			/* backup_info was saved by cc_cipher_decrypt(). */
			memcpy(req->iv, req_ctx->backup_info, ivsize);
			kzfree(req_ctx->backup_info);
		} else if (!err) {
			len = req->cryptlen - ivsize;
			scatterwalk_map_and_copy(req->iv, req->dst, len,
						 ivsize, 0);
		}
		break;

	case DRV_CIPHER_CTR:
		/* Compute the counter of the last block */
		len = ALIGN(req->cryptlen, AES_BLOCK_SIZE) / AES_BLOCK_SIZE;
		cc_update_ctr((u8 *)req->iv, len);
		break;

	default:
		break;
	}

	/* Free the DMA-able IV copy made in cc_cipher_process(). */
	kzfree(req_ctx->iv);

	skcipher_request_complete(req, err);
}
679
/*
 * Common encrypt/decrypt path: validate the request, copy the IV to a
 * DMA-able buffer, map the src/dst scatterlists, build the descriptor
 * sequence and queue it on the HW. Returns -EINPROGRESS or -EBUSY when
 * the request was queued, 0 for an empty request, or a negative error.
 */
static int cc_cipher_process(struct skcipher_request *req,
			     enum drv_crypto_direction direction)
{
	struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
	struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
	struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
	unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
	struct scatterlist *dst = req->dst;
	struct scatterlist *src = req->src;
	unsigned int nbytes = req->cryptlen;
	void *iv = req->iv;
	struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
	struct device *dev = drvdata_to_dev(ctx_p->drvdata);
	struct cc_hw_desc desc[MAX_ABLKCIPHER_SEQ_LEN];
	struct cc_crypto_req cc_req = {};
	int rc;
	unsigned int seq_len = 0;
	gfp_t flags = cc_gfp_flags(&req->base);

	dev_dbg(dev, "%s req=%p iv=%p nbytes=%d\n",
		((direction == DRV_CRYPTO_DIRECTION_ENCRYPT) ?
		"Encrypt" : "Decrypt"), req, iv, nbytes);

	/* STAT_PHASE_0: Init and sanity checks */

	/* TODO: check data length according to mode */
	if (validate_data_size(ctx_p, nbytes)) {
		dev_err(dev, "Unsupported data size %d.\n", nbytes);
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_BLOCK_LEN);
		rc = -EINVAL;
		goto exit_process;
	}
	if (nbytes == 0) {
		/* No data to process is valid */
		rc = 0;
		goto exit_process;
	}

	/* The IV we are handed may be allocted from the stack so
	 * we must copy it to a DMAable buffer before use.
	 */
	req_ctx->iv = kmemdup(iv, ivsize, flags);
	if (!req_ctx->iv) {
		rc = -ENOMEM;
		goto exit_process;
	}

	/* Setup request structure */
	cc_req.user_cb = (void *)cc_cipher_complete;
	cc_req.user_arg = (void *)req;

	/* Setup request context */
	req_ctx->gen_ctx.op_type = direction;

	/* STAT_PHASE_1: Map buffers */

	rc = cc_map_cipher_request(ctx_p->drvdata, req_ctx, ivsize, nbytes,
				      req_ctx->iv, src, dst, flags);
	if (rc) {
		dev_err(dev, "map_request() failed\n");
		goto exit_process;
	}

	/* STAT_PHASE_2: Create sequence */

	/* Setup processing */
	cc_setup_cipher_desc(tfm, req_ctx, ivsize, nbytes, desc, &seq_len);
	/* Data processing */
	cc_setup_cipher_data(tfm, req_ctx, dst, src, nbytes, req, desc,
			     &seq_len);

	/* STAT_PHASE_3: Lock HW and push sequence */

	rc = cc_send_request(ctx_p->drvdata, &cc_req, desc, seq_len,
			     &req->base);
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Failed to send the request or request completed
		 * synchronously
		 */
		cc_unmap_cipher_request(dev, req_ctx, ivsize, src, dst);
	}

exit_process:
	if (rc != -EINPROGRESS && rc != -EBUSY) {
		/* Not queued: the completion callback will not run, so free
		 * the per-request buffers here (both kzfree-safe on NULL).
		 */
		kzfree(req_ctx->backup_info);
		kzfree(req_ctx->iv);
	}

	return rc;
}
770
771 static int cc_cipher_encrypt(struct skcipher_request *req)
772 {
773         struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
774
775         memset(req_ctx, 0, sizeof(*req_ctx));
776
777         return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_ENCRYPT);
778 }
779
780 static int cc_cipher_decrypt(struct skcipher_request *req)
781 {
782         struct crypto_skcipher *sk_tfm = crypto_skcipher_reqtfm(req);
783         struct crypto_tfm *tfm = crypto_skcipher_tfm(sk_tfm);
784         struct cc_cipher_ctx *ctx_p = crypto_tfm_ctx(tfm);
785         struct cipher_req_ctx *req_ctx = skcipher_request_ctx(req);
786         unsigned int ivsize = crypto_skcipher_ivsize(sk_tfm);
787         gfp_t flags = cc_gfp_flags(&req->base);
788         unsigned int len;
789
790         memset(req_ctx, 0, sizeof(*req_ctx));
791
792         if ((ctx_p->cipher_mode == DRV_CIPHER_CBC) &&
793             (req->cryptlen >= ivsize)) {
794
795                 /* Allocate and save the last IV sized bytes of the source,
796                  * which will be lost in case of in-place decryption.
797                  */
798                 req_ctx->backup_info = kzalloc(ivsize, flags);
799                 if (!req_ctx->backup_info)
800                         return -ENOMEM;
801
802                 len = req->cryptlen - ivsize;
803                 scatterwalk_map_and_copy(req_ctx->backup_info, req->src, len,
804                                          ivsize, 0);
805         }
806
807         return cc_cipher_process(req, DRV_CRYPTO_DIRECTION_DECRYPT);
808 }
809
/* Block cipher alg
 *
 * Template table of every skcipher the driver can register. Entries are
 * filtered against the device HW revision (min_hw_rev) at registration
 * time in cc_cipher_alloc(). The "paes" variants are keyed with a
 * protected HW key slot via cc_cipher_sethkey() instead of raw key
 * material; all others take a software key via cc_cipher_setkey().
 */
static const struct cc_alg_template skcipher_algs[] = {
	/* AES with protected (HW-resident) keys */
	{
		.name = "xts(paes)",
		.driver_name = "xts-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "xts512(paes)",
		.driver_name = "xts-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts4096(paes)",
		.driver_name = "xts-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv(paes)",
		.driver_name = "essiv-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv512(paes)",
		.driver_name = "essiv-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv4096(paes)",
		.driver_name = "essiv-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker(paes)",
		.driver_name = "bitlocker-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker512(paes)",
		.driver_name = "bitlocker-paes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker4096(paes)",
		.driver_name = "bitlocker-paes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize =  CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ecb(paes)",
		.driver_name = "ecb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "cbc(paes)",
		.driver_name = "cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ofb(paes)",
		.driver_name = "ofb-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "cts(cbc(paes))",
		.driver_name = "cts-cbc-paes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		/* CTR is a stream mode, hence blocksize 1 */
		.name = "ctr(paes)",
		.driver_name = "ctr-paes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_sethkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = CC_HW_KEY_SIZE,
			.max_keysize = CC_HW_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	/* AES with software-supplied keys */
	{
		.name = "xts(aes)",
		.driver_name = "xts-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "xts512(aes)",
		.driver_name = "xts-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "xts4096(aes)",
		.driver_name = "xts-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_XTS,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv(aes)",
		.driver_name = "essiv-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv512(aes)",
		.driver_name = "essiv-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "essiv4096(aes)",
		.driver_name = "essiv-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_ESSIV,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker(aes)",
		.driver_name = "bitlocker-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker512(aes)",
		.driver_name = "bitlocker-aes-du512-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 512,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "bitlocker4096(aes)",
		.driver_name = "bitlocker-aes-du4096-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE * 2,
			.max_keysize = AES_MAX_KEY_SIZE * 2,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_BITLOCKER,
		.flow_mode = S_DIN_to_AES,
		.data_unit = 4096,
		.min_hw_rev = CC_HW_REV_712,
	},
	{
		.name = "ecb(aes)",
		.driver_name = "ecb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cbc(aes)",
		.driver_name = "cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
		},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ofb(aes)",
		.driver_name = "ofb-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_OFB,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "cts(cbc(aes))",
		.driver_name = "cts-cbc-aes-ccree",
		.blocksize = AES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC_CTS,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		/* CTR is a stream mode, hence blocksize 1 */
		.name = "ctr(aes)",
		.driver_name = "ctr-aes-ccree",
		.blocksize = 1,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = AES_MIN_KEY_SIZE,
			.max_keysize = AES_MAX_KEY_SIZE,
			.ivsize = AES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CTR,
		.flow_mode = S_DIN_to_AES,
		.min_hw_rev = CC_HW_REV_630,
	},
	/* Triple-DES */
	{
		.name = "cbc(des3_ede)",
		.driver_name = "cbc-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = DES3_EDE_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ecb(des3_ede)",
		.driver_name = "ecb-3des-ccree",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES3_EDE_KEY_SIZE,
			.max_keysize = DES3_EDE_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	/* Single DES */
	{
		.name = "cbc(des)",
		.driver_name = "cbc-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = DES_BLOCK_SIZE,
			},
		.cipher_mode = DRV_CIPHER_CBC,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
	{
		.name = "ecb(des)",
		.driver_name = "ecb-des-ccree",
		.blocksize = DES_BLOCK_SIZE,
		.template_skcipher = {
			.setkey = cc_cipher_setkey,
			.encrypt = cc_cipher_encrypt,
			.decrypt = cc_cipher_decrypt,
			.min_keysize = DES_KEY_SIZE,
			.max_keysize = DES_KEY_SIZE,
			.ivsize = 0,
			},
		.cipher_mode = DRV_CIPHER_ECB,
		.flow_mode = S_DIN_to_DES,
		.min_hw_rev = CC_HW_REV_630,
	},
};
1337
1338 static struct cc_crypto_alg *cc_create_alg(const struct cc_alg_template *tmpl,
1339                                            struct device *dev)
1340 {
1341         struct cc_crypto_alg *t_alg;
1342         struct skcipher_alg *alg;
1343
1344         t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
1345         if (!t_alg)
1346                 return ERR_PTR(-ENOMEM);
1347
1348         alg = &t_alg->skcipher_alg;
1349
1350         memcpy(alg, &tmpl->template_skcipher, sizeof(*alg));
1351
1352         snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", tmpl->name);
1353         snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
1354                  tmpl->driver_name);
1355         alg->base.cra_module = THIS_MODULE;
1356         alg->base.cra_priority = CC_CRA_PRIO;
1357         alg->base.cra_blocksize = tmpl->blocksize;
1358         alg->base.cra_alignmask = 0;
1359         alg->base.cra_ctxsize = sizeof(struct cc_cipher_ctx);
1360
1361         alg->base.cra_init = cc_cipher_init;
1362         alg->base.cra_exit = cc_cipher_exit;
1363         alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
1364
1365         t_alg->cipher_mode = tmpl->cipher_mode;
1366         t_alg->flow_mode = tmpl->flow_mode;
1367         t_alg->data_unit = tmpl->data_unit;
1368
1369         return t_alg;
1370 }
1371
1372 int cc_cipher_free(struct cc_drvdata *drvdata)
1373 {
1374         struct cc_crypto_alg *t_alg, *n;
1375         struct cc_cipher_handle *cipher_handle = drvdata->cipher_handle;
1376
1377         if (cipher_handle) {
1378                 /* Remove registered algs */
1379                 list_for_each_entry_safe(t_alg, n, &cipher_handle->alg_list,
1380                                          entry) {
1381                         crypto_unregister_skcipher(&t_alg->skcipher_alg);
1382                         list_del(&t_alg->entry);
1383                         kfree(t_alg);
1384                 }
1385                 kfree(cipher_handle);
1386                 drvdata->cipher_handle = NULL;
1387         }
1388         return 0;
1389 }
1390
1391 int cc_cipher_alloc(struct cc_drvdata *drvdata)
1392 {
1393         struct cc_cipher_handle *cipher_handle;
1394         struct cc_crypto_alg *t_alg;
1395         struct device *dev = drvdata_to_dev(drvdata);
1396         int rc = -ENOMEM;
1397         int alg;
1398
1399         cipher_handle = kmalloc(sizeof(*cipher_handle), GFP_KERNEL);
1400         if (!cipher_handle)
1401                 return -ENOMEM;
1402
1403         INIT_LIST_HEAD(&cipher_handle->alg_list);
1404         drvdata->cipher_handle = cipher_handle;
1405
1406         /* Linux crypto */
1407         dev_dbg(dev, "Number of algorithms = %zu\n",
1408                 ARRAY_SIZE(skcipher_algs));
1409         for (alg = 0; alg < ARRAY_SIZE(skcipher_algs); alg++) {
1410                 if (skcipher_algs[alg].min_hw_rev > drvdata->hw_rev)
1411                         continue;
1412
1413                 dev_dbg(dev, "creating %s\n", skcipher_algs[alg].driver_name);
1414                 t_alg = cc_create_alg(&skcipher_algs[alg], dev);
1415                 if (IS_ERR(t_alg)) {
1416                         rc = PTR_ERR(t_alg);
1417                         dev_err(dev, "%s alg allocation failed\n",
1418                                 skcipher_algs[alg].driver_name);
1419                         goto fail0;
1420                 }
1421                 t_alg->drvdata = drvdata;
1422
1423                 dev_dbg(dev, "registering %s\n",
1424                         skcipher_algs[alg].driver_name);
1425                 rc = crypto_register_skcipher(&t_alg->skcipher_alg);
1426                 dev_dbg(dev, "%s alg registration rc = %x\n",
1427                         t_alg->skcipher_alg.base.cra_driver_name, rc);
1428                 if (rc) {
1429                         dev_err(dev, "%s alg registration failed\n",
1430                                 t_alg->skcipher_alg.base.cra_driver_name);
1431                         kfree(t_alg);
1432                         goto fail0;
1433                 } else {
1434                         list_add_tail(&t_alg->entry,
1435                                       &cipher_handle->alg_list);
1436                         dev_dbg(dev, "Registered %s\n",
1437                                 t_alg->skcipher_alg.base.cra_driver_name);
1438                 }
1439         }
1440         return 0;
1441
1442 fail0:
1443         cc_cipher_free(drvdata);
1444         return rc;
1445 }