GNU Linux-libre 4.19.286-gnu1: drivers/crypto/talitos.c
1 /*
2  * talitos - Freescale Integrated Security Engine (SEC) device driver
3  *
4  * Copyright (c) 2008-2011 Freescale Semiconductor, Inc.
5  *
6  * Scatterlist Crypto API glue code copied from files with the following:
7  * Copyright (c) 2006-2007 Herbert Xu <herbert@gondor.apana.org.au>
8  *
9  * Crypto algorithm registration code copied from hifn driver:
10  * 2007+ Copyright (c) Evgeniy Polyakov <johnpol@2ka.mipt.ru>
11  * All rights reserved.
12  *
13  * This program is free software; you can redistribute it and/or modify
14  * it under the terms of the GNU General Public License as published by
15  * the Free Software Foundation; either version 2 of the License, or
16  * (at your option) any later version.
17  *
18  * This program is distributed in the hope that it will be useful,
19  * but WITHOUT ANY WARRANTY; without even the implied warranty of
20  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
21  * GNU General Public License for more details.
22  *
23  * You should have received a copy of the GNU General Public License
24  * along with this program; if not, write to the Free Software
25  * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
26  */
27
28 #include <linux/kernel.h>
29 #include <linux/module.h>
30 #include <linux/mod_devicetable.h>
31 #include <linux/device.h>
32 #include <linux/interrupt.h>
33 #include <linux/crypto.h>
34 #include <linux/hw_random.h>
35 #include <linux/of_address.h>
36 #include <linux/of_irq.h>
37 #include <linux/of_platform.h>
38 #include <linux/dma-mapping.h>
39 #include <linux/io.h>
40 #include <linux/spinlock.h>
41 #include <linux/rtnetlink.h>
42 #include <linux/slab.h>
43
44 #include <crypto/algapi.h>
45 #include <crypto/aes.h>
46 #include <crypto/des.h>
47 #include <crypto/sha.h>
48 #include <crypto/md5.h>
49 #include <crypto/internal/aead.h>
50 #include <crypto/authenc.h>
51 #include <crypto/skcipher.h>
52 #include <crypto/hash.h>
53 #include <crypto/internal/hash.h>
54 #include <crypto/scatterwalk.h>
55
56 #include "talitos.h"
57
58 static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
59                            unsigned int len, bool is_sec1)
60 {
61         ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
62         if (is_sec1) {
63                 ptr->len1 = cpu_to_be16(len);
64         } else {
65                 ptr->len = cpu_to_be16(len);
66                 ptr->eptr = upper_32_bits(dma_addr);
67         }
68 }
69
70 static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
71                              struct talitos_ptr *src_ptr, bool is_sec1)
72 {
73         dst_ptr->ptr = src_ptr->ptr;
74         if (is_sec1) {
75                 dst_ptr->len1 = src_ptr->len1;
76         } else {
77                 dst_ptr->len = src_ptr->len;
78                 dst_ptr->eptr = src_ptr->eptr;
79         }
80 }
81
82 static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
83                                            bool is_sec1)
84 {
85         if (is_sec1)
86                 return be16_to_cpu(ptr->len1);
87         else
88                 return be16_to_cpu(ptr->len);
89 }
90
91 static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
92                                    bool is_sec1)
93 {
94         if (!is_sec1)
95                 ptr->j_extent = val;
96 }
97
98 static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
99 {
100         if (!is_sec1)
101                 ptr->j_extent |= val;
102 }
103
104 /*
105  * map virtual single (contiguous) pointer to h/w descriptor pointer
106  */
107 static void __map_single_talitos_ptr(struct device *dev,
108                                      struct talitos_ptr *ptr,
109                                      unsigned int len, void *data,
110                                      enum dma_data_direction dir,
111                                      unsigned long attrs)
112 {
113         dma_addr_t dma_addr = dma_map_single_attrs(dev, data, len, dir, attrs);
114         struct talitos_private *priv = dev_get_drvdata(dev);
115         bool is_sec1 = has_ftr_sec1(priv);
116
117         to_talitos_ptr(ptr, dma_addr, len, is_sec1);
118 }
119
120 static void map_single_talitos_ptr(struct device *dev,
121                                    struct talitos_ptr *ptr,
122                                    unsigned int len, void *data,
123                                    enum dma_data_direction dir)
124 {
125         __map_single_talitos_ptr(dev, ptr, len, data, dir, 0);
126 }
127
128 static void map_single_talitos_ptr_nosync(struct device *dev,
129                                           struct talitos_ptr *ptr,
130                                           unsigned int len, void *data,
131                                           enum dma_data_direction dir)
132 {
133         __map_single_talitos_ptr(dev, ptr, len, data, dir,
134                                  DMA_ATTR_SKIP_CPU_SYNC);
135 }
136
137 /*
138  * unmap bus single (contiguous) h/w descriptor pointer
139  */
140 static void unmap_single_talitos_ptr(struct device *dev,
141                                      struct talitos_ptr *ptr,
142                                      enum dma_data_direction dir)
143 {
144         struct talitos_private *priv = dev_get_drvdata(dev);
145         bool is_sec1 = has_ftr_sec1(priv);
146
147         dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
148                          from_talitos_ptr_len(ptr, is_sec1), dir);
149 }
150
151 static int reset_channel(struct device *dev, int ch)
152 {
153         struct talitos_private *priv = dev_get_drvdata(dev);
154         unsigned int timeout = TALITOS_TIMEOUT;
155         bool is_sec1 = has_ftr_sec1(priv);
156
157         if (is_sec1) {
158                 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
159                           TALITOS1_CCCR_LO_RESET);
160
161                 while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
162                         TALITOS1_CCCR_LO_RESET) && --timeout)
163                         cpu_relax();
164         } else {
165                 setbits32(priv->chan[ch].reg + TALITOS_CCCR,
166                           TALITOS2_CCCR_RESET);
167
168                 while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
169                         TALITOS2_CCCR_RESET) && --timeout)
170                         cpu_relax();
171         }
172
173         if (timeout == 0) {
174                 dev_err(dev, "failed to reset channel %d\n", ch);
175                 return -EIO;
176         }
177
178         /* set 36-bit addressing, done writeback enable and done IRQ enable */
179         setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
180                   TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
181         /* enable chaining descriptors */
182         if (is_sec1)
183                 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
184                           TALITOS_CCCR_LO_NE);
185
186         /* and ICCR writeback, if available */
187         if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
188                 setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
189                           TALITOS_CCCR_LO_IWSE);
190
191         return 0;
192 }
193
194 static int reset_device(struct device *dev)
195 {
196         struct talitos_private *priv = dev_get_drvdata(dev);
197         unsigned int timeout = TALITOS_TIMEOUT;
198         bool is_sec1 = has_ftr_sec1(priv);
199         u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;
200
201         setbits32(priv->reg + TALITOS_MCR, mcr);
202
203         while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
204                && --timeout)
205                 cpu_relax();
206
207         if (priv->irq[1]) {
208                 mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
209                 setbits32(priv->reg + TALITOS_MCR, mcr);
210         }
211
212         if (timeout == 0) {
213                 dev_err(dev, "failed to reset device\n");
214                 return -EIO;
215         }
216
217         return 0;
218 }
219
220 /*
221  * Reset and initialize the device
222  */
223 static int init_device(struct device *dev)
224 {
225         struct talitos_private *priv = dev_get_drvdata(dev);
226         int ch, err;
227         bool is_sec1 = has_ftr_sec1(priv);
228
229         /*
230          * Master reset
231          * errata documentation: warning: certain SEC interrupts
232          * are not fully cleared by writing the MCR:SWR bit,
233          * set bit twice to completely reset
234          */
235         err = reset_device(dev);
236         if (err)
237                 return err;
238
239         err = reset_device(dev);
240         if (err)
241                 return err;
242
243         /* reset channels */
244         for (ch = 0; ch < priv->num_channels; ch++) {
245                 err = reset_channel(dev, ch);
246                 if (err)
247                         return err;
248         }
249
250         /* enable channel done and error interrupts */
251         if (is_sec1) {
252                 clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
253                 clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
254                 /* disable parity error check in DEU (erroneous? test vect.) */
255                 setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
256         } else {
257                 setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
258                 setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
259         }
260
261         /* disable integrity check error interrupts (use writeback instead) */
262         if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
263                 setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
264                           TALITOS_MDEUICR_LO_ICE);
265
266         return 0;
267 }
268
269 /**
270  * talitos_submit - submits a descriptor to the device for processing
271  * @dev:        the SEC device to be used
272  * @ch:         the SEC device channel to be used
273  * @desc:       the descriptor to be processed by the device
274  * @callback:   whom to call when processing is complete
275  * @context:    a handle for use by caller (optional)
276  *
277  * desc must contain valid dma-mapped (bus physical) address pointers.
278  * callback must check err and feedback in descriptor header
279  * for device processing status.
280  */
281 int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
282                    void (*callback)(struct device *dev,
283                                     struct talitos_desc *desc,
284                                     void *context, int error),
285                    void *context)
286 {
287         struct talitos_private *priv = dev_get_drvdata(dev);
288         struct talitos_request *request;
289         unsigned long flags;
290         int head;
291         bool is_sec1 = has_ftr_sec1(priv);
292
293         spin_lock_irqsave(&priv->chan[ch].head_lock, flags);
294
295         if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
296                 /* h/w fifo is full */
297                 spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
298                 return -EAGAIN;
299         }
300
301         head = priv->chan[ch].head;
302         request = &priv->chan[ch].fifo[head];
303
304         /* map descriptor and save caller data */
305         if (is_sec1) {
306                 desc->hdr1 = desc->hdr;
307                 request->dma_desc = dma_map_single(dev, &desc->hdr1,
308                                                    TALITOS_DESC_SIZE,
309                                                    DMA_BIDIRECTIONAL);
310         } else {
311                 request->dma_desc = dma_map_single(dev, desc,
312                                                    TALITOS_DESC_SIZE,
313                                                    DMA_BIDIRECTIONAL);
314         }
315         request->callback = callback;
316         request->context = context;
317
318         /* increment fifo head */
319         priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);
320
321         smp_wmb();
322         request->desc = desc;
323
324         /* GO! */
325         wmb();
326         out_be32(priv->chan[ch].reg + TALITOS_FF,
327                  upper_32_bits(request->dma_desc));
328         out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
329                  lower_32_bits(request->dma_desc));
330
331         spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
332
333         return -EINPROGRESS;
334 }
335 EXPORT_SYMBOL(talitos_submit);
336
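/*
 * Illustrative sketch, not part of the driver: a caller fills a descriptor
 * with dma-mapped pointers, submits it, and finishes the request from the
 * callback.  The names my_done and my_ctx below are hypothetical.
 *
 *	err = talitos_submit(dev, ch, &edesc->desc, my_done, my_ctx);
 *	if (err != -EINPROGRESS) {
 *		unmap the descriptor and propagate err;
 *		-EAGAIN means the channel fifo was full
 *	}
 *
 * On completion the channel tasklet calls my_done(dev, desc, context, error),
 * which must check error (and the feedback in the descriptor header) before
 * freeing the request.
 */
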
337 static __be32 get_request_hdr(struct talitos_request *request, bool is_sec1)
338 {
339         struct talitos_edesc *edesc;
340
341         if (!is_sec1)
342                 return request->desc->hdr;
343
344         if (!request->desc->next_desc)
345                 return request->desc->hdr1;
346
347         edesc = container_of(request->desc, struct talitos_edesc, desc);
348
349         return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;
350 }
351
352 /*
353  * process what was done, notify callback of error if not
354  */
355 static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
356 {
357         struct talitos_private *priv = dev_get_drvdata(dev);
358         struct talitos_request *request, saved_req;
359         unsigned long flags;
360         int tail, status;
361         bool is_sec1 = has_ftr_sec1(priv);
362
363         spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
364
365         tail = priv->chan[ch].tail;
366         while (priv->chan[ch].fifo[tail].desc) {
367                 __be32 hdr;
368
369                 request = &priv->chan[ch].fifo[tail];
370
371                 /* descriptors with their done bits set don't get the error */
372                 rmb();
373                 hdr = get_request_hdr(request, is_sec1);
374
375                 if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
376                         status = 0;
377                 else
378                         if (!error)
379                                 break;
380                         else
381                                 status = error;
382
383                 dma_unmap_single(dev, request->dma_desc,
384                                  TALITOS_DESC_SIZE,
385                                  DMA_BIDIRECTIONAL);
386
387                 /* copy entries so we can call callback outside lock */
388                 saved_req.desc = request->desc;
389                 saved_req.callback = request->callback;
390                 saved_req.context = request->context;
391
392                 /* release request entry in fifo */
393                 smp_wmb();
394                 request->desc = NULL;
395
396                 /* increment fifo tail */
397                 priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);
398
399                 spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
400
401                 atomic_dec(&priv->chan[ch].submit_count);
402
403                 saved_req.callback(dev, saved_req.desc, saved_req.context,
404                                    status);
405                 /* channel may resume processing in single desc error case */
406                 if (error && !reset_ch && status == error)
407                         return;
408                 spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
409                 tail = priv->chan[ch].tail;
410         }
411
412         spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
413 }
414
415 /*
416  * process completed requests for channels that have done status
417  */
418 #define DEF_TALITOS1_DONE(name, ch_done_mask)                           \
419 static void talitos1_done_##name(unsigned long data)                    \
420 {                                                                       \
421         struct device *dev = (struct device *)data;                     \
422         struct talitos_private *priv = dev_get_drvdata(dev);            \
423         unsigned long flags;                                            \
424                                                                         \
425         if (ch_done_mask & 0x10000000)                                  \
426                 flush_channel(dev, 0, 0, 0);                    \
427         if (ch_done_mask & 0x40000000)                                  \
428                 flush_channel(dev, 1, 0, 0);                    \
429         if (ch_done_mask & 0x00010000)                                  \
430                 flush_channel(dev, 2, 0, 0);                    \
431         if (ch_done_mask & 0x00040000)                                  \
432                 flush_channel(dev, 3, 0, 0);                    \
433                                                                         \
434         /* At this point, all completed channels have been processed */ \
435         /* Unmask done interrupts for channels completed later on. */   \
436         spin_lock_irqsave(&priv->reg_lock, flags);                      \
437         clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);               \
438         clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);    \
439         spin_unlock_irqrestore(&priv->reg_lock, flags);                 \
440 }
441
442 DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
443 DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)
444
445 #define DEF_TALITOS2_DONE(name, ch_done_mask)                           \
446 static void talitos2_done_##name(unsigned long data)                    \
447 {                                                                       \
448         struct device *dev = (struct device *)data;                     \
449         struct talitos_private *priv = dev_get_drvdata(dev);            \
450         unsigned long flags;                                            \
451                                                                         \
452         if (ch_done_mask & 1)                                           \
453                 flush_channel(dev, 0, 0, 0);                            \
454         if (ch_done_mask & (1 << 2))                                    \
455                 flush_channel(dev, 1, 0, 0);                            \
456         if (ch_done_mask & (1 << 4))                                    \
457                 flush_channel(dev, 2, 0, 0);                            \
458         if (ch_done_mask & (1 << 6))                                    \
459                 flush_channel(dev, 3, 0, 0);                            \
460                                                                         \
461         /* At this point, all completed channels have been processed */ \
462         /* Unmask done interrupts for channels completed later on. */   \
463         spin_lock_irqsave(&priv->reg_lock, flags);                      \
464         setbits32(priv->reg + TALITOS_IMR, ch_done_mask);               \
465         setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);    \
466         spin_unlock_irqrestore(&priv->reg_lock, flags);                 \
467 }
468
469 DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
470 DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
471 DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
472 DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)
473
474 /*
475  * locate current (offending) descriptor
476  */
477 static __be32 current_desc_hdr(struct device *dev, int ch)
478 {
479         struct talitos_private *priv = dev_get_drvdata(dev);
480         int tail, iter;
481         dma_addr_t cur_desc;
482
483         cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
484         cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);
485
486         if (!cur_desc) {
487                 dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
488                 return 0;
489         }
490
491         tail = priv->chan[ch].tail;
492
493         iter = tail;
494         while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
495                priv->chan[ch].fifo[iter].desc->next_desc != cpu_to_be32(cur_desc)) {
496                 iter = (iter + 1) & (priv->fifo_len - 1);
497                 if (iter == tail) {
498                         dev_err(dev, "couldn't locate current descriptor\n");
499                         return 0;
500                 }
501         }
502
503         if (priv->chan[ch].fifo[iter].desc->next_desc == cpu_to_be32(cur_desc)) {
504                 struct talitos_edesc *edesc;
505
506                 edesc = container_of(priv->chan[ch].fifo[iter].desc,
507                                      struct talitos_edesc, desc);
508                 return ((struct talitos_desc *)
509                         (edesc->buf + edesc->dma_len))->hdr;
510         }
511
512         return priv->chan[ch].fifo[iter].desc->hdr;
513 }
514
515 /*
516  * user diagnostics; report root cause of error based on execution unit status
517  */
518 static void report_eu_error(struct device *dev, int ch, __be32 desc_hdr)
519 {
520         struct talitos_private *priv = dev_get_drvdata(dev);
521         int i;
522
523         if (!desc_hdr)
524                 desc_hdr = cpu_to_be32(in_be32(priv->chan[ch].reg + TALITOS_DESCBUF));
525
526         switch (desc_hdr & DESC_HDR_SEL0_MASK) {
527         case DESC_HDR_SEL0_AFEU:
528                 dev_err(dev, "AFEUISR 0x%08x_%08x\n",
529                         in_be32(priv->reg_afeu + TALITOS_EUISR),
530                         in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
531                 break;
532         case DESC_HDR_SEL0_DEU:
533                 dev_err(dev, "DEUISR 0x%08x_%08x\n",
534                         in_be32(priv->reg_deu + TALITOS_EUISR),
535                         in_be32(priv->reg_deu + TALITOS_EUISR_LO));
536                 break;
537         case DESC_HDR_SEL0_MDEUA:
538         case DESC_HDR_SEL0_MDEUB:
539                 dev_err(dev, "MDEUISR 0x%08x_%08x\n",
540                         in_be32(priv->reg_mdeu + TALITOS_EUISR),
541                         in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
542                 break;
543         case DESC_HDR_SEL0_RNG:
544                 dev_err(dev, "RNGUISR 0x%08x_%08x\n",
545                         in_be32(priv->reg_rngu + TALITOS_ISR),
546                         in_be32(priv->reg_rngu + TALITOS_ISR_LO));
547                 break;
548         case DESC_HDR_SEL0_PKEU:
549                 dev_err(dev, "PKEUISR 0x%08x_%08x\n",
550                         in_be32(priv->reg_pkeu + TALITOS_EUISR),
551                         in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
552                 break;
553         case DESC_HDR_SEL0_AESU:
554                 dev_err(dev, "AESUISR 0x%08x_%08x\n",
555                         in_be32(priv->reg_aesu + TALITOS_EUISR),
556                         in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
557                 break;
558         case DESC_HDR_SEL0_CRCU:
559                 dev_err(dev, "CRCUISR 0x%08x_%08x\n",
560                         in_be32(priv->reg_crcu + TALITOS_EUISR),
561                         in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
562                 break;
563         case DESC_HDR_SEL0_KEU:
564                 dev_err(dev, "KEUISR 0x%08x_%08x\n",
565                         in_be32(priv->reg_pkeu + TALITOS_EUISR),
566                         in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
567                 break;
568         }
569
570         switch (desc_hdr & DESC_HDR_SEL1_MASK) {
571         case DESC_HDR_SEL1_MDEUA:
572         case DESC_HDR_SEL1_MDEUB:
573                 dev_err(dev, "MDEUISR 0x%08x_%08x\n",
574                         in_be32(priv->reg_mdeu + TALITOS_EUISR),
575                         in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
576                 break;
577         case DESC_HDR_SEL1_CRCU:
578                 dev_err(dev, "CRCUISR 0x%08x_%08x\n",
579                         in_be32(priv->reg_crcu + TALITOS_EUISR),
580                         in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
581                 break;
582         }
583
584         for (i = 0; i < 8; i++)
585                 dev_err(dev, "DESCBUF 0x%08x_%08x\n",
586                         in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
587                         in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
588 }
589
590 /*
591  * recover from error interrupts
592  */
593 static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
594 {
595         struct talitos_private *priv = dev_get_drvdata(dev);
596         unsigned int timeout = TALITOS_TIMEOUT;
597         int ch, error, reset_dev = 0;
598         u32 v_lo;
599         bool is_sec1 = has_ftr_sec1(priv);
600         int reset_ch = is_sec1 ? 1 : 0; /* only SEC2 supports continuation */
601
602         for (ch = 0; ch < priv->num_channels; ch++) {
603                 /* skip channels without errors */
604                 if (is_sec1) {
605                         /* bits 29, 31, 17, 19 */
606                         if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
607                                 continue;
608                 } else {
609                         if (!(isr & (1 << (ch * 2 + 1))))
610                                 continue;
611                 }
612
613                 error = -EINVAL;
614
615                 v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);
616
617                 if (v_lo & TALITOS_CCPSR_LO_DOF) {
618                         dev_err(dev, "double fetch fifo overflow error\n");
619                         error = -EAGAIN;
620                         reset_ch = 1;
621                 }
622                 if (v_lo & TALITOS_CCPSR_LO_SOF) {
623                         /* h/w dropped descriptor */
624                         dev_err(dev, "single fetch fifo overflow error\n");
625                         error = -EAGAIN;
626                 }
627                 if (v_lo & TALITOS_CCPSR_LO_MDTE)
628                         dev_err(dev, "master data transfer error\n");
629                 if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
630                         dev_err(dev, is_sec1 ? "pointer not complete error\n"
631                                              : "s/g data length zero error\n");
632                 if (v_lo & TALITOS_CCPSR_LO_FPZ)
633                         dev_err(dev, is_sec1 ? "parity error\n"
634                                              : "fetch pointer zero error\n");
635                 if (v_lo & TALITOS_CCPSR_LO_IDH)
636                         dev_err(dev, "illegal descriptor header error\n");
637                 if (v_lo & TALITOS_CCPSR_LO_IEU)
638                         dev_err(dev, is_sec1 ? "static assignment error\n"
639                                              : "invalid exec unit error\n");
640                 if (v_lo & TALITOS_CCPSR_LO_EU)
641                         report_eu_error(dev, ch, current_desc_hdr(dev, ch));
642                 if (!is_sec1) {
643                         if (v_lo & TALITOS_CCPSR_LO_GB)
644                                 dev_err(dev, "gather boundary error\n");
645                         if (v_lo & TALITOS_CCPSR_LO_GRL)
646                                 dev_err(dev, "gather return/length error\n");
647                         if (v_lo & TALITOS_CCPSR_LO_SB)
648                                 dev_err(dev, "scatter boundary error\n");
649                         if (v_lo & TALITOS_CCPSR_LO_SRL)
650                                 dev_err(dev, "scatter return/length error\n");
651                 }
652
653                 flush_channel(dev, ch, error, reset_ch);
654
655                 if (reset_ch) {
656                         reset_channel(dev, ch);
657                 } else {
658                         setbits32(priv->chan[ch].reg + TALITOS_CCCR,
659                                   TALITOS2_CCCR_CONT);
660                         setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
661                         while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
662                                TALITOS2_CCCR_CONT) && --timeout)
663                                 cpu_relax();
664                         if (timeout == 0) {
665                                 dev_err(dev, "failed to restart channel %d\n",
666                                         ch);
667                                 reset_dev = 1;
668                         }
669                 }
670         }
671         if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
672             (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
673                 if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
674                         dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
675                                 isr, isr_lo);
676                 else
677                         dev_err(dev, "done overflow, internal time out, or "
678                                 "rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);
679
680                 /* purge request queues */
681                 for (ch = 0; ch < priv->num_channels; ch++)
682                         flush_channel(dev, ch, -EIO, 1);
683
684                 /* reset and reinitialize the device */
685                 init_device(dev);
686         }
687 }
688
689 #define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)          \
690 static irqreturn_t talitos1_interrupt_##name(int irq, void *data)              \
691 {                                                                              \
692         struct device *dev = data;                                             \
693         struct talitos_private *priv = dev_get_drvdata(dev);                   \
694         u32 isr, isr_lo;                                                       \
695         unsigned long flags;                                                   \
696                                                                                \
697         spin_lock_irqsave(&priv->reg_lock, flags);                             \
698         isr = in_be32(priv->reg + TALITOS_ISR);                                \
699         isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);                          \
700         /* Acknowledge interrupt */                                            \
701         out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
702         out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);                          \
703                                                                                \
704         if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) {    \
705                 spin_unlock_irqrestore(&priv->reg_lock, flags);                \
706                 talitos_error(dev, isr & ch_err_mask, isr_lo);                 \
707         }                                                                      \
708         else {                                                                 \
709                 if (likely(isr & ch_done_mask)) {                              \
710                         /* mask further done interrupts. */                    \
711                         setbits32(priv->reg + TALITOS_IMR, ch_done_mask);      \
712                         /* done_task will unmask done interrupts at exit */    \
713                         tasklet_schedule(&priv->done_task[tlet]);              \
714                 }                                                              \
715                 spin_unlock_irqrestore(&priv->reg_lock, flags);                \
716         }                                                                      \
717                                                                                \
718         return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED :  \
719                                                                 IRQ_NONE;      \
720 }
721
722 DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)
723
724 #define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)          \
725 static irqreturn_t talitos2_interrupt_##name(int irq, void *data)              \
726 {                                                                              \
727         struct device *dev = data;                                             \
728         struct talitos_private *priv = dev_get_drvdata(dev);                   \
729         u32 isr, isr_lo;                                                       \
730         unsigned long flags;                                                   \
731                                                                                \
732         spin_lock_irqsave(&priv->reg_lock, flags);                             \
733         isr = in_be32(priv->reg + TALITOS_ISR);                                \
734         isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);                          \
735         /* Acknowledge interrupt */                                            \
736         out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
737         out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);                          \
738                                                                                \
739         if (unlikely(isr & ch_err_mask || isr_lo)) {                           \
740                 spin_unlock_irqrestore(&priv->reg_lock, flags);                \
741                 talitos_error(dev, isr & ch_err_mask, isr_lo);                 \
742         }                                                                      \
743         else {                                                                 \
744                 if (likely(isr & ch_done_mask)) {                              \
745                         /* mask further done interrupts. */                    \
746                         clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);      \
747                         /* done_task will unmask done interrupts at exit */    \
748                         tasklet_schedule(&priv->done_task[tlet]);              \
749                 }                                                              \
750                 spin_unlock_irqrestore(&priv->reg_lock, flags);                \
751         }                                                                      \
752                                                                                \
753         return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED :  \
754                                                                 IRQ_NONE;      \
755 }
756
757 DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
758 DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
759                        0)
760 DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
761                        1)
762
763 /*
764  * hwrng
765  */
766 static int talitos_rng_data_present(struct hwrng *rng, int wait)
767 {
768         struct device *dev = (struct device *)rng->priv;
769         struct talitos_private *priv = dev_get_drvdata(dev);
770         u32 ofl;
771         int i;
772
773         for (i = 0; i < 20; i++) {
774                 ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
775                       TALITOS_RNGUSR_LO_OFL;
776                 if (ofl || !wait)
777                         break;
778                 udelay(10);
779         }
780
781         return !!ofl;
782 }
783
784 static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
785 {
786         struct device *dev = (struct device *)rng->priv;
787         struct talitos_private *priv = dev_get_drvdata(dev);
788
789         /* rng fifo requires 64-bit accesses */
790         *data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
791         *data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);
792
793         return sizeof(u32);
794 }
795
796 static int talitos_rng_init(struct hwrng *rng)
797 {
798         struct device *dev = (struct device *)rng->priv;
799         struct talitos_private *priv = dev_get_drvdata(dev);
800         unsigned int timeout = TALITOS_TIMEOUT;
801
802         setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
803         while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
804                  & TALITOS_RNGUSR_LO_RD)
805                && --timeout)
806                 cpu_relax();
807         if (timeout == 0) {
808                 dev_err(dev, "failed to reset rng hw\n");
809                 return -ENODEV;
810         }
811
812         /* start generating */
813         setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);
814
815         return 0;
816 }
817
818 static int talitos_register_rng(struct device *dev)
819 {
820         struct talitos_private *priv = dev_get_drvdata(dev);
821         int err;
822
823         priv->rng.name          = dev_driver_string(dev);
824         priv->rng.init          = talitos_rng_init;
825         priv->rng.data_present  = talitos_rng_data_present;
826         priv->rng.data_read     = talitos_rng_data_read;
827         priv->rng.priv          = (unsigned long)dev;
828
829         err = hwrng_register(&priv->rng);
830         if (!err)
831                 priv->rng_registered = true;
832
833         return err;
834 }
835
836 static void talitos_unregister_rng(struct device *dev)
837 {
838         struct talitos_private *priv = dev_get_drvdata(dev);
839
840         if (!priv->rng_registered)
841                 return;
842
843         hwrng_unregister(&priv->rng);
844         priv->rng_registered = false;
845 }
846
847 /*
848  * crypto alg
849  */
850 #define TALITOS_CRA_PRIORITY            3000
851 /*
852  * Defines a priority for doing AEAD with descriptors type
853  * HMAC_SNOOP_NO_AFEA (HSNA) instead of type IPSEC_ESP
854  */
855 #define TALITOS_CRA_PRIORITY_AEAD_HSNA  (TALITOS_CRA_PRIORITY - 1)
856 #ifdef CONFIG_CRYPTO_DEV_TALITOS2
857 #define TALITOS_MAX_KEY_SIZE            (AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
858 #else
859 #define TALITOS_MAX_KEY_SIZE            (AES_MAX_KEY_SIZE + SHA256_BLOCK_SIZE)
860 #endif
861 #define TALITOS_MAX_IV_LENGTH           16 /* max of AES_BLOCK_SIZE, DES3_EDE_BLOCK_SIZE */
862
863 struct talitos_ctx {
864         struct device *dev;
865         int ch;
866         __be32 desc_hdr_template;
867         u8 key[TALITOS_MAX_KEY_SIZE];
868         u8 iv[TALITOS_MAX_IV_LENGTH];
869         dma_addr_t dma_key;
870         unsigned int keylen;
871         unsigned int enckeylen;
872         unsigned int authkeylen;
873 };
874
875 #define HASH_MAX_BLOCK_SIZE             SHA512_BLOCK_SIZE
876 #define TALITOS_MDEU_MAX_CONTEXT_SIZE   TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512
877
878 struct talitos_ahash_req_ctx {
879         u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
880         unsigned int hw_context_size;
881         u8 buf[2][HASH_MAX_BLOCK_SIZE];
882         int buf_idx;
883         unsigned int swinit;
884         unsigned int first;
885         unsigned int last;
886         unsigned int to_hash_later;
887         unsigned int nbuf;
888         struct scatterlist bufsl[2];
889         struct scatterlist *psrc;
890 };
891
892 struct talitos_export_state {
893         u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
894         u8 buf[HASH_MAX_BLOCK_SIZE];
895         unsigned int swinit;
896         unsigned int first;
897         unsigned int last;
898         unsigned int to_hash_later;
899         unsigned int nbuf;
900 };
901
902 static int aead_setkey(struct crypto_aead *authenc,
903                        const u8 *key, unsigned int keylen)
904 {
905         struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
906         struct device *dev = ctx->dev;
907         struct crypto_authenc_keys keys;
908
909         if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
910                 goto badkey;
911
912         if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
913                 goto badkey;
914
915         if (ctx->keylen)
916                 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
917
918         memcpy(ctx->key, keys.authkey, keys.authkeylen);
919         memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);
920
921         ctx->keylen = keys.authkeylen + keys.enckeylen;
922         ctx->enckeylen = keys.enckeylen;
923         ctx->authkeylen = keys.authkeylen;
924         ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
925                                       DMA_TO_DEVICE);
926
927         memzero_explicit(&keys, sizeof(keys));
928         return 0;
929
930 badkey:
931         crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
932         memzero_explicit(&keys, sizeof(keys));
933         return -EINVAL;
934 }
935
936 static void talitos_sg_unmap(struct device *dev,
937                              struct talitos_edesc *edesc,
938                              struct scatterlist *src,
939                              struct scatterlist *dst,
940                              unsigned int len, unsigned int offset)
941 {
942         struct talitos_private *priv = dev_get_drvdata(dev);
943         bool is_sec1 = has_ftr_sec1(priv);
944         unsigned int src_nents = edesc->src_nents ? : 1;
945         unsigned int dst_nents = edesc->dst_nents ? : 1;
946
947         if (is_sec1 && dst && dst_nents > 1) {
948                 dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
949                                            len, DMA_FROM_DEVICE);
950                 sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
951                                      offset);
952         }
953         if (src != dst) {
954                 if (src_nents == 1 || !is_sec1)
955                         dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
956
957                 if (dst && (dst_nents == 1 || !is_sec1))
958                         dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
959         } else if (src_nents == 1 || !is_sec1) {
960                 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
961         }
962 }
963
964 static void ipsec_esp_unmap(struct device *dev,
965                             struct talitos_edesc *edesc,
966                             struct aead_request *areq, bool encrypt)
967 {
968         struct crypto_aead *aead = crypto_aead_reqtfm(areq);
969         struct talitos_ctx *ctx = crypto_aead_ctx(aead);
970         unsigned int ivsize = crypto_aead_ivsize(aead);
971         unsigned int authsize = crypto_aead_authsize(aead);
972         unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
973         bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
974         struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];
975
976         if (is_ipsec_esp)
977                 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
978                                          DMA_FROM_DEVICE);
979         unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);
980
981         talitos_sg_unmap(dev, edesc, areq->src, areq->dst,
982                          cryptlen + authsize, areq->assoclen);
983
984         if (edesc->dma_len)
985                 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
986                                  DMA_BIDIRECTIONAL);
987
988         if (!is_ipsec_esp) {
989                 unsigned int dst_nents = edesc->dst_nents ? : 1;
990
991                 sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
992                                    areq->assoclen + cryptlen - ivsize);
993         }
994 }
995
996 /*
997  * ipsec_esp descriptor callbacks
998  */
999 static void ipsec_esp_encrypt_done(struct device *dev,
1000                                    struct talitos_desc *desc, void *context,
1001                                    int err)
1002 {
1003         struct aead_request *areq = context;
1004         struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
1005         unsigned int ivsize = crypto_aead_ivsize(authenc);
1006         struct talitos_edesc *edesc;
1007
1008         edesc = container_of(desc, struct talitos_edesc, desc);
1009
1010         ipsec_esp_unmap(dev, edesc, areq, true);
1011
1012         dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);
1013
1014         kfree(edesc);
1015
1016         aead_request_complete(areq, err);
1017 }
1018
1019 static void ipsec_esp_decrypt_swauth_done(struct device *dev,
1020                                           struct talitos_desc *desc,
1021                                           void *context, int err)
1022 {
1023         struct aead_request *req = context;
1024         struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1025         unsigned int authsize = crypto_aead_authsize(authenc);
1026         struct talitos_edesc *edesc;
1027         char *oicv, *icv;
1028
1029         edesc = container_of(desc, struct talitos_edesc, desc);
1030
1031         ipsec_esp_unmap(dev, edesc, req, false);
1032
1033         if (!err) {
1034                 /* auth check */
1035                 oicv = edesc->buf + edesc->dma_len;
1036                 icv = oicv - authsize;
1037
1038                 err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
1039         }
1040
1041         kfree(edesc);
1042
1043         aead_request_complete(req, err);
1044 }
1045
1046 static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
1047                                           struct talitos_desc *desc,
1048                                           void *context, int err)
1049 {
1050         struct aead_request *req = context;
1051         struct talitos_edesc *edesc;
1052
1053         edesc = container_of(desc, struct talitos_edesc, desc);
1054
1055         ipsec_esp_unmap(dev, edesc, req, false);
1056
1057         /* check ICV auth status */
1058         if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
1059                      DESC_HDR_LO_ICCR1_PASS))
1060                 err = -EBADMSG;
1061
1062         kfree(edesc);
1063
1064         aead_request_complete(req, err);
1065 }
1066
1067 /*
1068  * convert scatterlist to SEC h/w link table format
1069  * stop at cryptlen bytes
1070  */
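/*
 * Each link table entry is a talitos_ptr carrying one segment's bus address
 * and length; the last entry is tagged DESC_PTR_LNKTBL_RET so the SEC stops
 * walking the table.
 */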
1071 static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
1072                                  unsigned int offset, int datalen, int elen,
1073                                  struct talitos_ptr *link_tbl_ptr, int align)
1074 {
1075         int n_sg = elen ? sg_count + 1 : sg_count;
1076         int count = 0;
1077         int cryptlen = datalen + elen;
1078         int padding = ALIGN(cryptlen, align) - cryptlen;
1079
1080         while (cryptlen && sg && n_sg--) {
1081                 unsigned int len = sg_dma_len(sg);
1082
1083                 if (offset >= len) {
1084                         offset -= len;
1085                         goto next;
1086                 }
1087
1088                 len -= offset;
1089
1090                 if (len > cryptlen)
1091                         len = cryptlen;
1092
1093                 if (datalen > 0 && len > datalen) {
1094                         to_talitos_ptr(link_tbl_ptr + count,
1095                                        sg_dma_address(sg) + offset, datalen, 0);
1096                         to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
1097                         count++;
1098                         len -= datalen;
1099                         offset += datalen;
1100                 }
1101                 to_talitos_ptr(link_tbl_ptr + count,
1102                                sg_dma_address(sg) + offset, sg_next(sg) ? len : len + padding, 0);
1103                 to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
1104                 count++;
1105                 cryptlen -= len;
1106                 datalen -= len;
1107                 offset = 0;
1108
1109 next:
1110                 sg = sg_next(sg);
1111         }
1112
1113         /* tag end of link table */
1114         if (count > 0)
1115                 to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
1116                                        DESC_PTR_LNKTBL_RET, 0);
1117
1118         return count;
1119 }
1120
1121 static int talitos_sg_map_ext(struct device *dev, struct scatterlist *src,
1122                               unsigned int len, struct talitos_edesc *edesc,
1123                               struct talitos_ptr *ptr, int sg_count,
1124                               unsigned int offset, int tbl_off, int elen,
1125                               bool force, int align)
1126 {
1127         struct talitos_private *priv = dev_get_drvdata(dev);
1128         bool is_sec1 = has_ftr_sec1(priv);
1129         int aligned_len = ALIGN(len, align);
1130
1131         if (!src) {
1132                 to_talitos_ptr(ptr, 0, 0, is_sec1);
1133                 return 1;
1134         }
1135         to_talitos_ptr_ext_set(ptr, elen, is_sec1);
1136         if (sg_count == 1 && !force) {
1137                 to_talitos_ptr(ptr, sg_dma_address(src) + offset, aligned_len, is_sec1);
1138                 return sg_count;
1139         }
1140         if (is_sec1) {
1141                 to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, aligned_len, is_sec1);
1142                 return sg_count;
1143         }
1144         sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len, elen,
1145                                          &edesc->link_tbl[tbl_off], align);
1146         if (sg_count == 1 && !force) {
1147                 /* Only one segment now, so no link tbl needed */
1148                 copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
1149                 return sg_count;
1150         }
1151         to_talitos_ptr(ptr, edesc->dma_link_tbl +
1152                             tbl_off * sizeof(struct talitos_ptr), aligned_len, is_sec1);
1153         to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);
1154
1155         return sg_count;
1156 }
1157
1158 static int talitos_sg_map(struct device *dev, struct scatterlist *src,
1159                           unsigned int len, struct talitos_edesc *edesc,
1160                           struct talitos_ptr *ptr, int sg_count,
1161                           unsigned int offset, int tbl_off)
1162 {
1163         return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
1164                                   tbl_off, 0, false, 1);
1165 }
1166
1167 /*
1168  * fill in and submit ipsec_esp descriptor
1169  */
1170 static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
1171                      bool encrypt,
1172                      void (*callback)(struct device *dev,
1173                                       struct talitos_desc *desc,
1174                                       void *context, int error))
1175 {
1176         struct crypto_aead *aead = crypto_aead_reqtfm(areq);
1177         unsigned int authsize = crypto_aead_authsize(aead);
1178         struct talitos_ctx *ctx = crypto_aead_ctx(aead);
1179         struct device *dev = ctx->dev;
1180         struct talitos_desc *desc = &edesc->desc;
1181         unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
1182         unsigned int ivsize = crypto_aead_ivsize(aead);
1183         int tbl_off = 0;
1184         int sg_count, ret;
1185         int elen = 0;
1186         bool sync_needed = false;
1187         struct talitos_private *priv = dev_get_drvdata(dev);
1188         bool is_sec1 = has_ftr_sec1(priv);
1189         bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
1190         struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
1191         struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];
1192         dma_addr_t dma_icv = edesc->dma_link_tbl + edesc->dma_len - authsize;
1193
1194         /* hmac key */
1195         to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);
1196
1197         sg_count = edesc->src_nents ?: 1;
1198         if (is_sec1 && sg_count > 1)
1199                 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1200                                   areq->assoclen + cryptlen);
1201         else
1202                 sg_count = dma_map_sg(dev, areq->src, sg_count,
1203                                       (areq->src == areq->dst) ?
1204                                       DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
1205
1206         /* hmac data */
1207         ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
1208                              &desc->ptr[1], sg_count, 0, tbl_off);
1209
1210         if (ret > 1) {
1211                 tbl_off += ret;
1212                 sync_needed = true;
1213         }
1214
1215         /* cipher iv */
1216         to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);
1217
1218         /* cipher key */
1219         to_talitos_ptr(ckey_ptr, ctx->dma_key  + ctx->authkeylen,
1220                        ctx->enckeylen, is_sec1);
1221
1222         /*
1223          * cipher in
1224          * map and adjust cipher len to aead request cryptlen.
1225          * extent is bytes of HMAC postpended to ciphertext,
1226          * typically 12 for ipsec
1227          */
1228         if (is_ipsec_esp && (desc->hdr & DESC_HDR_MODE1_MDEU_CICV))
1229                 elen = authsize;
1230
1231         ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
1232                                  sg_count, areq->assoclen, tbl_off, elen,
1233                                  false, 1);
1234
1235         if (ret > 1) {
1236                 tbl_off += ret;
1237                 sync_needed = true;
1238         }
1239
1240         /* cipher out */
1241         if (areq->src != areq->dst) {
1242                 sg_count = edesc->dst_nents ? : 1;
1243                 if (!is_sec1 || sg_count == 1)
1244                         dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1245         }
1246
1247         if (is_ipsec_esp && encrypt)
1248                 elen = authsize;
1249         else
1250                 elen = 0;
1251         ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
1252                                  sg_count, areq->assoclen, tbl_off, elen,
1253                                  is_ipsec_esp && !encrypt, 1);
1254         tbl_off += ret;
1255
1256         /* ICV data */
1257         edesc->icv_ool = !encrypt;
1258
1259         if (!encrypt && is_ipsec_esp) {
1260                 struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];
1261
1262                 /* Add an entry to the link table for ICV data */
1263                 to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
1264                 to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RET, is_sec1);
1265
1266                 /* icv data follows link tables */
1267                 to_talitos_ptr(tbl_ptr, dma_icv, authsize, is_sec1);
1268                 to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);
1269                 sync_needed = true;
1270         } else if (!encrypt) {
1271                 to_talitos_ptr(&desc->ptr[6], dma_icv, authsize, is_sec1);
1272                 sync_needed = true;
1273         } else if (!is_ipsec_esp) {
1274                 talitos_sg_map(dev, areq->dst, authsize, edesc, &desc->ptr[6],
1275                                sg_count, areq->assoclen + cryptlen, tbl_off);
1276         }
1277
1278         /* iv out */
1279         if (is_ipsec_esp)
1280                 map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
1281                                        DMA_FROM_DEVICE);
1282
1283         if (sync_needed)
1284                 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1285                                            edesc->dma_len,
1286                                            DMA_BIDIRECTIONAL);
1287
1288         ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
1289         if (ret != -EINPROGRESS) {
1290                 ipsec_esp_unmap(dev, edesc, areq, encrypt);
1291                 kfree(edesc);
1292         }
1293         return ret;
1294 }
1295
1296 /*
1297  * allocate and map the extended descriptor
1298  */
1299 static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
1300                                                  struct scatterlist *src,
1301                                                  struct scatterlist *dst,
1302                                                  u8 *iv,
1303                                                  unsigned int assoclen,
1304                                                  unsigned int cryptlen,
1305                                                  unsigned int authsize,
1306                                                  unsigned int ivsize,
1307                                                  int icv_stashing,
1308                                                  u32 cryptoflags,
1309                                                  bool encrypt)
1310 {
1311         struct talitos_edesc *edesc;
1312         int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
1313         dma_addr_t iv_dma = 0;
1314         gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
1315                       GFP_ATOMIC;
1316         struct talitos_private *priv = dev_get_drvdata(dev);
1317         bool is_sec1 = has_ftr_sec1(priv);
1318         int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;
1319
1320         if (cryptlen + authsize > max_len) {
1321                 dev_err(dev, "length exceeds h/w max limit\n");
1322                 return ERR_PTR(-EINVAL);
1323         }
1324
1325         if (!dst || dst == src) {
1326                 src_len = assoclen + cryptlen + authsize;
1327                 src_nents = sg_nents_for_len(src, src_len);
1328                 if (src_nents < 0) {
1329                         dev_err(dev, "Invalid number of src SG.\n");
1330                         return ERR_PTR(-EINVAL);
1331                 }
1332                 src_nents = (src_nents == 1) ? 0 : src_nents;
1333                 dst_nents = dst ? src_nents : 0;
1334                 dst_len = 0;
1335         } else { /* dst && dst != src */
1336                 src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
1337                 src_nents = sg_nents_for_len(src, src_len);
1338                 if (src_nents < 0) {
1339                         dev_err(dev, "Invalid number of src SG.\n");
1340                         return ERR_PTR(-EINVAL);
1341                 }
1342                 src_nents = (src_nents == 1) ? 0 : src_nents;
1343                 dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
1344                 dst_nents = sg_nents_for_len(dst, dst_len);
1345                 if (dst_nents < 0) {
1346                         dev_err(dev, "Invalid number of dst SG.\n");
1347                         return ERR_PTR(-EINVAL);
1348                 }
1349                 dst_nents = (dst_nents == 1) ? 0 : dst_nents;
1350         }
1351
1352         /*
1353          * allocate space for base edesc plus the link tables,
1354          * allowing for two separate entries for AD and generated ICV (+ 2),
1355          * and space for two sets of ICVs (stashed and generated)
1356          */
1357         alloc_len = sizeof(struct talitos_edesc);
1358         if (src_nents || dst_nents || !encrypt) {
1359                 if (is_sec1)
1360                         dma_len = (src_nents ? src_len : 0) +
1361                                   (dst_nents ? dst_len : 0) + authsize;
1362                 else
1363                         dma_len = (src_nents + dst_nents + 2) *
1364                                   sizeof(struct talitos_ptr) + authsize;
1365                 alloc_len += dma_len;
1366         } else {
1367                 dma_len = 0;
1368         }
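        /*
         * e.g. on SEC2+ with src_nents = 3, dst_nents = 2 and a 12-byte ICV:
         * dma_len = (3 + 2 + 2) * sizeof(struct talitos_ptr) + 12
         *         = 7 * 8 + 12 = 68 bytes of link table space
         */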
1369         alloc_len += icv_stashing ? authsize : 0;
1370
1371         /* if it's an ahash, add space for a second desc next to the first one */
1372         if (is_sec1 && !dst)
1373                 alloc_len += sizeof(struct talitos_desc);
1374         alloc_len += ivsize;
1375
1376         edesc = kmalloc(alloc_len, GFP_DMA | flags);
1377         if (!edesc)
1378                 return ERR_PTR(-ENOMEM);
1379         if (ivsize) {
1380                 iv = memcpy(((u8 *)edesc) + alloc_len - ivsize, iv, ivsize);
1381                 iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
1382         }
1383         memset(&edesc->desc, 0, sizeof(edesc->desc));
1384
1385         edesc->src_nents = src_nents;
1386         edesc->dst_nents = dst_nents;
1387         edesc->iv_dma = iv_dma;
1388         edesc->dma_len = dma_len;
1389         if (dma_len)
1390                 edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0],
1391                                                      edesc->dma_len,
1392                                                      DMA_BIDIRECTIONAL);
1393
1394         return edesc;
1395 }
1396
1397 static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
1398                                               int icv_stashing, bool encrypt)
1399 {
1400         struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
1401         unsigned int authsize = crypto_aead_authsize(authenc);
1402         struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1403         unsigned int ivsize = crypto_aead_ivsize(authenc);
1404         unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
1405
1406         return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
1407                                    iv, areq->assoclen, cryptlen,
1408                                    authsize, ivsize, icv_stashing,
1409                                    areq->base.flags, encrypt);
1410 }
1411
1412 static int aead_encrypt(struct aead_request *req)
1413 {
1414         struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1415         struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1416         struct talitos_edesc *edesc;
1417
1418         /* allocate extended descriptor */
1419         edesc = aead_edesc_alloc(req, req->iv, 0, true);
1420         if (IS_ERR(edesc))
1421                 return PTR_ERR(edesc);
1422
1423         /* set encrypt */
1424         edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1425
1426         return ipsec_esp(edesc, req, true, ipsec_esp_encrypt_done);
1427 }
1428
1429 static int aead_decrypt(struct aead_request *req)
1430 {
1431         struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1432         unsigned int authsize = crypto_aead_authsize(authenc);
1433         struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1434         struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1435         struct talitos_edesc *edesc;
1436         void *icvdata;
1437
1438         /* allocate extended descriptor */
1439         edesc = aead_edesc_alloc(req, req->iv, 1, false);
1440         if (IS_ERR(edesc))
1441                 return PTR_ERR(edesc);
1442
1443         if ((edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP) &&
1444             (priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
1445             ((!edesc->src_nents && !edesc->dst_nents) ||
1446              priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {
1447
1448                 /* decrypt and check the ICV */
1449                 edesc->desc.hdr = ctx->desc_hdr_template |
1450                                   DESC_HDR_DIR_INBOUND |
1451                                   DESC_HDR_MODE1_MDEU_CICV;
1452
1453                 /* reset integrity check result bits */
1454
1455                 return ipsec_esp(edesc, req, false,
1456                                  ipsec_esp_decrypt_hwauth_done);
1457         }
1458
1459         /* Have to check the ICV with software */
1460         edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1461
1462         /* stash incoming ICV for later cmp with ICV generated by the h/w */
1463         icvdata = edesc->buf + edesc->dma_len;
1464
1465         sg_pcopy_to_buffer(req->src, edesc->src_nents ? : 1, icvdata, authsize,
1466                            req->assoclen + req->cryptlen - authsize);
1467
1468         return ipsec_esp(edesc, req, false, ipsec_esp_decrypt_swauth_done);
1469 }
1470
1471 static int ablkcipher_setkey(struct crypto_ablkcipher *cipher,
1472                              const u8 *key, unsigned int keylen)
1473 {
1474         struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1475         struct device *dev = ctx->dev;
1476         u32 tmp[DES_EXPKEY_WORDS];
1477
1478         if (keylen > TALITOS_MAX_KEY_SIZE) {
1479                 crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
1480                 return -EINVAL;
1481         }
1482
1483         if (unlikely(crypto_ablkcipher_get_flags(cipher) &
1484                      CRYPTO_TFM_REQ_WEAK_KEY) &&
1485             !des_ekey(tmp, key)) {
1486                 crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_WEAK_KEY);
1487                 return -EINVAL;
1488         }
1489
1490         if (ctx->keylen)
1491                 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
1492
1493         memcpy(&ctx->key, key, keylen);
1494         ctx->keylen = keylen;
1495
1496         ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);
1497
1498         return 0;
1499 }
1500
1501 static int ablkcipher_aes_setkey(struct crypto_ablkcipher *cipher,
1502                                   const u8 *key, unsigned int keylen)
1503 {
1504         if (keylen == AES_KEYSIZE_128 || keylen == AES_KEYSIZE_192 ||
1505             keylen == AES_KEYSIZE_256)
1506                 return ablkcipher_setkey(cipher, key, keylen);
1507
1508         crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
1509
1510         return -EINVAL;
1511 }
1512
1513 static void common_nonsnoop_unmap(struct device *dev,
1514                                   struct talitos_edesc *edesc,
1515                                   struct ablkcipher_request *areq)
1516 {
1517         unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
1518
1519         talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->nbytes, 0);
1520         unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);
1521
1522         if (edesc->dma_len)
1523                 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1524                                  DMA_BIDIRECTIONAL);
1525 }
1526
1527 static void ablkcipher_done(struct device *dev,
1528                             struct talitos_desc *desc, void *context,
1529                             int err)
1530 {
1531         struct ablkcipher_request *areq = context;
1532         struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1533         struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1534         unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
1535         struct talitos_edesc *edesc;
1536
1537         edesc = container_of(desc, struct talitos_edesc, desc);
1538
1539         common_nonsnoop_unmap(dev, edesc, areq);
1540         memcpy(areq->info, ctx->iv, ivsize);
1541
1542         kfree(edesc);
1543
1544         areq->base.complete(&areq->base, err);
1545 }
1546
1547 static int common_nonsnoop(struct talitos_edesc *edesc,
1548                            struct ablkcipher_request *areq,
1549                            void (*callback) (struct device *dev,
1550                                              struct talitos_desc *desc,
1551                                              void *context, int error))
1552 {
1553         struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1554         struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1555         struct device *dev = ctx->dev;
1556         struct talitos_desc *desc = &edesc->desc;
1557         unsigned int cryptlen = areq->nbytes;
1558         unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
1559         int sg_count, ret;
1560         bool sync_needed = false;
1561         struct talitos_private *priv = dev_get_drvdata(dev);
1562         bool is_sec1 = has_ftr_sec1(priv);
1563         bool is_ctr = (desc->hdr & DESC_HDR_SEL0_MASK) == DESC_HDR_SEL0_AESU &&
1564                       (desc->hdr & DESC_HDR_MODE0_AESU_MASK) == DESC_HDR_MODE0_AESU_CTR;
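        /*
         * for AES-CTR the source mapping below is done in 16-byte (AES block)
         * chunks rather than byte granularity
         */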
1565
1566         /* first DWORD empty */
1567
1568         /* cipher iv */
1569         to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);
1570
1571         /* cipher key */
1572         to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);
1573
1574         sg_count = edesc->src_nents ?: 1;
1575         if (is_sec1 && sg_count > 1)
1576                 sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
1577                                   cryptlen);
1578         else
1579                 sg_count = dma_map_sg(dev, areq->src, sg_count,
1580                                       (areq->src == areq->dst) ?
1581                                       DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
1582         /*
1583          * cipher in
1584          */
1585         sg_count = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[3],
1586                                       sg_count, 0, 0, 0, false, is_ctr ? 16 : 1);
1587         if (sg_count > 1)
1588                 sync_needed = true;
1589
1590         /* cipher out */
1591         if (areq->src != areq->dst) {
1592                 sg_count = edesc->dst_nents ? : 1;
1593                 if (!is_sec1 || sg_count == 1)
1594                         dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
1595         }
1596
1597         ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
1598                              sg_count, 0, (edesc->src_nents + 1));
1599         if (ret > 1)
1600                 sync_needed = true;
1601
1602         /* iv out */
1603         map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
1604                                DMA_FROM_DEVICE);
1605
1606         /* last DWORD empty */
1607
1608         if (sync_needed)
1609                 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1610                                            edesc->dma_len, DMA_BIDIRECTIONAL);
1611
1612         ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
1613         if (ret != -EINPROGRESS) {
1614                 common_nonsnoop_unmap(dev, edesc, areq);
1615                 kfree(edesc);
1616         }
1617         return ret;
1618 }
1619
1620 static struct talitos_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request *
1621                                                     areq, bool encrypt)
1622 {
1623         struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1624         struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1625         unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
1626
1627         return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
1628                                    areq->info, 0, areq->nbytes, 0, ivsize, 0,
1629                                    areq->base.flags, encrypt);
1630 }
1631
1632 static int ablkcipher_encrypt(struct ablkcipher_request *areq)
1633 {
1634         struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1635         struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1636         struct talitos_edesc *edesc;
1637         unsigned int blocksize =
1638                         crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(cipher));
1639
1640         if (!areq->nbytes)
1641                 return 0;
1642
1643         if (areq->nbytes % blocksize)
1644                 return -EINVAL;
1645
1646         /* allocate extended descriptor */
1647         edesc = ablkcipher_edesc_alloc(areq, true);
1648         if (IS_ERR(edesc))
1649                 return PTR_ERR(edesc);
1650
1651         /* set encrypt */
1652         edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1653
1654         return common_nonsnoop(edesc, areq, ablkcipher_done);
1655 }
1656
1657 static int ablkcipher_decrypt(struct ablkcipher_request *areq)
1658 {
1659         struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1660         struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1661         struct talitos_edesc *edesc;
1662         unsigned int blocksize =
1663                         crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(cipher));
1664
1665         if (!areq->nbytes)
1666                 return 0;
1667
1668         if (areq->nbytes % blocksize)
1669                 return -EINVAL;
1670
1671         /* allocate extended descriptor */
1672         edesc = ablkcipher_edesc_alloc(areq, false);
1673         if (IS_ERR(edesc))
1674                 return PTR_ERR(edesc);
1675
1676         edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1677
1678         return common_nonsnoop(edesc, areq, ablkcipher_done);
1679 }
1680
1681 static void common_nonsnoop_hash_unmap(struct device *dev,
1682                                        struct talitos_edesc *edesc,
1683                                        struct ahash_request *areq)
1684 {
1685         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1686         struct talitos_private *priv = dev_get_drvdata(dev);
1687         bool is_sec1 = has_ftr_sec1(priv);
1688         struct talitos_desc *desc = &edesc->desc;
1689         struct talitos_desc *desc2 = (struct talitos_desc *)
1690                                      (edesc->buf + edesc->dma_len);
1691
1692         unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
1693         if (desc->next_desc &&
1694             desc->ptr[5].ptr != desc2->ptr[5].ptr)
1695                 unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);
1696
1697         if (req_ctx->psrc)
1698                 talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);
1699
1700         /* When using hashctx-in, must unmap it. */
1701         if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
1702                 unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
1703                                          DMA_TO_DEVICE);
1704         else if (desc->next_desc)
1705                 unmap_single_talitos_ptr(dev, &desc2->ptr[1],
1706                                          DMA_TO_DEVICE);
1707
1708         if (is_sec1 && req_ctx->nbuf)
1709                 unmap_single_talitos_ptr(dev, &desc->ptr[3],
1710                                          DMA_TO_DEVICE);
1711
1712         if (edesc->dma_len)
1713                 dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
1714                                  DMA_BIDIRECTIONAL);
1715
1716         if (edesc->desc.next_desc)
1717                 dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
1718                                  TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
1719 }
1720
1721 static void ahash_done(struct device *dev,
1722                        struct talitos_desc *desc, void *context,
1723                        int err)
1724 {
1725         struct ahash_request *areq = context;
1726         struct talitos_edesc *edesc =
1727                  container_of(desc, struct talitos_edesc, desc);
1728         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1729
1730         if (!req_ctx->last && req_ctx->to_hash_later) {
1731                 /* Position any partial block for next update/final/finup */
1732                 req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
1733                 req_ctx->nbuf = req_ctx->to_hash_later;
1734         }
1735         common_nonsnoop_hash_unmap(dev, edesc, areq);
1736
1737         kfree(edesc);
1738
1739         areq->base.complete(&areq->base, err);
1740 }
1741
1742 /*
1743  * SEC1 doesn't like hashing a 0-sized message, so we do the padding
1744  * ourselves and submit a padded block
1745  */
1746 static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
1747                                struct talitos_edesc *edesc,
1748                                struct talitos_ptr *ptr)
1749 {
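        /*
         * one block of MD padding for an empty message: the 0x80 end marker
         * followed by zeros, with the trailing 64-bit length field also zero
         */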
1750         static u8 padded_hash[64] = {
1751                 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1752                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1753                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1754                 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1755         };
1756
1757         pr_err_once("Bug in SEC1, padding ourselves\n");
1758         edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1759         map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
1760                                (char *)padded_hash, DMA_TO_DEVICE);
1761 }
1762
1763 static int common_nonsnoop_hash(struct talitos_edesc *edesc,
1764                                 struct ahash_request *areq, unsigned int length,
1765                                 void (*callback) (struct device *dev,
1766                                                   struct talitos_desc *desc,
1767                                                   void *context, int error))
1768 {
1769         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1770         struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1771         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1772         struct device *dev = ctx->dev;
1773         struct talitos_desc *desc = &edesc->desc;
1774         int ret;
1775         bool sync_needed = false;
1776         struct talitos_private *priv = dev_get_drvdata(dev);
1777         bool is_sec1 = has_ftr_sec1(priv);
1778         int sg_count;
1779
1780         /* first DWORD empty */
1781
1782         /* hash context in */
1783         if (!req_ctx->first || req_ctx->swinit) {
1784                 map_single_talitos_ptr_nosync(dev, &desc->ptr[1],
1785                                               req_ctx->hw_context_size,
1786                                               req_ctx->hw_context,
1787                                               DMA_TO_DEVICE);
1788                 req_ctx->swinit = 0;
1789         }
1790         /* Indicate next op is not the first. */
1791         req_ctx->first = 0;
1792
1793         /* HMAC key */
1794         if (ctx->keylen)
1795                 to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
1796                                is_sec1);
1797
1798         if (is_sec1 && req_ctx->nbuf)
1799                 length -= req_ctx->nbuf;
1800
1801         sg_count = edesc->src_nents ?: 1;
1802         if (is_sec1 && sg_count > 1)
1803                 sg_copy_to_buffer(req_ctx->psrc, sg_count, edesc->buf, length);
1804         else if (length)
1805                 sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
1806                                       DMA_TO_DEVICE);
1807         /*
1808          * data in
1809          */
1810         if (is_sec1 && req_ctx->nbuf) {
1811                 map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
1812                                        req_ctx->buf[req_ctx->buf_idx],
1813                                        DMA_TO_DEVICE);
1814         } else {
1815                 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1816                                           &desc->ptr[3], sg_count, 0, 0);
1817                 if (sg_count > 1)
1818                         sync_needed = true;
1819         }
1820
1821         /* fifth DWORD empty */
1822
1823         /* hash/HMAC out -or- hash context out */
1824         if (req_ctx->last)
1825                 map_single_talitos_ptr(dev, &desc->ptr[5],
1826                                        crypto_ahash_digestsize(tfm),
1827                                        areq->result, DMA_FROM_DEVICE);
1828         else
1829                 map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1830                                               req_ctx->hw_context_size,
1831                                               req_ctx->hw_context,
1832                                               DMA_FROM_DEVICE);
1833
1834         /* last DWORD empty */
1835
1836         if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
1837                 talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);
1838
1839         if (is_sec1 && req_ctx->nbuf && length) {
1840                 struct talitos_desc *desc2 = (struct talitos_desc *)
1841                                              (edesc->buf + edesc->dma_len);
1842                 dma_addr_t next_desc;
1843
1844                 memset(desc2, 0, sizeof(*desc2));
1845                 desc2->hdr = desc->hdr;
1846                 desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
1847                 desc2->hdr1 = desc2->hdr;
1848                 desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1849                 desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
1850                 desc->hdr &= ~DESC_HDR_DONE_NOTIFY;
1851
1852                 if (desc->ptr[1].ptr)
1853                         copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
1854                                          is_sec1);
1855                 else
1856                         map_single_talitos_ptr_nosync(dev, &desc2->ptr[1],
1857                                                       req_ctx->hw_context_size,
1858                                                       req_ctx->hw_context,
1859                                                       DMA_TO_DEVICE);
1860                 copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
1861                 sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
1862                                           &desc2->ptr[3], sg_count, 0, 0);
1863                 if (sg_count > 1)
1864                         sync_needed = true;
1865                 copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
1866                 if (req_ctx->last)
1867                         map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
1868                                                       req_ctx->hw_context_size,
1869                                                       req_ctx->hw_context,
1870                                                       DMA_FROM_DEVICE);
1871
1872                 next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
1873                                            DMA_BIDIRECTIONAL);
1874                 desc->next_desc = cpu_to_be32(next_desc);
1875         }
1876
1877         if (sync_needed)
1878                 dma_sync_single_for_device(dev, edesc->dma_link_tbl,
1879                                            edesc->dma_len, DMA_BIDIRECTIONAL);
1880
1881         ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
1882         if (ret != -EINPROGRESS) {
1883                 common_nonsnoop_hash_unmap(dev, edesc, areq);
1884                 kfree(edesc);
1885         }
1886         return ret;
1887 }
1888
1889 static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
1890                                                unsigned int nbytes)
1891 {
1892         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1893         struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1894         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1895         struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1896         bool is_sec1 = has_ftr_sec1(priv);
1897
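        /*
         * on SEC1 the bytes already buffered in req_ctx->buf are fed to the
         * h/w through a separately mapped pointer, so they are not part of
         * the scatterlist mapping sized here
         */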
1898         if (is_sec1)
1899                 nbytes -= req_ctx->nbuf;
1900
1901         return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
1902                                    nbytes, 0, 0, 0, areq->base.flags, false);
1903 }
1904
1905 static int ahash_init(struct ahash_request *areq)
1906 {
1907         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1908         struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1909         struct device *dev = ctx->dev;
1910         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1911         unsigned int size;
1912         dma_addr_t dma;
1913
1914         /* Initialize the context */
1915         req_ctx->buf_idx = 0;
1916         req_ctx->nbuf = 0;
1917         req_ctx->first = 1; /* first indicates h/w must init its context */
1918         req_ctx->swinit = 0; /* assume h/w init of context */
1919         size =  (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
1920                         ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
1921                         : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
1922         req_ctx->hw_context_size = size;
1923
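        /*
         * a dma map/unmap round trip flushes hw_context from the CPU cache
         * so that later unsynced mappings of it stay coherent
         */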
1924         dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
1925                              DMA_TO_DEVICE);
1926         dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
1927
1928         return 0;
1929 }
1930
1931 /*
1932  * on h/w without explicit sha224 support, we initialize h/w context
1933  * manually with sha224 constants, and tell it to run sha256.
1934  */
1935 static int ahash_init_sha224_swinit(struct ahash_request *areq)
1936 {
1937         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1938
1939         req_ctx->hw_context[0] = SHA224_H0;
1940         req_ctx->hw_context[1] = SHA224_H1;
1941         req_ctx->hw_context[2] = SHA224_H2;
1942         req_ctx->hw_context[3] = SHA224_H3;
1943         req_ctx->hw_context[4] = SHA224_H4;
1944         req_ctx->hw_context[5] = SHA224_H5;
1945         req_ctx->hw_context[6] = SHA224_H6;
1946         req_ctx->hw_context[7] = SHA224_H7;
1947
1948         /* init 64-bit count */
1949         req_ctx->hw_context[8] = 0;
1950         req_ctx->hw_context[9] = 0;
1951
1952         ahash_init(areq);
1953         req_ctx->swinit = 1; /* prevent h/w init of context with sha256 values */
1954
1955         return 0;
1956 }
1957
1958 static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
1959 {
1960         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1961         struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1962         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1963         struct talitos_edesc *edesc;
1964         unsigned int blocksize =
1965                         crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
1966         unsigned int nbytes_to_hash;
1967         unsigned int to_hash_later;
1968         unsigned int nsg;
1969         int nents;
1970         struct device *dev = ctx->dev;
1971         struct talitos_private *priv = dev_get_drvdata(dev);
1972         bool is_sec1 = has_ftr_sec1(priv);
1973         u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];
1974
1975         if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
1976                 /* Buffer up to one whole block */
1977                 nents = sg_nents_for_len(areq->src, nbytes);
1978                 if (nents < 0) {
1979                         dev_err(ctx->dev, "Invalid number of src SG.\n");
1980                         return nents;
1981                 }
1982                 sg_copy_to_buffer(areq->src, nents,
1983                                   ctx_buf + req_ctx->nbuf, nbytes);
1984                 req_ctx->nbuf += nbytes;
1985                 return 0;
1986         }
1987
1988         /* At least (blocksize + 1) bytes are available to hash */
1989         nbytes_to_hash = nbytes + req_ctx->nbuf;
1990         to_hash_later = nbytes_to_hash & (blocksize - 1);
1991
1992         if (req_ctx->last)
1993                 to_hash_later = 0;
1994         else if (to_hash_later)
1995                 /* There is a partial block. Hash the full block(s) now */
1996                 nbytes_to_hash -= to_hash_later;
1997         else {
1998                 /* Keep one block buffered */
1999                 nbytes_to_hash -= blocksize;
2000                 to_hash_later = blocksize;
2001         }
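        /*
         * e.g. with a 64-byte block size, 10 bytes already buffered and a
         * 150-byte update: nbytes_to_hash = 160 and to_hash_later = 32, so
         * 128 bytes are hashed now and 32 are carried over to the next request
         */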
2002
2003         /* Chain in any previously buffered data */
2004         if (!is_sec1 && req_ctx->nbuf) {
2005                 nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
2006                 sg_init_table(req_ctx->bufsl, nsg);
2007                 sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
2008                 if (nsg > 1)
2009                         sg_chain(req_ctx->bufsl, 2, areq->src);
2010                 req_ctx->psrc = req_ctx->bufsl;
2011         } else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
2012                 int offset;
2013
2014                 if (nbytes_to_hash > blocksize)
2015                         offset = blocksize - req_ctx->nbuf;
2016                 else
2017                         offset = nbytes_to_hash - req_ctx->nbuf;
2018                 nents = sg_nents_for_len(areq->src, offset);
2019                 if (nents < 0) {
2020                         dev_err(ctx->dev, "Invalid number of src SG.\n");
2021                         return nents;
2022                 }
2023                 sg_copy_to_buffer(areq->src, nents,
2024                                   ctx_buf + req_ctx->nbuf, offset);
2025                 req_ctx->nbuf += offset;
2026                 req_ctx->psrc = scatterwalk_ffwd(req_ctx->bufsl, areq->src,
2027                                                  offset);
2028         } else
2029                 req_ctx->psrc = areq->src;
2030
2031         if (to_hash_later) {
2032                 nents = sg_nents_for_len(areq->src, nbytes);
2033                 if (nents < 0) {
2034                         dev_err(ctx->dev, "Invalid number of src SG.\n");
2035                         return nents;
2036                 }
2037                 sg_pcopy_to_buffer(areq->src, nents,
2038                                    req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
2039                                       to_hash_later,
2040                                       nbytes - to_hash_later);
2041         }
2042         req_ctx->to_hash_later = to_hash_later;
2043
2044         /* Allocate extended descriptor */
2045         edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
2046         if (IS_ERR(edesc))
2047                 return PTR_ERR(edesc);
2048
2049         edesc->desc.hdr = ctx->desc_hdr_template;
2050
2051         /* On last one, request SEC to pad; otherwise continue */
2052         if (req_ctx->last)
2053                 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
2054         else
2055                 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;
2056
2057         /* request SEC to INIT hash. */
2058         if (req_ctx->first && !req_ctx->swinit)
2059                 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;
2060
2061         /* When the tfm context has a keylen, it's an HMAC.
2062          * A first or last (i.e. not middle) descriptor must request HMAC.
2063          */
2064         if (ctx->keylen && (req_ctx->first || req_ctx->last))
2065                 edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;
2066
2067         return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, ahash_done);
2068 }
2069
2070 static int ahash_update(struct ahash_request *areq)
2071 {
2072         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2073
2074         req_ctx->last = 0;
2075
2076         return ahash_process_req(areq, areq->nbytes);
2077 }
2078
2079 static int ahash_final(struct ahash_request *areq)
2080 {
2081         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2082
2083         req_ctx->last = 1;
2084
2085         return ahash_process_req(areq, 0);
2086 }
2087
2088 static int ahash_finup(struct ahash_request *areq)
2089 {
2090         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2091
2092         req_ctx->last = 1;
2093
2094         return ahash_process_req(areq, areq->nbytes);
2095 }
2096
2097 static int ahash_digest(struct ahash_request *areq)
2098 {
2099         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2100         struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
2101
2102         ahash->init(areq);
2103         req_ctx->last = 1;
2104
2105         return ahash_process_req(areq, areq->nbytes);
2106 }
2107
2108 static int ahash_export(struct ahash_request *areq, void *out)
2109 {
2110         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2111         struct talitos_export_state *export = out;
2112         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2113         struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2114         struct device *dev = ctx->dev;
2115         dma_addr_t dma;
2116
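        /*
         * a dma map/unmap round trip makes the context written by the h/w
         * visible to the CPU before it is copied into the export state
         */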
2117         dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2118                              DMA_FROM_DEVICE);
2119         dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_FROM_DEVICE);
2120
2121         memcpy(export->hw_context, req_ctx->hw_context,
2122                req_ctx->hw_context_size);
2123         memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
2124         export->swinit = req_ctx->swinit;
2125         export->first = req_ctx->first;
2126         export->last = req_ctx->last;
2127         export->to_hash_later = req_ctx->to_hash_later;
2128         export->nbuf = req_ctx->nbuf;
2129
2130         return 0;
2131 }
2132
2133 static int ahash_import(struct ahash_request *areq, const void *in)
2134 {
2135         struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2136         struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2137         struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
2138         struct device *dev = ctx->dev;
2139         const struct talitos_export_state *export = in;
2140         unsigned int size;
2141         dma_addr_t dma;
2142
2143         memset(req_ctx, 0, sizeof(*req_ctx));
2144         size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
2145                         ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
2146                         : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
2147         req_ctx->hw_context_size = size;
2148         memcpy(req_ctx->hw_context, export->hw_context, size);
2149         memcpy(req_ctx->buf[0], export->buf, export->nbuf);
2150         req_ctx->swinit = export->swinit;
2151         req_ctx->first = export->first;
2152         req_ctx->last = export->last;
2153         req_ctx->to_hash_later = export->to_hash_later;
2154         req_ctx->nbuf = export->nbuf;
2155
2156         dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
2157                              DMA_TO_DEVICE);
2158         dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);
2159
2160         return 0;
2161 }
2162
2163 static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
2164                    u8 *hash)
2165 {
2166         struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2167
2168         struct scatterlist sg[1];
2169         struct ahash_request *req;
2170         struct crypto_wait wait;
2171         int ret;
2172
2173         crypto_init_wait(&wait);
2174
2175         req = ahash_request_alloc(tfm, GFP_KERNEL);
2176         if (!req)
2177                 return -ENOMEM;
2178
2179         /* Keep tfm keylen == 0 during hash of the long key */
2180         ctx->keylen = 0;
2181         ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2182                                    crypto_req_done, &wait);
2183
2184         sg_init_one(&sg[0], key, keylen);
2185
2186         ahash_request_set_crypt(req, sg, hash, keylen);
2187         ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
2188
2189         ahash_request_free(req);
2190
2191         return ret;
2192 }
2193
2194 static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2195                         unsigned int keylen)
2196 {
2197         struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2198         struct device *dev = ctx->dev;
2199         unsigned int blocksize =
2200                         crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2201         unsigned int digestsize = crypto_ahash_digestsize(tfm);
2202         unsigned int keysize = keylen;
2203         u8 hash[SHA512_DIGEST_SIZE];
2204         int ret;
2205
2206         if (keylen <= blocksize)
2207                 memcpy(ctx->key, key, keysize);
2208         else {
2209                 /* Must get the hash of the long key */
2210                 ret = keyhash(tfm, key, keylen, hash);
2211
2212                 if (ret) {
2213                         crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2214                         return -EINVAL;
2215                 }
2216
2217                 keysize = digestsize;
2218                 memcpy(ctx->key, hash, digestsize);
2219         }
2220
2221         if (ctx->keylen)
2222                 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
2223
2224         ctx->keylen = keysize;
2225         ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
2226
2227         return 0;
2228 }
2229
2230
2231 struct talitos_alg_template {
2232         u32 type;
2233         u32 priority;
2234         union {
2235                 struct crypto_alg crypto;
2236                 struct ahash_alg hash;
2237                 struct aead_alg aead;
2238         } alg;
2239         __be32 desc_hdr_template;
2240 };
2241
2242 static struct talitos_alg_template driver_algs[] = {
2243         /* AEAD algorithms.  These use a single-pass ipsec_esp descriptor */
2244         {       .type = CRYPTO_ALG_TYPE_AEAD,
2245                 .alg.aead = {
2246                         .base = {
2247                                 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2248                                 .cra_driver_name = "authenc-hmac-sha1-"
2249                                                    "cbc-aes-talitos",
2250                                 .cra_blocksize = AES_BLOCK_SIZE,
2251                                 .cra_flags = CRYPTO_ALG_ASYNC,
2252                         },
2253                         .ivsize = AES_BLOCK_SIZE,
2254                         .maxauthsize = SHA1_DIGEST_SIZE,
2255                 },
2256                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2257                                      DESC_HDR_SEL0_AESU |
2258                                      DESC_HDR_MODE0_AESU_CBC |
2259                                      DESC_HDR_SEL1_MDEUA |
2260                                      DESC_HDR_MODE1_MDEU_INIT |
2261                                      DESC_HDR_MODE1_MDEU_PAD |
2262                                      DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2263         },
2264         {       .type = CRYPTO_ALG_TYPE_AEAD,
2265                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2266                 .alg.aead = {
2267                         .base = {
2268                                 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2269                                 .cra_driver_name = "authenc-hmac-sha1-"
2270                                                    "cbc-aes-talitos-hsna",
2271                                 .cra_blocksize = AES_BLOCK_SIZE,
2272                                 .cra_flags = CRYPTO_ALG_ASYNC,
2273                         },
2274                         .ivsize = AES_BLOCK_SIZE,
2275                         .maxauthsize = SHA1_DIGEST_SIZE,
2276                 },
2277                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2278                                      DESC_HDR_SEL0_AESU |
2279                                      DESC_HDR_MODE0_AESU_CBC |
2280                                      DESC_HDR_SEL1_MDEUA |
2281                                      DESC_HDR_MODE1_MDEU_INIT |
2282                                      DESC_HDR_MODE1_MDEU_PAD |
2283                                      DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2284         },
2285         {       .type = CRYPTO_ALG_TYPE_AEAD,
2286                 .alg.aead = {
2287                         .base = {
2288                                 .cra_name = "authenc(hmac(sha1),"
2289                                             "cbc(des3_ede))",
2290                                 .cra_driver_name = "authenc-hmac-sha1-"
2291                                                    "cbc-3des-talitos",
2292                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2293                                 .cra_flags = CRYPTO_ALG_ASYNC,
2294                         },
2295                         .ivsize = DES3_EDE_BLOCK_SIZE,
2296                         .maxauthsize = SHA1_DIGEST_SIZE,
2297                 },
2298                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2299                                      DESC_HDR_SEL0_DEU |
2300                                      DESC_HDR_MODE0_DEU_CBC |
2301                                      DESC_HDR_MODE0_DEU_3DES |
2302                                      DESC_HDR_SEL1_MDEUA |
2303                                      DESC_HDR_MODE1_MDEU_INIT |
2304                                      DESC_HDR_MODE1_MDEU_PAD |
2305                                      DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2306         },
2307         {       .type = CRYPTO_ALG_TYPE_AEAD,
2308                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2309                 .alg.aead = {
2310                         .base = {
2311                                 .cra_name = "authenc(hmac(sha1),"
2312                                             "cbc(des3_ede))",
2313                                 .cra_driver_name = "authenc-hmac-sha1-"
2314                                                    "cbc-3des-talitos-hsna",
2315                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2316                                 .cra_flags = CRYPTO_ALG_ASYNC,
2317                         },
2318                         .ivsize = DES3_EDE_BLOCK_SIZE,
2319                         .maxauthsize = SHA1_DIGEST_SIZE,
2320                 },
2321                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2322                                      DESC_HDR_SEL0_DEU |
2323                                      DESC_HDR_MODE0_DEU_CBC |
2324                                      DESC_HDR_MODE0_DEU_3DES |
2325                                      DESC_HDR_SEL1_MDEUA |
2326                                      DESC_HDR_MODE1_MDEU_INIT |
2327                                      DESC_HDR_MODE1_MDEU_PAD |
2328                                      DESC_HDR_MODE1_MDEU_SHA1_HMAC,
2329         },
2330         {       .type = CRYPTO_ALG_TYPE_AEAD,
2331                 .alg.aead = {
2332                         .base = {
2333                                 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2334                                 .cra_driver_name = "authenc-hmac-sha224-"
2335                                                    "cbc-aes-talitos",
2336                                 .cra_blocksize = AES_BLOCK_SIZE,
2337                                 .cra_flags = CRYPTO_ALG_ASYNC,
2338                         },
2339                         .ivsize = AES_BLOCK_SIZE,
2340                         .maxauthsize = SHA224_DIGEST_SIZE,
2341                 },
2342                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2343                                      DESC_HDR_SEL0_AESU |
2344                                      DESC_HDR_MODE0_AESU_CBC |
2345                                      DESC_HDR_SEL1_MDEUA |
2346                                      DESC_HDR_MODE1_MDEU_INIT |
2347                                      DESC_HDR_MODE1_MDEU_PAD |
2348                                      DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2349         },
2350         {       .type = CRYPTO_ALG_TYPE_AEAD,
2351                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2352                 .alg.aead = {
2353                         .base = {
2354                                 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2355                                 .cra_driver_name = "authenc-hmac-sha224-"
2356                                                    "cbc-aes-talitos-hsna",
2357                                 .cra_blocksize = AES_BLOCK_SIZE,
2358                                 .cra_flags = CRYPTO_ALG_ASYNC,
2359                         },
2360                         .ivsize = AES_BLOCK_SIZE,
2361                         .maxauthsize = SHA224_DIGEST_SIZE,
2362                 },
2363                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2364                                      DESC_HDR_SEL0_AESU |
2365                                      DESC_HDR_MODE0_AESU_CBC |
2366                                      DESC_HDR_SEL1_MDEUA |
2367                                      DESC_HDR_MODE1_MDEU_INIT |
2368                                      DESC_HDR_MODE1_MDEU_PAD |
2369                                      DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2370         },
2371         {       .type = CRYPTO_ALG_TYPE_AEAD,
2372                 .alg.aead = {
2373                         .base = {
2374                                 .cra_name = "authenc(hmac(sha224),"
2375                                             "cbc(des3_ede))",
2376                                 .cra_driver_name = "authenc-hmac-sha224-"
2377                                                    "cbc-3des-talitos",
2378                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2379                                 .cra_flags = CRYPTO_ALG_ASYNC,
2380                         },
2381                         .ivsize = DES3_EDE_BLOCK_SIZE,
2382                         .maxauthsize = SHA224_DIGEST_SIZE,
2383                 },
2384                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2385                                      DESC_HDR_SEL0_DEU |
2386                                      DESC_HDR_MODE0_DEU_CBC |
2387                                      DESC_HDR_MODE0_DEU_3DES |
2388                                      DESC_HDR_SEL1_MDEUA |
2389                                      DESC_HDR_MODE1_MDEU_INIT |
2390                                      DESC_HDR_MODE1_MDEU_PAD |
2391                                      DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2392         },
2393         {       .type = CRYPTO_ALG_TYPE_AEAD,
2394                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2395                 .alg.aead = {
2396                         .base = {
2397                                 .cra_name = "authenc(hmac(sha224),"
2398                                             "cbc(des3_ede))",
2399                                 .cra_driver_name = "authenc-hmac-sha224-"
2400                                                    "cbc-3des-talitos-hsna",
2401                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2402                                 .cra_flags = CRYPTO_ALG_ASYNC,
2403                         },
2404                         .ivsize = DES3_EDE_BLOCK_SIZE,
2405                         .maxauthsize = SHA224_DIGEST_SIZE,
2406                 },
2407                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2408                                      DESC_HDR_SEL0_DEU |
2409                                      DESC_HDR_MODE0_DEU_CBC |
2410                                      DESC_HDR_MODE0_DEU_3DES |
2411                                      DESC_HDR_SEL1_MDEUA |
2412                                      DESC_HDR_MODE1_MDEU_INIT |
2413                                      DESC_HDR_MODE1_MDEU_PAD |
2414                                      DESC_HDR_MODE1_MDEU_SHA224_HMAC,
2415         },
2416         {       .type = CRYPTO_ALG_TYPE_AEAD,
2417                 .alg.aead = {
2418                         .base = {
2419                                 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2420                                 .cra_driver_name = "authenc-hmac-sha256-"
2421                                                    "cbc-aes-talitos",
2422                                 .cra_blocksize = AES_BLOCK_SIZE,
2423                                 .cra_flags = CRYPTO_ALG_ASYNC,
2424                         },
2425                         .ivsize = AES_BLOCK_SIZE,
2426                         .maxauthsize = SHA256_DIGEST_SIZE,
2427                 },
2428                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2429                                      DESC_HDR_SEL0_AESU |
2430                                      DESC_HDR_MODE0_AESU_CBC |
2431                                      DESC_HDR_SEL1_MDEUA |
2432                                      DESC_HDR_MODE1_MDEU_INIT |
2433                                      DESC_HDR_MODE1_MDEU_PAD |
2434                                      DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2435         },
2436         {       .type = CRYPTO_ALG_TYPE_AEAD,
2437                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2438                 .alg.aead = {
2439                         .base = {
2440                                 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2441                                 .cra_driver_name = "authenc-hmac-sha256-"
2442                                                    "cbc-aes-talitos-hsna",
2443                                 .cra_blocksize = AES_BLOCK_SIZE,
2444                                 .cra_flags = CRYPTO_ALG_ASYNC,
2445                         },
2446                         .ivsize = AES_BLOCK_SIZE,
2447                         .maxauthsize = SHA256_DIGEST_SIZE,
2448                 },
2449                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2450                                      DESC_HDR_SEL0_AESU |
2451                                      DESC_HDR_MODE0_AESU_CBC |
2452                                      DESC_HDR_SEL1_MDEUA |
2453                                      DESC_HDR_MODE1_MDEU_INIT |
2454                                      DESC_HDR_MODE1_MDEU_PAD |
2455                                      DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2456         },
2457         {       .type = CRYPTO_ALG_TYPE_AEAD,
2458                 .alg.aead = {
2459                         .base = {
2460                                 .cra_name = "authenc(hmac(sha256),"
2461                                             "cbc(des3_ede))",
2462                                 .cra_driver_name = "authenc-hmac-sha256-"
2463                                                    "cbc-3des-talitos",
2464                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2465                                 .cra_flags = CRYPTO_ALG_ASYNC,
2466                         },
2467                         .ivsize = DES3_EDE_BLOCK_SIZE,
2468                         .maxauthsize = SHA256_DIGEST_SIZE,
2469                 },
2470                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2471                                      DESC_HDR_SEL0_DEU |
2472                                      DESC_HDR_MODE0_DEU_CBC |
2473                                      DESC_HDR_MODE0_DEU_3DES |
2474                                      DESC_HDR_SEL1_MDEUA |
2475                                      DESC_HDR_MODE1_MDEU_INIT |
2476                                      DESC_HDR_MODE1_MDEU_PAD |
2477                                      DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2478         },
2479         {       .type = CRYPTO_ALG_TYPE_AEAD,
2480                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2481                 .alg.aead = {
2482                         .base = {
2483                                 .cra_name = "authenc(hmac(sha256),"
2484                                             "cbc(des3_ede))",
2485                                 .cra_driver_name = "authenc-hmac-sha256-"
2486                                                    "cbc-3des-talitos-hsna",
2487                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2488                                 .cra_flags = CRYPTO_ALG_ASYNC,
2489                         },
2490                         .ivsize = DES3_EDE_BLOCK_SIZE,
2491                         .maxauthsize = SHA256_DIGEST_SIZE,
2492                 },
2493                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2494                                      DESC_HDR_SEL0_DEU |
2495                                      DESC_HDR_MODE0_DEU_CBC |
2496                                      DESC_HDR_MODE0_DEU_3DES |
2497                                      DESC_HDR_SEL1_MDEUA |
2498                                      DESC_HDR_MODE1_MDEU_INIT |
2499                                      DESC_HDR_MODE1_MDEU_PAD |
2500                                      DESC_HDR_MODE1_MDEU_SHA256_HMAC,
2501         },
2502         {       .type = CRYPTO_ALG_TYPE_AEAD,
2503                 .alg.aead = {
2504                         .base = {
2505                                 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2506                                 .cra_driver_name = "authenc-hmac-sha384-"
2507                                                    "cbc-aes-talitos",
2508                                 .cra_blocksize = AES_BLOCK_SIZE,
2509                                 .cra_flags = CRYPTO_ALG_ASYNC,
2510                         },
2511                         .ivsize = AES_BLOCK_SIZE,
2512                         .maxauthsize = SHA384_DIGEST_SIZE,
2513                 },
2514                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2515                                      DESC_HDR_SEL0_AESU |
2516                                      DESC_HDR_MODE0_AESU_CBC |
2517                                      DESC_HDR_SEL1_MDEUB |
2518                                      DESC_HDR_MODE1_MDEU_INIT |
2519                                      DESC_HDR_MODE1_MDEU_PAD |
2520                                      DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2521         },
2522         {       .type = CRYPTO_ALG_TYPE_AEAD,
2523                 .alg.aead = {
2524                         .base = {
2525                                 .cra_name = "authenc(hmac(sha384),"
2526                                             "cbc(des3_ede))",
2527                                 .cra_driver_name = "authenc-hmac-sha384-"
2528                                                    "cbc-3des-talitos",
2529                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2530                                 .cra_flags = CRYPTO_ALG_ASYNC,
2531                         },
2532                         .ivsize = DES3_EDE_BLOCK_SIZE,
2533                         .maxauthsize = SHA384_DIGEST_SIZE,
2534                 },
2535                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2536                                      DESC_HDR_SEL0_DEU |
2537                                      DESC_HDR_MODE0_DEU_CBC |
2538                                      DESC_HDR_MODE0_DEU_3DES |
2539                                      DESC_HDR_SEL1_MDEUB |
2540                                      DESC_HDR_MODE1_MDEU_INIT |
2541                                      DESC_HDR_MODE1_MDEU_PAD |
2542                                      DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
2543         },
2544         {       .type = CRYPTO_ALG_TYPE_AEAD,
2545                 .alg.aead = {
2546                         .base = {
2547                                 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2548                                 .cra_driver_name = "authenc-hmac-sha512-"
2549                                                    "cbc-aes-talitos",
2550                                 .cra_blocksize = AES_BLOCK_SIZE,
2551                                 .cra_flags = CRYPTO_ALG_ASYNC,
2552                         },
2553                         .ivsize = AES_BLOCK_SIZE,
2554                         .maxauthsize = SHA512_DIGEST_SIZE,
2555                 },
2556                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2557                                      DESC_HDR_SEL0_AESU |
2558                                      DESC_HDR_MODE0_AESU_CBC |
2559                                      DESC_HDR_SEL1_MDEUB |
2560                                      DESC_HDR_MODE1_MDEU_INIT |
2561                                      DESC_HDR_MODE1_MDEU_PAD |
2562                                      DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2563         },
2564         {       .type = CRYPTO_ALG_TYPE_AEAD,
2565                 .alg.aead = {
2566                         .base = {
2567                                 .cra_name = "authenc(hmac(sha512),"
2568                                             "cbc(des3_ede))",
2569                                 .cra_driver_name = "authenc-hmac-sha512-"
2570                                                    "cbc-3des-talitos",
2571                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2572                                 .cra_flags = CRYPTO_ALG_ASYNC,
2573                         },
2574                         .ivsize = DES3_EDE_BLOCK_SIZE,
2575                         .maxauthsize = SHA512_DIGEST_SIZE,
2576                 },
2577                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2578                                      DESC_HDR_SEL0_DEU |
2579                                      DESC_HDR_MODE0_DEU_CBC |
2580                                      DESC_HDR_MODE0_DEU_3DES |
2581                                      DESC_HDR_SEL1_MDEUB |
2582                                      DESC_HDR_MODE1_MDEU_INIT |
2583                                      DESC_HDR_MODE1_MDEU_PAD |
2584                                      DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
2585         },
2586         {       .type = CRYPTO_ALG_TYPE_AEAD,
2587                 .alg.aead = {
2588                         .base = {
2589                                 .cra_name = "authenc(hmac(md5),cbc(aes))",
2590                                 .cra_driver_name = "authenc-hmac-md5-"
2591                                                    "cbc-aes-talitos",
2592                                 .cra_blocksize = AES_BLOCK_SIZE,
2593                                 .cra_flags = CRYPTO_ALG_ASYNC,
2594                         },
2595                         .ivsize = AES_BLOCK_SIZE,
2596                         .maxauthsize = MD5_DIGEST_SIZE,
2597                 },
2598                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2599                                      DESC_HDR_SEL0_AESU |
2600                                      DESC_HDR_MODE0_AESU_CBC |
2601                                      DESC_HDR_SEL1_MDEUA |
2602                                      DESC_HDR_MODE1_MDEU_INIT |
2603                                      DESC_HDR_MODE1_MDEU_PAD |
2604                                      DESC_HDR_MODE1_MDEU_MD5_HMAC,
2605         },
2606         {       .type = CRYPTO_ALG_TYPE_AEAD,
2607                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2608                 .alg.aead = {
2609                         .base = {
2610                                 .cra_name = "authenc(hmac(md5),cbc(aes))",
2611                                 .cra_driver_name = "authenc-hmac-md5-"
2612                                                    "cbc-aes-talitos-hsna",
2613                                 .cra_blocksize = AES_BLOCK_SIZE,
2614                                 .cra_flags = CRYPTO_ALG_ASYNC,
2615                         },
2616                         .ivsize = AES_BLOCK_SIZE,
2617                         .maxauthsize = MD5_DIGEST_SIZE,
2618                 },
2619                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2620                                      DESC_HDR_SEL0_AESU |
2621                                      DESC_HDR_MODE0_AESU_CBC |
2622                                      DESC_HDR_SEL1_MDEUA |
2623                                      DESC_HDR_MODE1_MDEU_INIT |
2624                                      DESC_HDR_MODE1_MDEU_PAD |
2625                                      DESC_HDR_MODE1_MDEU_MD5_HMAC,
2626         },
2627         {       .type = CRYPTO_ALG_TYPE_AEAD,
2628                 .alg.aead = {
2629                         .base = {
2630                                 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2631                                 .cra_driver_name = "authenc-hmac-md5-"
2632                                                    "cbc-3des-talitos",
2633                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2634                                 .cra_flags = CRYPTO_ALG_ASYNC,
2635                         },
2636                         .ivsize = DES3_EDE_BLOCK_SIZE,
2637                         .maxauthsize = MD5_DIGEST_SIZE,
2638                 },
2639                 .desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
2640                                      DESC_HDR_SEL0_DEU |
2641                                      DESC_HDR_MODE0_DEU_CBC |
2642                                      DESC_HDR_MODE0_DEU_3DES |
2643                                      DESC_HDR_SEL1_MDEUA |
2644                                      DESC_HDR_MODE1_MDEU_INIT |
2645                                      DESC_HDR_MODE1_MDEU_PAD |
2646                                      DESC_HDR_MODE1_MDEU_MD5_HMAC,
2647         },
2648         {       .type = CRYPTO_ALG_TYPE_AEAD,
2649                 .priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
2650                 .alg.aead = {
2651                         .base = {
2652                                 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2653                                 .cra_driver_name = "authenc-hmac-md5-"
2654                                                    "cbc-3des-talitos-hsna",
2655                                 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2656                                 .cra_flags = CRYPTO_ALG_ASYNC,
2657                         },
2658                         .ivsize = DES3_EDE_BLOCK_SIZE,
2659                         .maxauthsize = MD5_DIGEST_SIZE,
2660                 },
2661                 .desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
2662                                      DESC_HDR_SEL0_DEU |
2663                                      DESC_HDR_MODE0_DEU_CBC |
2664                                      DESC_HDR_MODE0_DEU_3DES |
2665                                      DESC_HDR_SEL1_MDEUA |
2666                                      DESC_HDR_MODE1_MDEU_INIT |
2667                                      DESC_HDR_MODE1_MDEU_PAD |
2668                                      DESC_HDR_MODE1_MDEU_MD5_HMAC,
2669         },
2670         /* ABLKCIPHER algorithms. */
2671         {       .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2672                 .alg.crypto = {
2673                         .cra_name = "ecb(aes)",
2674                         .cra_driver_name = "ecb-aes-talitos",
2675                         .cra_blocksize = AES_BLOCK_SIZE,
2676                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2677                                      CRYPTO_ALG_ASYNC,
2678                         .cra_ablkcipher = {
2679                                 .min_keysize = AES_MIN_KEY_SIZE,
2680                                 .max_keysize = AES_MAX_KEY_SIZE,
2681                         }
2682                 },
2683                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2684                                      DESC_HDR_SEL0_AESU,
2685         },
2686         {       .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2687                 .alg.crypto = {
2688                         .cra_name = "cbc(aes)",
2689                         .cra_driver_name = "cbc-aes-talitos",
2690                         .cra_blocksize = AES_BLOCK_SIZE,
2691                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2692                                      CRYPTO_ALG_ASYNC,
2693                         .cra_ablkcipher = {
2694                                 .min_keysize = AES_MIN_KEY_SIZE,
2695                                 .max_keysize = AES_MAX_KEY_SIZE,
2696                                 .ivsize = AES_BLOCK_SIZE,
2697                                 .setkey = ablkcipher_aes_setkey,
2698                         }
2699                 },
2700                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2701                                      DESC_HDR_SEL0_AESU |
2702                                      DESC_HDR_MODE0_AESU_CBC,
2703         },
2704         {       .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2705                 .alg.crypto = {
2706                         .cra_name = "ctr(aes)",
2707                         .cra_driver_name = "ctr-aes-talitos",
2708                         .cra_blocksize = 1,
2709                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2710                                      CRYPTO_ALG_ASYNC,
2711                         .cra_ablkcipher = {
2712                                 .min_keysize = AES_MIN_KEY_SIZE,
2713                                 .max_keysize = AES_MAX_KEY_SIZE,
2714                                 .ivsize = AES_BLOCK_SIZE,
2715                                 .setkey = ablkcipher_aes_setkey,
2716                         }
2717                 },
2718                 .desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
2719                                      DESC_HDR_SEL0_AESU |
2720                                      DESC_HDR_MODE0_AESU_CTR,
2721         },
2722         {       .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2723                 .alg.crypto = {
2724                         .cra_name = "ecb(des)",
2725                         .cra_driver_name = "ecb-des-talitos",
2726                         .cra_blocksize = DES_BLOCK_SIZE,
2727                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2728                                      CRYPTO_ALG_ASYNC,
2729                         .cra_ablkcipher = {
2730                                 .min_keysize = DES_KEY_SIZE,
2731                                 .max_keysize = DES_KEY_SIZE,
2732                                 .ivsize = DES_BLOCK_SIZE,
2733                         }
2734                 },
2735                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2736                                      DESC_HDR_SEL0_DEU,
2737         },
2738         {       .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2739                 .alg.crypto = {
2740                         .cra_name = "cbc(des)",
2741                         .cra_driver_name = "cbc-des-talitos",
2742                         .cra_blocksize = DES_BLOCK_SIZE,
2743                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2744                                      CRYPTO_ALG_ASYNC,
2745                         .cra_ablkcipher = {
2746                                 .min_keysize = DES_KEY_SIZE,
2747                                 .max_keysize = DES_KEY_SIZE,
2748                                 .ivsize = DES_BLOCK_SIZE,
2749                         }
2750                 },
2751                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2752                                      DESC_HDR_SEL0_DEU |
2753                                      DESC_HDR_MODE0_DEU_CBC,
2754         },
2755         {       .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2756                 .alg.crypto = {
2757                         .cra_name = "ecb(des3_ede)",
2758                         .cra_driver_name = "ecb-3des-talitos",
2759                         .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2760                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2761                                      CRYPTO_ALG_ASYNC,
2762                         .cra_ablkcipher = {
2763                                 .min_keysize = DES3_EDE_KEY_SIZE,
2764                                 .max_keysize = DES3_EDE_KEY_SIZE,
2765                                 .ivsize = DES3_EDE_BLOCK_SIZE,
2766                         }
2767                 },
2768                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2769                                      DESC_HDR_SEL0_DEU |
2770                                      DESC_HDR_MODE0_DEU_3DES,
2771         },
2772         {       .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2773                 .alg.crypto = {
2774                         .cra_name = "cbc(des3_ede)",
2775                         .cra_driver_name = "cbc-3des-talitos",
2776                         .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2777                         .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
2778                                      CRYPTO_ALG_ASYNC,
2779                         .cra_ablkcipher = {
2780                                 .min_keysize = DES3_EDE_KEY_SIZE,
2781                                 .max_keysize = DES3_EDE_KEY_SIZE,
2782                                 .ivsize = DES3_EDE_BLOCK_SIZE,
2783                         }
2784                 },
2785                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2786                                      DESC_HDR_SEL0_DEU |
2787                                      DESC_HDR_MODE0_DEU_CBC |
2788                                      DESC_HDR_MODE0_DEU_3DES,
2789         },
2790         /* AHASH algorithms. */
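        /*
         * All hash entries export/import their partial state through
         * struct talitos_export_state (see .halg.statesize below).
         */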
2791         {       .type = CRYPTO_ALG_TYPE_AHASH,
2792                 .alg.hash = {
2793                         .halg.digestsize = MD5_DIGEST_SIZE,
2794                         .halg.statesize = sizeof(struct talitos_export_state),
2795                         .halg.base = {
2796                                 .cra_name = "md5",
2797                                 .cra_driver_name = "md5-talitos",
2798                                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
2799                                 .cra_flags = CRYPTO_ALG_ASYNC,
2800                         }
2801                 },
2802                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2803                                      DESC_HDR_SEL0_MDEUA |
2804                                      DESC_HDR_MODE0_MDEU_MD5,
2805         },
2806         {       .type = CRYPTO_ALG_TYPE_AHASH,
2807                 .alg.hash = {
2808                         .halg.digestsize = SHA1_DIGEST_SIZE,
2809                         .halg.statesize = sizeof(struct talitos_export_state),
2810                         .halg.base = {
2811                                 .cra_name = "sha1",
2812                                 .cra_driver_name = "sha1-talitos",
2813                                 .cra_blocksize = SHA1_BLOCK_SIZE,
2814                                 .cra_flags = CRYPTO_ALG_ASYNC,
2815                         }
2816                 },
2817                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2818                                      DESC_HDR_SEL0_MDEUA |
2819                                      DESC_HDR_MODE0_MDEU_SHA1,
2820         },
2821         {       .type = CRYPTO_ALG_TYPE_AHASH,
2822                 .alg.hash = {
2823                         .halg.digestsize = SHA224_DIGEST_SIZE,
2824                         .halg.statesize = sizeof(struct talitos_export_state),
2825                         .halg.base = {
2826                                 .cra_name = "sha224",
2827                                 .cra_driver_name = "sha224-talitos",
2828                                 .cra_blocksize = SHA224_BLOCK_SIZE,
2829                                 .cra_flags = CRYPTO_ALG_ASYNC,
2830                         }
2831                 },
2832                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2833                                      DESC_HDR_SEL0_MDEUA |
2834                                      DESC_HDR_MODE0_MDEU_SHA224,
2835         },
2836         {       .type = CRYPTO_ALG_TYPE_AHASH,
2837                 .alg.hash = {
2838                         .halg.digestsize = SHA256_DIGEST_SIZE,
2839                         .halg.statesize = sizeof(struct talitos_export_state),
2840                         .halg.base = {
2841                                 .cra_name = "sha256",
2842                                 .cra_driver_name = "sha256-talitos",
2843                                 .cra_blocksize = SHA256_BLOCK_SIZE,
2844                                 .cra_flags = CRYPTO_ALG_ASYNC,
2845                         }
2846                 },
2847                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2848                                      DESC_HDR_SEL0_MDEUA |
2849                                      DESC_HDR_MODE0_MDEU_SHA256,
2850         },
2851         {       .type = CRYPTO_ALG_TYPE_AHASH,
2852                 .alg.hash = {
2853                         .halg.digestsize = SHA384_DIGEST_SIZE,
2854                         .halg.statesize = sizeof(struct talitos_export_state),
2855                         .halg.base = {
2856                                 .cra_name = "sha384",
2857                                 .cra_driver_name = "sha384-talitos",
2858                                 .cra_blocksize = SHA384_BLOCK_SIZE,
2859                                 .cra_flags = CRYPTO_ALG_ASYNC,
2860                         }
2861                 },
2862                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2863                                      DESC_HDR_SEL0_MDEUB |
2864                                      DESC_HDR_MODE0_MDEUB_SHA384,
2865         },
2866         {       .type = CRYPTO_ALG_TYPE_AHASH,
2867                 .alg.hash = {
2868                         .halg.digestsize = SHA512_DIGEST_SIZE,
2869                         .halg.statesize = sizeof(struct talitos_export_state),
2870                         .halg.base = {
2871                                 .cra_name = "sha512",
2872                                 .cra_driver_name = "sha512-talitos",
2873                                 .cra_blocksize = SHA512_BLOCK_SIZE,
2874                                 .cra_flags = CRYPTO_ALG_ASYNC,
2875                         }
2876                 },
2877                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2878                                      DESC_HDR_SEL0_MDEUB |
2879                                      DESC_HDR_MODE0_MDEUB_SHA512,
2880         },
2881         {       .type = CRYPTO_ALG_TYPE_AHASH,
2882                 .alg.hash = {
2883                         .halg.digestsize = MD5_DIGEST_SIZE,
2884                         .halg.statesize = sizeof(struct talitos_export_state),
2885                         .halg.base = {
2886                                 .cra_name = "hmac(md5)",
2887                                 .cra_driver_name = "hmac-md5-talitos",
2888                                 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
2889                                 .cra_flags = CRYPTO_ALG_ASYNC,
2890                         }
2891                 },
2892                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2893                                      DESC_HDR_SEL0_MDEUA |
2894                                      DESC_HDR_MODE0_MDEU_MD5,
2895         },
2896         {       .type = CRYPTO_ALG_TYPE_AHASH,
2897                 .alg.hash = {
2898                         .halg.digestsize = SHA1_DIGEST_SIZE,
2899                         .halg.statesize = sizeof(struct talitos_export_state),
2900                         .halg.base = {
2901                                 .cra_name = "hmac(sha1)",
2902                                 .cra_driver_name = "hmac-sha1-talitos",
2903                                 .cra_blocksize = SHA1_BLOCK_SIZE,
2904                                 .cra_flags = CRYPTO_ALG_ASYNC,
2905                         }
2906                 },
2907                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2908                                      DESC_HDR_SEL0_MDEUA |
2909                                      DESC_HDR_MODE0_MDEU_SHA1,
2910         },
2911         {       .type = CRYPTO_ALG_TYPE_AHASH,
2912                 .alg.hash = {
2913                         .halg.digestsize = SHA224_DIGEST_SIZE,
2914                         .halg.statesize = sizeof(struct talitos_export_state),
2915                         .halg.base = {
2916                                 .cra_name = "hmac(sha224)",
2917                                 .cra_driver_name = "hmac-sha224-talitos",
2918                                 .cra_blocksize = SHA224_BLOCK_SIZE,
2919                                 .cra_flags = CRYPTO_ALG_ASYNC,
2920                         }
2921                 },
2922                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2923                                      DESC_HDR_SEL0_MDEUA |
2924                                      DESC_HDR_MODE0_MDEU_SHA224,
2925         },
2926         {       .type = CRYPTO_ALG_TYPE_AHASH,
2927                 .alg.hash = {
2928                         .halg.digestsize = SHA256_DIGEST_SIZE,
2929                         .halg.statesize = sizeof(struct talitos_export_state),
2930                         .halg.base = {
2931                                 .cra_name = "hmac(sha256)",
2932                                 .cra_driver_name = "hmac-sha256-talitos",
2933                                 .cra_blocksize = SHA256_BLOCK_SIZE,
2934                                 .cra_flags = CRYPTO_ALG_ASYNC,
2935                         }
2936                 },
2937                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2938                                      DESC_HDR_SEL0_MDEUA |
2939                                      DESC_HDR_MODE0_MDEU_SHA256,
2940         },
2941         {       .type = CRYPTO_ALG_TYPE_AHASH,
2942                 .alg.hash = {
2943                         .halg.digestsize = SHA384_DIGEST_SIZE,
2944                         .halg.statesize = sizeof(struct talitos_export_state),
2945                         .halg.base = {
2946                                 .cra_name = "hmac(sha384)",
2947                                 .cra_driver_name = "hmac-sha384-talitos",
2948                                 .cra_blocksize = SHA384_BLOCK_SIZE,
2949                                 .cra_flags = CRYPTO_ALG_ASYNC,
2950                         }
2951                 },
2952                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2953                                      DESC_HDR_SEL0_MDEUB |
2954                                      DESC_HDR_MODE0_MDEUB_SHA384,
2955         },
2956         {       .type = CRYPTO_ALG_TYPE_AHASH,
2957                 .alg.hash = {
2958                         .halg.digestsize = SHA512_DIGEST_SIZE,
2959                         .halg.statesize = sizeof(struct talitos_export_state),
2960                         .halg.base = {
2961                                 .cra_name = "hmac(sha512)",
2962                                 .cra_driver_name = "hmac-sha512-talitos",
2963                                 .cra_blocksize = SHA512_BLOCK_SIZE,
2964                                 .cra_flags = CRYPTO_ALG_ASYNC,
2965                         }
2966                 },
2967                 .desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
2968                                      DESC_HDR_SEL0_MDEUB |
2969                                      DESC_HDR_MODE0_MDEUB_SHA512,
2970         }
2971 };
2972
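/*
 * talitos_crypto_alg - per-device instance of one algorithm template,
 * linked into the owning device's priv->alg_list once registered.
 */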
2973 struct talitos_crypto_alg {
2974         struct list_head entry;
2975         struct device *dev;
2976         struct talitos_alg_template algt;
2977 };
2978
2979 static int talitos_init_common(struct talitos_ctx *ctx,
2980                                struct talitos_crypto_alg *talitos_alg)
2981 {
2982         struct talitos_private *priv;
2983
2984         /* update context with ptr to dev */
2985         ctx->dev = talitos_alg->dev;
2986
2987         /* assign SEC channel to tfm in round-robin fashion */
2988         priv = dev_get_drvdata(ctx->dev);
2989         ctx->ch = atomic_inc_return(&priv->last_chan) &
2990                   (priv->num_channels - 1);
2991
2992         /* copy descriptor header template value */
2993         ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;
2994
2995         /* select done notification */
2996         ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;
2997
2998         return 0;
2999 }
3000
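/*
 * Resolve the talitos_crypto_alg wrapping this tfm's crypto_alg.  AHASH
 * algorithms need a different container_of() because their crypto_alg is
 * embedded inside an ahash_alg rather than used directly.
 */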
3001 static int talitos_cra_init(struct crypto_tfm *tfm)
3002 {
3003         struct crypto_alg *alg = tfm->__crt_alg;
3004         struct talitos_crypto_alg *talitos_alg;
3005         struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3006
3007         if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_AHASH)
3008                 talitos_alg = container_of(__crypto_ahash_alg(alg),
3009                                            struct talitos_crypto_alg,
3010                                            algt.alg.hash);
3011         else
3012                 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3013                                            algt.alg.crypto);
3014
3015         return talitos_init_common(ctx, talitos_alg);
3016 }
3017
3018 static int talitos_cra_init_aead(struct crypto_aead *tfm)
3019 {
3020         struct aead_alg *alg = crypto_aead_alg(tfm);
3021         struct talitos_crypto_alg *talitos_alg;
3022         struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
3023
3024         talitos_alg = container_of(alg, struct talitos_crypto_alg,
3025                                    algt.alg.aead);
3026
3027         return talitos_init_common(ctx, talitos_alg);
3028 }
3029
3030 static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3031 {
3032         struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3033
3034         talitos_cra_init(tfm);
3035
3036         ctx->keylen = 0;
3037         crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3038                                  sizeof(struct talitos_ahash_req_ctx));
3039
3040         return 0;
3041 }
3042
3043 static void talitos_cra_exit(struct crypto_tfm *tfm)
3044 {
3045         struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3046         struct device *dev = ctx->dev;
3047
3048         if (ctx->keylen)
3049                 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3050 }
3051
3052 /*
3053  * Given the alg's descriptor header template, determine whether the
3054  * descriptor type and the required primary/secondary execution units
3055  * match the hardware capabilities described in the device tree node.
3056  */
3057 static int hw_supports(struct device *dev, __be32 desc_hdr_template)
3058 {
3059         struct talitos_private *priv = dev_get_drvdata(dev);
3060         int ret;
3061
3062         ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
3063               (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
3064
3065         if (SECONDARY_EU(desc_hdr_template))
3066                 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
3067                               & priv->exec_units);
3068
3069         return ret;
3070 }
3071
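/*
 * Undo talitos_probe(): unregister algorithms and the RNG, then release
 * the IRQs and kill the completion tasklets.  devm-allocated resources
 * are released by the driver core.
 */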
3072 static int talitos_remove(struct platform_device *ofdev)
3073 {
3074         struct device *dev = &ofdev->dev;
3075         struct talitos_private *priv = dev_get_drvdata(dev);
3076         struct talitos_crypto_alg *t_alg, *n;
3077         int i;
3078
3079         list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
3080                 switch (t_alg->algt.type) {
3081                 case CRYPTO_ALG_TYPE_ABLKCIPHER:
3082                         break;
3083                 case CRYPTO_ALG_TYPE_AEAD:
3084                         crypto_unregister_aead(&t_alg->algt.alg.aead);
3085                         break;
3086                 case CRYPTO_ALG_TYPE_AHASH:
3087                         crypto_unregister_ahash(&t_alg->algt.alg.hash);
3088                         break;
3089                 }
3090                 list_del(&t_alg->entry);
3091         }
3092
3093         if (hw_supports(dev, DESC_HDR_SEL0_RNG))
3094                 talitos_unregister_rng(dev);
3095
3096         for (i = 0; i < 2; i++)
3097                 if (priv->irq[i]) {
3098                         free_irq(priv->irq[i], dev);
3099                         irq_dispose_mapping(priv->irq[i]);
3100                 }
3101
3102         tasklet_kill(&priv->done_task[0]);
3103         if (priv->irq[1])
3104                 tasklet_kill(&priv->done_task[1]);
3105
3106         return 0;
3107 }
3108
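/*
 * Instantiate a talitos_crypto_alg from a driver_algs[] template and wire
 * up the type-specific operations; returns ERR_PTR(-ENOTSUPP) for
 * algorithms the hardware revision cannot handle.
 */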
3109 static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
3110                                                     struct talitos_alg_template
3111                                                            *template)
3112 {
3113         struct talitos_private *priv = dev_get_drvdata(dev);
3114         struct talitos_crypto_alg *t_alg;
3115         struct crypto_alg *alg;
3116
3117         t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
3118                              GFP_KERNEL);
3119         if (!t_alg)
3120                 return ERR_PTR(-ENOMEM);
3121
3122         t_alg->algt = *template;
3123
3124         switch (t_alg->algt.type) {
3125         case CRYPTO_ALG_TYPE_ABLKCIPHER:
3126                 alg = &t_alg->algt.alg.crypto;
3127                 alg->cra_init = talitos_cra_init;
3128                 alg->cra_exit = talitos_cra_exit;
3129                 alg->cra_type = &crypto_ablkcipher_type;
3130                 alg->cra_ablkcipher.setkey = ablkcipher_setkey;
3131                 alg->cra_ablkcipher.encrypt = ablkcipher_encrypt;
3132                 alg->cra_ablkcipher.decrypt = ablkcipher_decrypt;
3133                 alg->cra_ablkcipher.geniv = "eseqiv";
3134                 break;
3135         case CRYPTO_ALG_TYPE_AEAD:
3136                 alg = &t_alg->algt.alg.aead.base;
3137                 alg->cra_exit = talitos_cra_exit;
3138                 t_alg->algt.alg.aead.init = talitos_cra_init_aead;
3139                 t_alg->algt.alg.aead.setkey = aead_setkey;
3140                 t_alg->algt.alg.aead.encrypt = aead_encrypt;
3141                 t_alg->algt.alg.aead.decrypt = aead_decrypt;
3142                 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
3143                     !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
3144                         devm_kfree(dev, t_alg);
3145                         return ERR_PTR(-ENOTSUPP);
3146                 }
3147                 break;
3148         case CRYPTO_ALG_TYPE_AHASH:
3149                 alg = &t_alg->algt.alg.hash.halg.base;
3150                 alg->cra_init = talitos_cra_init_ahash;
3151                 alg->cra_exit = talitos_cra_exit;
3152                 t_alg->algt.alg.hash.init = ahash_init;
3153                 t_alg->algt.alg.hash.update = ahash_update;
3154                 t_alg->algt.alg.hash.final = ahash_final;
3155                 t_alg->algt.alg.hash.finup = ahash_finup;
3156                 t_alg->algt.alg.hash.digest = ahash_digest;
3157                 if (!strncmp(alg->cra_name, "hmac", 4))
3158                         t_alg->algt.alg.hash.setkey = ahash_setkey;
3159                 t_alg->algt.alg.hash.import = ahash_import;
3160                 t_alg->algt.alg.hash.export = ahash_export;
3161
3162                 if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
3163                     !strncmp(alg->cra_name, "hmac", 4)) {
3164                         devm_kfree(dev, t_alg);
3165                         return ERR_PTR(-ENOTSUPP);
3166                 }
3167                 if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
3168                     (!strcmp(alg->cra_name, "sha224") ||
3169                      !strcmp(alg->cra_name, "hmac(sha224)"))) {
3170                         t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
3171                         t_alg->algt.desc_hdr_template =
3172                                         DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
3173                                         DESC_HDR_SEL0_MDEUA |
3174                                         DESC_HDR_MODE0_MDEU_SHA256;
3175                 }
3176                 break;
3177         default:
3178                 dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
3179                 devm_kfree(dev, t_alg);
3180                 return ERR_PTR(-EINVAL);
3181         }
3182
3183         alg->cra_module = THIS_MODULE;
3184         if (t_alg->algt.priority)
3185                 alg->cra_priority = t_alg->algt.priority;
3186         else
3187                 alg->cra_priority = TALITOS_CRA_PRIORITY;
3188         if (has_ftr_sec1(priv))
3189                 alg->cra_alignmask = 3;
3190         else
3191                 alg->cra_alignmask = 0;
3192         alg->cra_ctxsize = sizeof(struct talitos_ctx);
3193         alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;
3194
3195         t_alg->dev = dev;
3196
3197         return t_alg;
3198 }
3199
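/*
 * Map and request the interrupt lines.  SEC1 uses a single irq for all
 * channels; SEC2+ may provide a second irq, in which case channels 0/2
 * are handled on the primary line and channels 1/3 on the secondary one.
 */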
3200 static int talitos_probe_irq(struct platform_device *ofdev)
3201 {
3202         struct device *dev = &ofdev->dev;
3203         struct device_node *np = ofdev->dev.of_node;
3204         struct talitos_private *priv = dev_get_drvdata(dev);
3205         int err;
3206         bool is_sec1 = has_ftr_sec1(priv);
3207
3208         priv->irq[0] = irq_of_parse_and_map(np, 0);
3209         if (!priv->irq[0]) {
3210                 dev_err(dev, "failed to map irq\n");
3211                 return -EINVAL;
3212         }
3213         if (is_sec1) {
3214                 err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
3215                                   dev_driver_string(dev), dev);
3216                 goto primary_out;
3217         }
3218
3219         priv->irq[1] = irq_of_parse_and_map(np, 1);
3220
3221         /* no secondary irq: have the primary irq serve all four channels */
3222         if (!priv->irq[1]) {
3223                 err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
3224                                   dev_driver_string(dev), dev);
3225                 goto primary_out;
3226         }
3227
3228         err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
3229                           dev_driver_string(dev), dev);
3230         if (err)
3231                 goto primary_out;
3232
3233         /* request the secondary irq line (serves channels 1 and 3) */
3234         err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
3235                           dev_driver_string(dev), dev);
3236         if (err) {
3237                 dev_err(dev, "failed to request secondary irq\n");
3238                 irq_dispose_mapping(priv->irq[1]);
3239                 priv->irq[1] = 0;
3240         }
3241
3242         return err;
3243
3244 primary_out:
3245         if (err) {
3246                 dev_err(dev, "failed to request primary irq\n");
3247                 irq_dispose_mapping(priv->irq[0]);
3248                 priv->irq[0] = 0;
3249         }
3250
3251         return err;
3252 }
3253
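/*
 * Probe: map the register block, read the SEC capabilities from the
 * device tree, set up per-channel state, the RNG (when present) and the
 * completion tasklets, then register every algorithm the hardware
 * supports.
 */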
3254 static int talitos_probe(struct platform_device *ofdev)
3255 {
3256         struct device *dev = &ofdev->dev;
3257         struct device_node *np = ofdev->dev.of_node;
3258         struct talitos_private *priv;
3259         int i, err;
3260         int stride;
3261         struct resource *res;
3262
3263         priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
3264         if (!priv)
3265                 return -ENOMEM;
3266
3267         INIT_LIST_HEAD(&priv->alg_list);
3268
3269         dev_set_drvdata(dev, priv);
3270
3271         priv->ofdev = ofdev;
3272
3273         spin_lock_init(&priv->reg_lock);
3274
3275         res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
3276         if (!res)
3277                 return -ENXIO;
3278         priv->reg = devm_ioremap(dev, res->start, resource_size(res));
3279         if (!priv->reg) {
3280                 dev_err(dev, "failed to ioremap registers\n");
3281                 err = -ENOMEM;
3282                 goto err_out;
3283         }
3284
3285         /* get SEC version capabilities from device tree */
3286         of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
3287         of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
3288         of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
3289         of_property_read_u32(np, "fsl,descriptor-types-mask",
3290                              &priv->desc_types);
3291
3292         if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
3293             !priv->exec_units || !priv->desc_types) {
3294                 dev_err(dev, "invalid property data in device tree node\n");
3295                 err = -EINVAL;
3296                 goto err_out;
3297         }
3298
3299         if (of_device_is_compatible(np, "fsl,sec3.0"))
3300                 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
3301
3302         if (of_device_is_compatible(np, "fsl,sec2.1"))
3303                 priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
3304                                   TALITOS_FTR_SHA224_HWINIT |
3305                                   TALITOS_FTR_HMAC_OK;
3306
3307         if (of_device_is_compatible(np, "fsl,sec1.0"))
3308                 priv->features |= TALITOS_FTR_SEC1;
3309
3310         if (of_device_is_compatible(np, "fsl,sec1.2")) {
3311                 priv->reg_deu = priv->reg + TALITOS12_DEU;
3312                 priv->reg_aesu = priv->reg + TALITOS12_AESU;
3313                 priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
3314                 stride = TALITOS1_CH_STRIDE;
3315         } else if (of_device_is_compatible(np, "fsl,sec1.0")) {
3316                 priv->reg_deu = priv->reg + TALITOS10_DEU;
3317                 priv->reg_aesu = priv->reg + TALITOS10_AESU;
3318                 priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
3319                 priv->reg_afeu = priv->reg + TALITOS10_AFEU;
3320                 priv->reg_rngu = priv->reg + TALITOS10_RNGU;
3321                 priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
3322                 stride = TALITOS1_CH_STRIDE;
3323         } else {
3324                 priv->reg_deu = priv->reg + TALITOS2_DEU;
3325                 priv->reg_aesu = priv->reg + TALITOS2_AESU;
3326                 priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
3327                 priv->reg_afeu = priv->reg + TALITOS2_AFEU;
3328                 priv->reg_rngu = priv->reg + TALITOS2_RNGU;
3329                 priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
3330                 priv->reg_keu = priv->reg + TALITOS2_KEU;
3331                 priv->reg_crcu = priv->reg + TALITOS2_CRCU;
3332                 stride = TALITOS2_CH_STRIDE;
3333         }
3334
3335         err = talitos_probe_irq(ofdev);
3336         if (err)
3337                 goto err_out;
3338
3339         if (of_device_is_compatible(np, "fsl,sec1.0")) {
3340                 if (priv->num_channels == 1)
3341                         tasklet_init(&priv->done_task[0], talitos1_done_ch0,
3342                                      (unsigned long)dev);
3343                 else
3344                         tasklet_init(&priv->done_task[0], talitos1_done_4ch,
3345                                      (unsigned long)dev);
3346         } else {
3347                 if (priv->irq[1]) {
3348                         tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
3349                                      (unsigned long)dev);
3350                         tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
3351                                      (unsigned long)dev);
3352                 } else if (priv->num_channels == 1) {
3353                         tasklet_init(&priv->done_task[0], talitos2_done_ch0,
3354                                      (unsigned long)dev);
3355                 } else {
3356                         tasklet_init(&priv->done_task[0], talitos2_done_4ch,
3357                                      (unsigned long)dev);
3358                 }
3359         }
3360
3361         priv->chan = devm_kcalloc(dev,
3362                                   priv->num_channels,
3363                                   sizeof(struct talitos_channel),
3364                                   GFP_KERNEL);
3365         if (!priv->chan) {
3366                 dev_err(dev, "failed to allocate channel management space\n");
3367                 err = -ENOMEM;
3368                 goto err_out;
3369         }
3370
3371         priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);
3372
3373         for (i = 0; i < priv->num_channels; i++) {
3374                 priv->chan[i].reg = priv->reg + stride * (i + 1);
3375                 if (!priv->irq[1] || !(i & 1))
3376                         priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;
3377
3378                 spin_lock_init(&priv->chan[i].head_lock);
3379                 spin_lock_init(&priv->chan[i].tail_lock);
3380
3381                 priv->chan[i].fifo = devm_kcalloc(dev,
3382                                                 priv->fifo_len,
3383                                                 sizeof(struct talitos_request),
3384                                                 GFP_KERNEL);
3385                 if (!priv->chan[i].fifo) {
3386                         dev_err(dev, "failed to allocate request fifo %d\n", i);
3387                         err = -ENOMEM;
3388                         goto err_out;
3389                 }
3390
3391                 atomic_set(&priv->chan[i].submit_count,
3392                            -(priv->chfifo_len - 1));
3393         }
3394
3395         dma_set_mask(dev, DMA_BIT_MASK(36));
3396
3397         /* reset and initialize the h/w */
3398         err = init_device(dev);
3399         if (err) {
3400                 dev_err(dev, "failed to initialize device\n");
3401                 goto err_out;
3402         }
3403
3404         /* register the RNG, if available */
3405         if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
3406                 err = talitos_register_rng(dev);
3407                 if (err) {
3408                         dev_err(dev, "failed to register hwrng: %d\n", err);
3409                         goto err_out;
3410                 } else
3411                         dev_info(dev, "hwrng\n");
3412         }
3413
3414         /* register crypto algorithms the device supports */
3415         for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3416                 if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
3417                         struct talitos_crypto_alg *t_alg;
3418                         struct crypto_alg *alg = NULL;
3419
3420                         t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
3421                         if (IS_ERR(t_alg)) {
3422                                 err = PTR_ERR(t_alg);
3423                                 if (err == -ENOTSUPP)
3424                                         continue;
3425                                 goto err_out;
3426                         }
3427
3428                         switch (t_alg->algt.type) {
3429                         case CRYPTO_ALG_TYPE_ABLKCIPHER:
3430                                 err = crypto_register_alg(
3431                                                 &t_alg->algt.alg.crypto);
3432                                 alg = &t_alg->algt.alg.crypto;
3433                                 break;
3434
3435                         case CRYPTO_ALG_TYPE_AEAD:
3436                                 err = crypto_register_aead(
3437                                         &t_alg->algt.alg.aead);
3438                                 alg = &t_alg->algt.alg.aead.base;
3439                                 break;
3440
3441                         case CRYPTO_ALG_TYPE_AHASH:
3442                                 err = crypto_register_ahash(
3443                                                 &t_alg->algt.alg.hash);
3444                                 alg = &t_alg->algt.alg.hash.halg.base;
3445                                 break;
3446                         }
3447                         if (err) {
3448                                 dev_err(dev, "%s alg registration failed\n",
3449                                         alg->cra_driver_name);
3450                                 devm_kfree(dev, t_alg);
3451                         } else
3452                                 list_add_tail(&t_alg->entry, &priv->alg_list);
3453                 }
3454         }
3455         if (!list_empty(&priv->alg_list))
3456                 dev_info(dev, "%s algorithms registered in /proc/crypto\n",
3457                          (char *)of_get_property(np, "compatible", NULL));
3458
3459         return 0;
3460
3461 err_out:
3462         talitos_remove(ofdev);
3463
3464         return err;
3465 }
3466
3467 static const struct of_device_id talitos_match[] = {
3468 #ifdef CONFIG_CRYPTO_DEV_TALITOS1
3469         {
3470                 .compatible = "fsl,sec1.0",
3471         },
3472 #endif
3473 #ifdef CONFIG_CRYPTO_DEV_TALITOS2
3474         {
3475                 .compatible = "fsl,sec2.0",
3476         },
3477 #endif
3478         {},
3479 };
3480 MODULE_DEVICE_TABLE(of, talitos_match);
3481
3482 static struct platform_driver talitos_driver = {
3483         .driver = {
3484                 .name = "talitos",
3485                 .of_match_table = talitos_match,
3486         },
3487         .probe = talitos_probe,
3488         .remove = talitos_remove,
3489 };
3490
3491 module_platform_driver(talitos_driver);
3492
3493 MODULE_LICENSE("GPL");
3494 MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
3495 MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");