GNU Linux-libre 4.14.266-gnu1
drivers/crypto/inside-secure/safexcel_ring.c
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 *
 * This file is licensed under the terms of the GNU General Public
 * License version 2. This program is licensed "as is" without any
 * warranty of any kind, whether express or implied.
 */

#include <linux/dma-mapping.h>
#include <linux/spinlock.h>

#include "safexcel.h"

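/* Set up the DMA-coherent command (CDR) and result (RDR) descriptor rings
 * for one ring pair. Both rings hold EIP197_DEFAULT_RING_SIZE entries and
 * are released automatically through the device-managed (dmam_*) allocator.
 */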
int safexcel_init_ring_descriptors(struct safexcel_crypto_priv *priv,
                                   struct safexcel_ring *cdr,
                                   struct safexcel_ring *rdr)
{
        cdr->offset = sizeof(u32) * priv->config.cd_offset;
        cdr->base = dmam_alloc_coherent(priv->dev,
                                        cdr->offset * EIP197_DEFAULT_RING_SIZE,
                                        &cdr->base_dma, GFP_KERNEL);
        if (!cdr->base)
                return -ENOMEM;
        cdr->write = cdr->base;
        cdr->base_end = cdr->base + cdr->offset * EIP197_DEFAULT_RING_SIZE;
        cdr->read = cdr->base;

        rdr->offset = sizeof(u32) * priv->config.rd_offset;
        rdr->base = dmam_alloc_coherent(priv->dev,
                                        rdr->offset * EIP197_DEFAULT_RING_SIZE,
                                        &rdr->base_dma, GFP_KERNEL);
        if (!rdr->base)
                return -ENOMEM;
        rdr->write = rdr->base;
        rdr->base_end = rdr->base + rdr->offset * EIP197_DEFAULT_RING_SIZE;
        rdr->read = rdr->base;

        return 0;
}

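/* Pick the ring to use for the next request, distributing requests
 * round-robin over all available rings.
 */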
inline int safexcel_select_ring(struct safexcel_crypto_priv *priv)
{
        return (atomic_inc_return(&priv->ring_used) % priv->config.rings);
}

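/* Reserve the next free descriptor slot by advancing the write pointer,
 * wrapping around at the end of the ring. Fails with -ENOMEM when the ring
 * is full.
 */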
static void *safexcel_ring_next_wptr(struct safexcel_crypto_priv *priv,
                                     struct safexcel_ring *ring)
{
        void *ptr = ring->write;

        if (ring->nr == EIP197_DEFAULT_RING_SIZE - 1)
                return ERR_PTR(-ENOMEM);

        ring->write += ring->offset;
        if (ring->write == ring->base_end)
                ring->write = ring->base;

        ring->nr++;
        return ptr;
}

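/* Consume the oldest pending descriptor by advancing the read pointer,
 * wrapping around at the end of the ring. Fails with -ENOENT when the ring
 * is empty.
 */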
void *safexcel_ring_next_rptr(struct safexcel_crypto_priv *priv,
                              struct safexcel_ring *ring)
{
        void *ptr = ring->read;

        if (!ring->nr)
                return ERR_PTR(-ENOENT);

        ring->read += ring->offset;
        if (ring->read == ring->base_end)
                ring->read = ring->base;

        ring->nr--;
        return ptr;
}

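/* Undo the most recent descriptor reservation, e.g. when building a chain
 * of descriptors fails midway.
 */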
void safexcel_ring_rollback_wptr(struct safexcel_crypto_priv *priv,
                                 struct safexcel_ring *ring)
{
        if (!ring->nr)
                return;

        if (ring->write == ring->base)
                ring->write = ring->base_end - ring->offset;
        else
                ring->write -= ring->offset;

        ring->nr--;
}

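/* Add a command descriptor to the command descriptor ring (CDR). The first
 * descriptor of a chain additionally carries the total packet length, the
 * context record address and a token array initialised to NOP instructions.
 */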
struct safexcel_command_desc *safexcel_add_cdesc(struct safexcel_crypto_priv *priv,
                                                 int ring_id,
                                                 bool first, bool last,
                                                 dma_addr_t data, u32 data_len,
                                                 u32 full_data_len,
                                                 dma_addr_t context)
{
        struct safexcel_command_desc *cdesc;
        int i;

        cdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].cdr);
        if (IS_ERR(cdesc))
                return cdesc;

        memset(cdesc, 0, sizeof(struct safexcel_command_desc));

        cdesc->first_seg = first;
        cdesc->last_seg = last;
        cdesc->particle_size = data_len;
        cdesc->data_lo = lower_32_bits(data);
        cdesc->data_hi = upper_32_bits(data);

        if (first && context) {
                struct safexcel_token *token =
                        (struct safexcel_token *)cdesc->control_data.token;

                cdesc->control_data.packet_length = full_data_len;
                cdesc->control_data.options = EIP197_OPTION_MAGIC_VALUE |
                                              EIP197_OPTION_64BIT_CTX |
                                              EIP197_OPTION_CTX_CTRL_IN_CMD;
                cdesc->control_data.context_lo =
                        (lower_32_bits(context) & GENMASK(31, 2)) >> 2;
                cdesc->control_data.context_hi = upper_32_bits(context);

                /* TODO: large xform HMAC with SHA-384/512 uses refresh = 3 */
                cdesc->control_data.refresh = 2;

                for (i = 0; i < EIP197_MAX_TOKENS; i++)
                        eip197_noop_token(&token[i]);
        }

        return cdesc;
}

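/* Add a result descriptor to the result descriptor ring (RDR), pointing at
 * the destination data buffer.
 */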
struct safexcel_result_desc *safexcel_add_rdesc(struct safexcel_crypto_priv *priv,
                                                int ring_id,
                                                bool first, bool last,
                                                dma_addr_t data, u32 len)
{
        struct safexcel_result_desc *rdesc;

        rdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].rdr);
        if (IS_ERR(rdesc))
                return rdesc;

        memset(rdesc, 0, sizeof(struct safexcel_result_desc));

        rdesc->first_seg = first;
        rdesc->last_seg = last;
        rdesc->particle_size = len;
        rdesc->data_lo = lower_32_bits(data);
        rdesc->data_hi = upper_32_bits(data);

        return rdesc;
}