target/linux/mediatek/patches-5.4/0500-v5.6-crypto-backport-inside-secure.patch
--- a/drivers/crypto/inside-secure/safexcel.c
+++ b/drivers/crypto/inside-secure/safexcel.c
@@ -75,9 +75,9 @@
}

static u32 eip197_trc_cache_probe(struct safexcel_crypto_priv *priv,
- int maxbanks, u32 probemask)
+ int maxbanks, u32 probemask, u32 stride)
{
- u32 val, addrhi, addrlo, addrmid;
+ u32 val, addrhi, addrlo, addrmid, addralias, delta, marker;
int actbank;

/*
@@ -87,32 +87,37 @@
addrhi = 1 << (16 + maxbanks);
addrlo = 0;
actbank = min(maxbanks - 1, 0);
- while ((addrhi - addrlo) > 32) {
+ while ((addrhi - addrlo) > stride) {
/* write marker to lowest address in top half */
addrmid = (addrhi + addrlo) >> 1;
+ marker = (addrmid ^ 0xabadbabe) & probemask; /* Unique */
eip197_trc_cache_banksel(priv, addrmid, &actbank);
- writel((addrmid | (addrlo << 16)) & probemask,
+ writel(marker,
priv->base + EIP197_CLASSIFICATION_RAMS +
(addrmid & 0xffff));

- /* write marker to lowest address in bottom half */
- eip197_trc_cache_banksel(priv, addrlo, &actbank);
- writel((addrlo | (addrhi << 16)) & probemask,
- priv->base + EIP197_CLASSIFICATION_RAMS +
- (addrlo & 0xffff));
+ /* write invalid markers to possible aliases */
+ delta = 1 << __fls(addrmid);
+ while (delta >= stride) {
+ addralias = addrmid - delta;
+ eip197_trc_cache_banksel(priv, addralias, &actbank);
+ writel(~marker,
+ priv->base + EIP197_CLASSIFICATION_RAMS +
+ (addralias & 0xffff));
+ delta >>= 1;
+ }

/* read back marker from top half */
eip197_trc_cache_banksel(priv, addrmid, &actbank);
val = readl(priv->base + EIP197_CLASSIFICATION_RAMS +
(addrmid & 0xffff));

- if (val == ((addrmid | (addrlo << 16)) & probemask)) {
+ if ((val & probemask) == marker)
/* read back correct, continue with top half */
addrlo = addrmid;
- } else {
+ else
/* not read back correct, continue with bottom half */
addrhi = addrmid;
- }
}
return addrhi;
}
@@ -150,7 +155,7 @@
htable_offset + i * sizeof(u32));
}

-static void eip197_trc_cache_init(struct safexcel_crypto_priv *priv)
+static int eip197_trc_cache_init(struct safexcel_crypto_priv *priv)
{
u32 val, dsize, asize;
int cs_rc_max, cs_ht_wc, cs_trc_rec_wc, cs_trc_lg_rec_wc;
@@ -183,7 +188,7 @@
writel(val, priv->base + EIP197_TRC_PARAMS);

/* Probed data RAM size in bytes */
- dsize = eip197_trc_cache_probe(priv, maxbanks, 0xffffffff);
+ dsize = eip197_trc_cache_probe(priv, maxbanks, 0xffffffff, 32);

/*
* Now probe the administration RAM size pretty much the same way
@@ -196,11 +201,18 @@
writel(val, priv->base + EIP197_TRC_PARAMS);

/* Probed admin RAM size in admin words */
- asize = eip197_trc_cache_probe(priv, 0, 0xbfffffff) >> 4;
+ asize = eip197_trc_cache_probe(priv, 0, 0x3fffffff, 16) >> 4;

/* Clear any ECC errors detected while probing! */
writel(0, priv->base + EIP197_TRC_ECCCTRL);

+ /* Sanity check probing results */
+ if (dsize < EIP197_MIN_DSIZE || asize < EIP197_MIN_ASIZE) {
+ dev_err(priv->dev, "Record cache probing failed (%d,%d).",
+ dsize, asize);
+ return -ENODEV;
+ }
+
/*
* Determine optimal configuration from RAM sizes
* Note that we assume that the physical RAM configuration is sane
@@ -251,6 +263,7 @@

dev_info(priv->dev, "TRC init: %dd,%da (%dr,%dh)\n",
dsize, asize, cs_rc_max, cs_ht_wc + cs_ht_wc);
+ return 0;
}

static void eip197_init_firmware(struct safexcel_crypto_priv *priv)
@@ -298,13 +311,14 @@
static int eip197_write_firmware(struct safexcel_crypto_priv *priv,
const struct firmware *fw)
{
- const u32 *data = (const u32 *)fw->data;
+ const __be32 *data = (const __be32 *)fw->data;
int i;

/* Write the firmware */
for (i = 0; i < fw->size / sizeof(u32); i++)
writel(be32_to_cpu(data[i]),
- priv->base + EIP197_CLASSIFICATION_RAMS + i * sizeof(u32));
+ priv->base + EIP197_CLASSIFICATION_RAMS +
+ i * sizeof(__be32));

/* Exclude final 2 NOPs from size */
return i - EIP197_FW_TERMINAL_NOPS;
@@ -471,6 +485,14 @@
cd_fetch_cnt = ((1 << priv->hwconfig.hwcfsize) /
cd_size_rnd) - 1;
}
+ /*
+ * Since we're using command desc's way larger than formally specified,
+ * we need to check whether we can fit even 1 for low-end EIP196's!
+ */
+ if (!cd_fetch_cnt) {
+ dev_err(priv->dev, "Unable to fit even 1 command desc!\n");
+ return -ENODEV;
+ }

for (i = 0; i < priv->config.rings; i++) {
/* ring base address */
@@ -479,12 +501,12 @@
writel(upper_32_bits(priv->ring[i].cdr.base_dma),
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_RING_BASE_ADDR_HI);

- writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.cd_offset << 16) |
- priv->config.cd_size,
+ writel(EIP197_xDR_DESC_MODE_64BIT | EIP197_CDR_DESC_MODE_ADCP |
+ (priv->config.cd_offset << 14) | priv->config.cd_size,
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_DESC_SIZE);
writel(((cd_fetch_cnt *
(cd_size_rnd << priv->hwconfig.hwdataw)) << 16) |
- (cd_fetch_cnt * priv->config.cd_offset),
+ (cd_fetch_cnt * (priv->config.cd_offset / sizeof(u32))),
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_CFG);

/* Configure DMA tx control */
@@ -527,13 +549,13 @@
writel(upper_32_bits(priv->ring[i].rdr.base_dma),
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_RING_BASE_ADDR_HI);

- writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.rd_offset << 16) |
+ writel(EIP197_xDR_DESC_MODE_64BIT | (priv->config.rd_offset << 14) |
priv->config.rd_size,
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_DESC_SIZE);

writel(((rd_fetch_cnt *
(rd_size_rnd << priv->hwconfig.hwdataw)) << 16) |
- (rd_fetch_cnt * priv->config.rd_offset),
+ (rd_fetch_cnt * (priv->config.rd_offset / sizeof(u32))),
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_CFG);

/* Configure DMA tx control */
@@ -559,7 +581,7 @@
static int safexcel_hw_init(struct safexcel_crypto_priv *priv)
{
u32 val;
- int i, ret, pe;
+ int i, ret, pe, opbuflo, opbufhi;

dev_dbg(priv->dev, "HW init: using %d pipe(s) and %d ring(s)\n",
priv->config.pes, priv->config.rings);
@@ -595,8 +617,8 @@
writel(EIP197_DxE_THR_CTRL_RESET_PE,
EIP197_HIA_DFE_THR(priv) + EIP197_HIA_DFE_THR_CTRL(pe));

- if (priv->flags & SAFEXCEL_HW_EIP197)
- /* Reset HIA input interface arbiter (EIP197 only) */
+ if (priv->flags & EIP197_PE_ARB)
+ /* Reset HIA input interface arbiter (if present) */
writel(EIP197_HIA_RA_PE_CTRL_RESET,
EIP197_HIA_AIC(priv) + EIP197_HIA_RA_PE_CTRL(pe));

@@ -639,9 +661,16 @@
;

/* DMA transfer size to use */
+ if (priv->hwconfig.hwnumpes > 4) {
+ opbuflo = 9;
+ opbufhi = 10;
+ } else {
+ opbuflo = 7;
+ opbufhi = 8;
+ }
val = EIP197_HIA_DSE_CFG_DIS_DEBUG;
- val |= EIP197_HIA_DxE_CFG_MIN_DATA_SIZE(7) |
- EIP197_HIA_DxE_CFG_MAX_DATA_SIZE(8);
+ val |= EIP197_HIA_DxE_CFG_MIN_DATA_SIZE(opbuflo) |
+ EIP197_HIA_DxE_CFG_MAX_DATA_SIZE(opbufhi);
val |= EIP197_HIA_DxE_CFG_DATA_CACHE_CTRL(WR_CACHE_3BITS);
val |= EIP197_HIA_DSE_CFG_ALWAYS_BUFFERABLE;
/* FIXME: instability issues can occur for EIP97 but disabling
@@ -655,8 +684,8 @@
writel(0, EIP197_HIA_DSE_THR(priv) + EIP197_HIA_DSE_THR_CTRL(pe));

/* Configure the procesing engine thresholds */
- writel(EIP197_PE_OUT_DBUF_THRES_MIN(7) |
- EIP197_PE_OUT_DBUF_THRES_MAX(8),
+ writel(EIP197_PE_OUT_DBUF_THRES_MIN(opbuflo) |
+ EIP197_PE_OUT_DBUF_THRES_MAX(opbufhi),
EIP197_PE(priv) + EIP197_PE_OUT_DBUF_THRES(pe));

/* Processing Engine configuration */
@@ -696,7 +725,7 @@
writel(0,
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_PROC_PNTR);

- writel((EIP197_DEFAULT_RING_SIZE * priv->config.cd_offset) << 2,
+ writel((EIP197_DEFAULT_RING_SIZE * priv->config.cd_offset),
EIP197_HIA_CDR(priv, i) + EIP197_HIA_xDR_RING_SIZE);
}

@@ -719,7 +748,7 @@
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_PROC_PNTR);

/* Ring size */
- writel((EIP197_DEFAULT_RING_SIZE * priv->config.rd_offset) << 2,
+ writel((EIP197_DEFAULT_RING_SIZE * priv->config.rd_offset),
EIP197_HIA_RDR(priv, i) + EIP197_HIA_xDR_RING_SIZE);
}

@@ -736,19 +765,28 @@
/* Clear any HIA interrupt */
writel(GENMASK(30, 20), EIP197_HIA_AIC_G(priv) + EIP197_HIA_AIC_G_ACK);

- if (priv->flags & SAFEXCEL_HW_EIP197) {
- eip197_trc_cache_init(priv);
- priv->flags |= EIP197_TRC_CACHE;
+ if (priv->flags & EIP197_SIMPLE_TRC) {
+ writel(EIP197_STRC_CONFIG_INIT |
+ EIP197_STRC_CONFIG_LARGE_REC(EIP197_CS_TRC_REC_WC) |
+ EIP197_STRC_CONFIG_SMALL_REC(EIP197_CS_TRC_REC_WC),
+ priv->base + EIP197_STRC_CONFIG);
+ writel(EIP197_PE_EIP96_TOKEN_CTRL2_CTX_DONE,
+ EIP197_PE(priv) + EIP197_PE_EIP96_TOKEN_CTRL2(0));
+ } else if (priv->flags & SAFEXCEL_HW_EIP197) {
+ ret = eip197_trc_cache_init(priv);
+ if (ret)
+ return ret;
+ }

+ if (priv->flags & EIP197_ICE) {
ret = eip197_load_firmwares(priv);
if (ret)
return ret;
}

- safexcel_hw_setup_cdesc_rings(priv);
- safexcel_hw_setup_rdesc_rings(priv);
-
- return 0;
+ return safexcel_hw_setup_cdesc_rings(priv) ?:
+ safexcel_hw_setup_rdesc_rings(priv) ?:
+ 0;
}

/* Called with ring's lock taken */
@@ -836,20 +874,24 @@
spin_unlock_bh(&priv->ring[ring].lock);

/* let the RDR know we have pending descriptors */
- writel((rdesc * priv->config.rd_offset) << 2,
+ writel((rdesc * priv->config.rd_offset),
EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PREP_COUNT);

/* let the CDR know we have pending descriptors */
- writel((cdesc * priv->config.cd_offset) << 2,
+ writel((cdesc * priv->config.cd_offset),
EIP197_HIA_CDR(priv, ring) + EIP197_HIA_xDR_PREP_COUNT);
}

inline int safexcel_rdesc_check_errors(struct safexcel_crypto_priv *priv,
- struct safexcel_result_desc *rdesc)
+ void *rdp)
{
- if (likely((!rdesc->descriptor_overflow) &&
- (!rdesc->buffer_overflow) &&
- (!rdesc->result_data.error_code)))
+ struct safexcel_result_desc *rdesc = rdp;
+ struct result_data_desc *result_data = rdp + priv->config.res_offset;
+
+ if (likely((!rdesc->last_seg) || /* Rest only valid if last seg! */
+ ((!rdesc->descriptor_overflow) &&
+ (!rdesc->buffer_overflow) &&
+ (!result_data->error_code))))
return 0;

if (rdesc->descriptor_overflow)
@@ -858,13 +900,14 @@
if (rdesc->buffer_overflow)
dev_err(priv->dev, "Buffer overflow detected");

- if (rdesc->result_data.error_code & 0x4066) {
+ if (result_data->error_code & 0x4066) {
/* Fatal error (bits 1,2,5,6 & 14) */
dev_err(priv->dev,
"result descriptor error (%x)",
- rdesc->result_data.error_code);
+ result_data->error_code);
+
return -EIO;
- } else if (rdesc->result_data.error_code &
+ } else if (result_data->error_code &
(BIT(7) | BIT(4) | BIT(3) | BIT(0))) {
/*
* Give priority over authentication fails:
@@ -872,7 +915,7 @@
* something wrong with the input!
*/
return -EINVAL;
- } else if (rdesc->result_data.error_code & BIT(9)) {
+ } else if (result_data->error_code & BIT(9)) {
/* Authentication failed */
return -EBADMSG;
}
@@ -931,16 +974,18 @@
{
struct safexcel_command_desc *cdesc;
struct safexcel_result_desc *rdesc;
+ struct safexcel_token *dmmy;
int ret = 0;

/* Prepare command descriptor */
- cdesc = safexcel_add_cdesc(priv, ring, true, true, 0, 0, 0, ctxr_dma);
+ cdesc = safexcel_add_cdesc(priv, ring, true, true, 0, 0, 0, ctxr_dma,
+ &dmmy);
if (IS_ERR(cdesc))
return PTR_ERR(cdesc);

cdesc->control_data.type = EIP197_TYPE_EXTENDED;
cdesc->control_data.options = 0;
- cdesc->control_data.refresh = 0;
+ cdesc->control_data.context_lo &= ~EIP197_CONTEXT_SIZE_MASK;
cdesc->control_data.control0 = CONTEXT_CONTROL_INV_TR;

/* Prepare result descriptor */
@@ -1003,7 +1048,7 @@
acknowledge:
if (i)
writel(EIP197_xDR_PROC_xD_PKT(i) |
- EIP197_xDR_PROC_xD_COUNT(tot_descs * priv->config.rd_offset),
+ (tot_descs * priv->config.rd_offset),
EIP197_HIA_RDR(priv, ring) + EIP197_HIA_xDR_PROC_COUNT);

/* If the number of requests overflowed the counter, try to proceed more
@@ -1171,6 +1216,44 @@
&safexcel_alg_xts_aes,
&safexcel_alg_gcm,
&safexcel_alg_ccm,
+ &safexcel_alg_crc32,
+ &safexcel_alg_cbcmac,
+ &safexcel_alg_xcbcmac,
+ &safexcel_alg_cmac,
+ &safexcel_alg_chacha20,
+ &safexcel_alg_chachapoly,
+ &safexcel_alg_chachapoly_esp,
+ &safexcel_alg_sm3,
+ &safexcel_alg_hmac_sm3,
+ &safexcel_alg_ecb_sm4,
+ &safexcel_alg_cbc_sm4,
+ &safexcel_alg_ofb_sm4,
+ &safexcel_alg_cfb_sm4,
+ &safexcel_alg_ctr_sm4,
+ &safexcel_alg_authenc_hmac_sha1_cbc_sm4,
+ &safexcel_alg_authenc_hmac_sm3_cbc_sm4,
+ &safexcel_alg_authenc_hmac_sha1_ctr_sm4,
+ &safexcel_alg_authenc_hmac_sm3_ctr_sm4,
+ &safexcel_alg_sha3_224,
+ &safexcel_alg_sha3_256,
+ &safexcel_alg_sha3_384,
+ &safexcel_alg_sha3_512,
+ &safexcel_alg_hmac_sha3_224,
+ &safexcel_alg_hmac_sha3_256,
+ &safexcel_alg_hmac_sha3_384,
+ &safexcel_alg_hmac_sha3_512,
+ &safexcel_alg_authenc_hmac_sha1_cbc_des,
+ &safexcel_alg_authenc_hmac_sha256_cbc_des3_ede,
+ &safexcel_alg_authenc_hmac_sha224_cbc_des3_ede,
+ &safexcel_alg_authenc_hmac_sha512_cbc_des3_ede,
+ &safexcel_alg_authenc_hmac_sha384_cbc_des3_ede,
+ &safexcel_alg_authenc_hmac_sha256_cbc_des,
+ &safexcel_alg_authenc_hmac_sha224_cbc_des,
+ &safexcel_alg_authenc_hmac_sha512_cbc_des,
+ &safexcel_alg_authenc_hmac_sha384_cbc_des,
+ &safexcel_alg_rfc4106_gcm,
+ &safexcel_alg_rfc4543_gcm,
+ &safexcel_alg_rfc4309_ccm,
};

static int safexcel_register_algorithms(struct safexcel_crypto_priv *priv)
@@ -1240,30 +1323,30 @@

static void safexcel_configure(struct safexcel_crypto_priv *priv)
{
- u32 val, mask = 0;
-
- val = readl(EIP197_HIA_AIC_G(priv) + EIP197_HIA_OPTIONS);
-
- /* Read number of PEs from the engine */
- if (priv->flags & SAFEXCEL_HW_EIP197)
- /* Wider field width for all EIP197 type engines */
- mask = EIP197_N_PES_MASK;
- else
- /* Narrow field width for EIP97 type engine */
- mask = EIP97_N_PES_MASK;
-
- priv->config.pes = (val >> EIP197_N_PES_OFFSET) & mask;
+ u32 mask = BIT(priv->hwconfig.hwdataw) - 1;

- priv->config.rings = min_t(u32, val & GENMASK(3, 0), max_rings);
+ priv->config.pes = priv->hwconfig.hwnumpes;
+ priv->config.rings = min_t(u32, priv->hwconfig.hwnumrings, max_rings);
+ /* Cannot currently support more rings than we have ring AICs! */
+ priv->config.rings = min_t(u32, priv->config.rings,
+ priv->hwconfig.hwnumraic);

- val = (val & GENMASK(27, 25)) >> 25;
- mask = BIT(val) - 1;
-
- priv->config.cd_size = (sizeof(struct safexcel_command_desc) / sizeof(u32));
+ priv->config.cd_size = EIP197_CD64_FETCH_SIZE;
priv->config.cd_offset = (priv->config.cd_size + mask) & ~mask;
+ priv->config.cdsh_offset = (EIP197_MAX_TOKENS + mask) & ~mask;

- priv->config.rd_size = (sizeof(struct safexcel_result_desc) / sizeof(u32));
+ /* res token is behind the descr, but ofs must be rounded to buswdth */
+ priv->config.res_offset = (EIP197_RD64_FETCH_SIZE + mask) & ~mask;
+ /* now the size of the descr is this 1st part plus the result struct */
+ priv->config.rd_size = priv->config.res_offset +
+ EIP197_RD64_RESULT_SIZE;
priv->config.rd_offset = (priv->config.rd_size + mask) & ~mask;
+
+ /* convert dwords to bytes */
+ priv->config.cd_offset *= sizeof(u32);
+ priv->config.cdsh_offset *= sizeof(u32);
+ priv->config.rd_offset *= sizeof(u32);
+ priv->config.res_offset *= sizeof(u32);
}

static void safexcel_init_register_offsets(struct safexcel_crypto_priv *priv)
@@ -1309,7 +1392,7 @@
int is_pci_dev)
{
struct device *dev = priv->dev;
- u32 peid, version, mask, val, hiaopt;
+ u32 peid, version, mask, val, hiaopt, hwopt, peopt;
int i, ret, hwctg;

priv->context_pool = dmam_pool_create("safexcel-context", dev,
@@ -1371,13 +1454,16 @@
*/
version = readl(EIP197_GLOBAL(priv) + EIP197_VERSION);
if (((priv->flags & SAFEXCEL_HW_EIP197) &&
- (EIP197_REG_LO16(version) != EIP197_VERSION_LE)) ||
+ (EIP197_REG_LO16(version) != EIP197_VERSION_LE) &&
+ (EIP197_REG_LO16(version) != EIP196_VERSION_LE)) ||
((!(priv->flags & SAFEXCEL_HW_EIP197) &&
(EIP197_REG_LO16(version) != EIP97_VERSION_LE)))) {
/*
* We did not find the device that matched our initial probing
* (or our initial probing failed) Report appropriate error.
*/
+ dev_err(priv->dev, "Probing for EIP97/EIP19x failed - no such device (read %08x)\n",
+ version);
return -ENODEV;
}

@@ -1385,6 +1471,14 @@
hwctg = version >> 28;
peid = version & 255;

+ /* Detect EIP206 processing pipe */
+ version = readl(EIP197_PE(priv) + + EIP197_PE_VERSION(0));
+ if (EIP197_REG_LO16(version) != EIP206_VERSION_LE) {
+ dev_err(priv->dev, "EIP%d: EIP206 not detected\n", peid);
+ return -ENODEV;
+ }
+ priv->hwconfig.ppver = EIP197_VERSION_MASK(version);
+
/* Detect EIP96 packet engine and version */
version = readl(EIP197_PE(priv) + EIP197_PE_EIP96_VERSION(0));
if (EIP197_REG_LO16(version) != EIP96_VERSION_LE) {
@@ -1393,10 +1487,13 @@
}
priv->hwconfig.pever = EIP197_VERSION_MASK(version);

+ hwopt = readl(EIP197_GLOBAL(priv) + EIP197_OPTIONS);
hiaopt = readl(EIP197_HIA_AIC(priv) + EIP197_HIA_OPTIONS);

if (priv->flags & SAFEXCEL_HW_EIP197) {
/* EIP197 */
+ peopt = readl(EIP197_PE(priv) + EIP197_PE_OPTIONS(0));
+
priv->hwconfig.hwdataw = (hiaopt >> EIP197_HWDATAW_OFFSET) &
EIP197_HWDATAW_MASK;
priv->hwconfig.hwcfsize = ((hiaopt >> EIP197_CFSIZE_OFFSET) &
@@ -1405,6 +1502,19 @@
priv->hwconfig.hwrfsize = ((hiaopt >> EIP197_RFSIZE_OFFSET) &
EIP197_RFSIZE_MASK) +
EIP197_RFSIZE_ADJUST;
+ priv->hwconfig.hwnumpes = (hiaopt >> EIP197_N_PES_OFFSET) &
+ EIP197_N_PES_MASK;
+ priv->hwconfig.hwnumrings = (hiaopt >> EIP197_N_RINGS_OFFSET) &
+ EIP197_N_RINGS_MASK;
+ if (hiaopt & EIP197_HIA_OPT_HAS_PE_ARB)
+ priv->flags |= EIP197_PE_ARB;
+ if (EIP206_OPT_ICE_TYPE(peopt) == 1)
+ priv->flags |= EIP197_ICE;
+ /* If not a full TRC, then assume simple TRC */
+ if (!(hwopt & EIP197_OPT_HAS_TRC))
+ priv->flags |= EIP197_SIMPLE_TRC;
+ /* EIP197 always has SOME form of TRC */
+ priv->flags |= EIP197_TRC_CACHE;
} else {
/* EIP97 */
priv->hwconfig.hwdataw = (hiaopt >> EIP197_HWDATAW_OFFSET) &
@@ -1413,6 +1523,23 @@
EIP97_CFSIZE_MASK;
priv->hwconfig.hwrfsize = (hiaopt >> EIP97_RFSIZE_OFFSET) &
EIP97_RFSIZE_MASK;
+ priv->hwconfig.hwnumpes = 1; /* by definition */
+ priv->hwconfig.hwnumrings = (hiaopt >> EIP197_N_RINGS_OFFSET) &
+ EIP197_N_RINGS_MASK;
+ }
+
+ /* Scan for ring AIC's */
+ for (i = 0; i < EIP197_MAX_RING_AIC; i++) {
+ version = readl(EIP197_HIA_AIC_R(priv) +
+ EIP197_HIA_AIC_R_VERSION(i));
+ if (EIP197_REG_LO16(version) != EIP201_VERSION_LE)
+ break;
+ }
+ priv->hwconfig.hwnumraic = i;
+ /* Low-end EIP196 may not have any ring AIC's ... */
+ if (!priv->hwconfig.hwnumraic) {
+ dev_err(priv->dev, "No ring interrupt controller present!\n");
+ return -ENODEV;
}

/* Get supported algorithms from EIP96 transform engine */
@@ -1420,10 +1547,12 @@
EIP197_PE_EIP96_OPTIONS(0));

/* Print single info line describing what we just detected */
- dev_info(priv->dev, "EIP%d:%x(%d)-HIA:%x(%d,%d,%d),PE:%x,alg:%08x\n",
- peid, priv->hwconfig.hwver, hwctg, priv->hwconfig.hiaver,
- priv->hwconfig.hwdataw, priv->hwconfig.hwcfsize,
- priv->hwconfig.hwrfsize, priv->hwconfig.pever,
+ dev_info(priv->dev, "EIP%d:%x(%d,%d,%d,%d)-HIA:%x(%d,%d,%d),PE:%x/%x,alg:%08x\n",
+ peid, priv->hwconfig.hwver, hwctg, priv->hwconfig.hwnumpes,
+ priv->hwconfig.hwnumrings, priv->hwconfig.hwnumraic,
+ priv->hwconfig.hiaver, priv->hwconfig.hwdataw,
+ priv->hwconfig.hwcfsize, priv->hwconfig.hwrfsize,
+ priv->hwconfig.ppver, priv->hwconfig.pever,
priv->hwconfig.algo_flags);

safexcel_configure(priv);
@@ -1547,7 +1676,6 @@
}
}

-#if IS_ENABLED(CONFIG_OF)
/* for Device Tree platform driver */

static int safexcel_probe(struct platform_device *pdev)
@@ -1625,6 +1753,7 @@
safexcel_unregister_algorithms(priv);
safexcel_hw_reset_rings(priv);

+ clk_disable_unprepare(priv->reg_clk);
clk_disable_unprepare(priv->clk);

for (i = 0; i < priv->config.rings; i++)
@@ -1666,9 +1795,7 @@
.of_match_table = safexcel_of_match_table,
},
};
-#endif

-#if IS_ENABLED(CONFIG_PCI)
/* PCIE devices - i.e. Inside Secure development boards */

static int safexcel_pci_probe(struct pci_dev *pdev,
@@ -1759,7 +1886,7 @@
return rc;
}

-void safexcel_pci_remove(struct pci_dev *pdev)
+static void safexcel_pci_remove(struct pci_dev *pdev)
{
struct safexcel_crypto_priv *priv = pci_get_drvdata(pdev);
int i;
@@ -1789,54 +1916,32 @@
.probe = safexcel_pci_probe,
.remove = safexcel_pci_remove,
};
-#endif
-
-/* Unfortunately, we have to resort to global variables here */
-#if IS_ENABLED(CONFIG_PCI)
-int pcireg_rc = -EINVAL; /* Default safe value */
-#endif
-#if IS_ENABLED(CONFIG_OF)
-int ofreg_rc = -EINVAL; /* Default safe value */
-#endif

static int __init safexcel_init(void)
{
-#if IS_ENABLED(CONFIG_PCI)
+ int ret;
+
/* Register PCI driver */
- pcireg_rc = pci_register_driver(&safexcel_pci_driver);
-#endif
+ ret = pci_register_driver(&safexcel_pci_driver);

-#if IS_ENABLED(CONFIG_OF)
/* Register platform driver */
- ofreg_rc = platform_driver_register(&crypto_safexcel);
- #if IS_ENABLED(CONFIG_PCI)
- /* Return success if either PCI or OF registered OK */
- return pcireg_rc ? ofreg_rc : 0;
- #else
- return ofreg_rc;
- #endif
-#else
- #if IS_ENABLED(CONFIG_PCI)
- return pcireg_rc;
- #else
- return -EINVAL;
- #endif
-#endif
+ if (IS_ENABLED(CONFIG_OF) && !ret) {
+ ret = platform_driver_register(&crypto_safexcel);
+ if (ret)
+ pci_unregister_driver(&safexcel_pci_driver);
+ }
+
+ return ret;
}

static void __exit safexcel_exit(void)
{
-#if IS_ENABLED(CONFIG_OF)
/* Unregister platform driver */
- if (!ofreg_rc)
+ if (IS_ENABLED(CONFIG_OF))
platform_driver_unregister(&crypto_safexcel);
-#endif

-#if IS_ENABLED(CONFIG_PCI)
/* Unregister PCI driver if successfully registered before */
- if (!pcireg_rc)
- pci_unregister_driver(&safexcel_pci_driver);
-#endif
+ pci_unregister_driver(&safexcel_pci_driver);
}

module_init(safexcel_init);
--- a/drivers/crypto/inside-secure/safexcel_cipher.c
+++ b/drivers/crypto/inside-secure/safexcel_cipher.c
@@ -5,18 +5,22 @@
* Antoine Tenart <antoine.tenart@free-electrons.com>
*/

+#include <asm/unaligned.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
-
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
+#include <crypto/chacha.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/gcm.h>
#include <crypto/ghash.h>
+#include <crypto/poly1305.h>
#include <crypto/sha.h>
+#include <crypto/sm3.h>
+#include <crypto/sm4.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
@@ -33,6 +37,8 @@
SAFEXCEL_DES,
SAFEXCEL_3DES,
SAFEXCEL_AES,
+ SAFEXCEL_CHACHA20,
+ SAFEXCEL_SM4,
};

struct safexcel_cipher_ctx {
@@ -41,8 +47,12 @@

u32 mode;
enum safexcel_cipher_alg alg;
- bool aead;
- int xcm; /* 0=authenc, 1=GCM, 2 reserved for CCM */
+ u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
+ u8 xcm; /* 0=authenc, 1=GCM, 2 reserved for CCM */
+ u8 aadskip;
+ u8 blocksz;
+ u32 ivmask;
+ u32 ctrinit;

__le32 key[16];
u32 nonce;
@@ -51,10 +61,11 @@
/* All the below is AEAD specific */
u32 hash_alg;
u32 state_sz;
- u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
- u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
+ __be32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
+ __be32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];

struct crypto_cipher *hkaes;
+ struct crypto_aead *fback;
};

struct safexcel_cipher_req {
@@ -65,206 +76,298 @@
int nr_src, nr_dst;
};

-static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
- struct safexcel_command_desc *cdesc)
+static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
+ struct safexcel_command_desc *cdesc)
{
- u32 block_sz = 0;
-
if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
-
/* 32 bit nonce */
cdesc->control_data.token[0] = ctx->nonce;
/* 64 bit IV part */
memcpy(&cdesc->control_data.token[1], iv, 8);
- /* 32 bit counter, start at 1 (big endian!) */
- cdesc->control_data.token[3] = cpu_to_be32(1);
-
- return;
- } else if (ctx->xcm == EIP197_XCM_MODE_GCM) {
- cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
-
- /* 96 bit IV part */
- memcpy(&cdesc->control_data.token[0], iv, 12);
- /* 32 bit counter, start at 1 (big endian!) */
- cdesc->control_data.token[3] = cpu_to_be32(1);
-
- return;
- } else if (ctx->xcm == EIP197_XCM_MODE_CCM) {
+ /* 32 bit counter, start at 0 or 1 (big endian!) */
+ cdesc->control_data.token[3] =
+ (__force u32)cpu_to_be32(ctx->ctrinit);
+ return 4;
+ }
+ if (ctx->alg == SAFEXCEL_CHACHA20) {
cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
-
- /* Variable length IV part */
- memcpy(&cdesc->control_data.token[0], iv, 15 - iv[0]);
- /* Start variable length counter at 0 */
- memset((u8 *)&cdesc->control_data.token[0] + 15 - iv[0],
- 0, iv[0] + 1);
-
- return;
+ /* 96 bit nonce part */
+ memcpy(&cdesc->control_data.token[0], &iv[4], 12);
+ /* 32 bit counter */
+ cdesc->control_data.token[3] = *(u32 *)iv;
+ return 4;
}

- if (ctx->mode != CONTEXT_CONTROL_CRYPTO_MODE_ECB) {
- switch (ctx->alg) {
- case SAFEXCEL_DES:
- block_sz = DES_BLOCK_SIZE;
- cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
- break;
- case SAFEXCEL_3DES:
- block_sz = DES3_EDE_BLOCK_SIZE;
- cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
- break;
- case SAFEXCEL_AES:
- block_sz = AES_BLOCK_SIZE;
- cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
- break;
- }
- memcpy(cdesc->control_data.token, iv, block_sz);
- }
+ cdesc->control_data.options |= ctx->ivmask;
+ memcpy(cdesc->control_data.token, iv, ctx->blocksz);
+ return ctx->blocksz / sizeof(u32);
}

static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
struct safexcel_command_desc *cdesc,
+ struct safexcel_token *atoken,
u32 length)
{
struct safexcel_token *token;
+ int ivlen;

- safexcel_cipher_token(ctx, iv, cdesc);
-
- /* skip over worst case IV of 4 dwords, no need to be exact */
- token = (struct safexcel_token *)(cdesc->control_data.token + 4);
+ ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
+ if (ivlen == 4) {
+ /* No space in cdesc, instruction moves to atoken */
+ cdesc->additional_cdata_size = 1;
+ token = atoken;
+ } else {
+ /* Everything fits in cdesc */
+ token = (struct safexcel_token *)(cdesc->control_data.token + 2);
+ /* Need to pad with NOP */
+ eip197_noop_token(&token[1]);
+ }
+
+ token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
+ token->packet_length = length;
+ token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
+ EIP197_TOKEN_STAT_LAST_HASH;
+ token->instructions = EIP197_TOKEN_INS_LAST |
+ EIP197_TOKEN_INS_TYPE_CRYPTO |
+ EIP197_TOKEN_INS_TYPE_OUTPUT;
+}

- token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
- token[0].packet_length = length;
- token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
- EIP197_TOKEN_STAT_LAST_HASH;
- token[0].instructions = EIP197_TOKEN_INS_LAST |
- EIP197_TOKEN_INS_TYPE_CRYPTO |
- EIP197_TOKEN_INS_TYPE_OUTPUT;
+static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
+ struct safexcel_command_desc *cdesc)
+{
+ if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
+ ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
+ /* 32 bit nonce */
+ cdesc->control_data.token[0] = ctx->nonce;
+ /* 64 bit IV part */
+ memcpy(&cdesc->control_data.token[1], iv, 8);
+ /* 32 bit counter, start at 0 or 1 (big endian!) */
+ cdesc->control_data.token[3] =
+ (__force u32)cpu_to_be32(ctx->ctrinit);
+ return;
+ }
+ if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
+ /* 96 bit IV part */
+ memcpy(&cdesc->control_data.token[0], iv, 12);
+ /* 32 bit counter, start at 0 or 1 (big endian!) */
+ cdesc->control_data.token[3] =
+ (__force u32)cpu_to_be32(ctx->ctrinit);
+ return;
+ }
+ /* CBC */
+ memcpy(cdesc->control_data.token, iv, ctx->blocksz);
}

static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
struct safexcel_command_desc *cdesc,
+ struct safexcel_token *atoken,
enum safexcel_cipher_direction direction,
u32 cryptlen, u32 assoclen, u32 digestsize)
{
- struct safexcel_token *token;
+ struct safexcel_token *aadref;
+ int atoksize = 2; /* Start with minimum size */
+ int assocadj = assoclen - ctx->aadskip, aadalign;

- safexcel_cipher_token(ctx, iv, cdesc);
+ /* Always 4 dwords of embedded IV for AEAD modes */
+ cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

- if (direction == SAFEXCEL_ENCRYPT) {
- /* align end of instruction sequence to end of token */
- token = (struct safexcel_token *)(cdesc->control_data.token +
- EIP197_MAX_TOKENS - 13);
-
- token[12].opcode = EIP197_TOKEN_OPCODE_INSERT;
- token[12].packet_length = digestsize;
- token[12].stat = EIP197_TOKEN_STAT_LAST_HASH |
- EIP197_TOKEN_STAT_LAST_PACKET;
- token[12].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
- EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
- } else {
+ if (direction == SAFEXCEL_DECRYPT)
cryptlen -= digestsize;

- /* align end of instruction sequence to end of token */
- token = (struct safexcel_token *)(cdesc->control_data.token +
- EIP197_MAX_TOKENS - 14);
-
- token[12].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
- token[12].packet_length = digestsize;
- token[12].stat = EIP197_TOKEN_STAT_LAST_HASH |
- EIP197_TOKEN_STAT_LAST_PACKET;
- token[12].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
-
- token[13].opcode = EIP197_TOKEN_OPCODE_VERIFY;
- token[13].packet_length = digestsize |
- EIP197_TOKEN_HASH_RESULT_VERIFY;
- token[13].stat = EIP197_TOKEN_STAT_LAST_HASH |
- EIP197_TOKEN_STAT_LAST_PACKET;
- token[13].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
- }
-
- token[6].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
- token[6].packet_length = assoclen;
-
- if (likely(cryptlen)) {
- token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;
-
- token[10].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
- token[10].packet_length = cryptlen;
- token[10].stat = EIP197_TOKEN_STAT_LAST_HASH;
- token[10].instructions = EIP197_TOKEN_INS_LAST |
- EIP197_TOKEN_INS_TYPE_CRYPTO |
- EIP197_TOKEN_INS_TYPE_HASH |
- EIP197_TOKEN_INS_TYPE_OUTPUT;
- } else if (ctx->xcm != EIP197_XCM_MODE_CCM) {
- token[6].stat = EIP197_TOKEN_STAT_LAST_HASH;
- token[6].instructions = EIP197_TOKEN_INS_LAST |
- EIP197_TOKEN_INS_TYPE_HASH;
- }
-
- if (!ctx->xcm)
- return;
-
- token[8].opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
- token[8].packet_length = 0;
- token[8].instructions = AES_BLOCK_SIZE;
-
- token[9].opcode = EIP197_TOKEN_OPCODE_INSERT;
- token[9].packet_length = AES_BLOCK_SIZE;
- token[9].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
- EIP197_TOKEN_INS_TYPE_CRYPTO;
-
- if (ctx->xcm == EIP197_XCM_MODE_GCM) {
- token[6].instructions = EIP197_TOKEN_INS_LAST |
- EIP197_TOKEN_INS_TYPE_HASH;
- } else {
- u8 *cbcmaciv = (u8 *)&token[1];
- u32 *aadlen = (u32 *)&token[5];
-
+ if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
/* Construct IV block B0 for the CBC-MAC */
- token[0].opcode = EIP197_TOKEN_OPCODE_INSERT;
- token[0].packet_length = AES_BLOCK_SIZE +
- ((assoclen > 0) << 1);
- token[0].instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
- EIP197_TOKEN_INS_TYPE_HASH;
- /* Variable length IV part */
- memcpy(cbcmaciv, iv, 15 - iv[0]);
- /* fixup flags byte */
- cbcmaciv[0] |= ((assoclen > 0) << 6) | ((digestsize - 2) << 2);
- /* Clear upper bytes of variable message length to 0 */
- memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
- /* insert lower 2 bytes of message length */
- cbcmaciv[14] = cryptlen >> 8;
- cbcmaciv[15] = cryptlen & 255;
-
- if (assoclen) {
- *aadlen = cpu_to_le32(cpu_to_be16(assoclen));
- assoclen += 2;
+ u8 *final_iv = (u8 *)cdesc->control_data.token;
+ u8 *cbcmaciv = (u8 *)&atoken[1];
+ __le32 *aadlen = (__le32 *)&atoken[5];
+
+ if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
+ /* Length + nonce */
+ cdesc->control_data.token[0] = ctx->nonce;
+ /* Fixup flags byte */
+ *(__le32 *)cbcmaciv =
+ cpu_to_le32(ctx->nonce |
+ ((assocadj > 0) << 6) |
+ ((digestsize - 2) << 2));
+ /* 64 bit IV part */
+ memcpy(&cdesc->control_data.token[1], iv, 8);
+ memcpy(cbcmaciv + 4, iv, 8);
+ /* Start counter at 0 */
+ cdesc->control_data.token[3] = 0;
+ /* Message length */
+ *(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
+ } else {
+ /* Variable length IV part */
+ memcpy(final_iv, iv, 15 - iv[0]);
+ memcpy(cbcmaciv, iv, 15 - iv[0]);
+ /* Start variable length counter at 0 */
+ memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
+ memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
+ /* fixup flags byte */
+ cbcmaciv[0] |= ((assocadj > 0) << 6) |
+ ((digestsize - 2) << 2);
+ /* insert lower 2 bytes of message length */
+ cbcmaciv[14] = cryptlen >> 8;
+ cbcmaciv[15] = cryptlen & 255;
+ }
+
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
+ atoken->packet_length = AES_BLOCK_SIZE +
+ ((assocadj > 0) << 1);
+ atoken->stat = 0;
+ atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
+ EIP197_TOKEN_INS_TYPE_HASH;
+
+ if (likely(assocadj)) {
+ *aadlen = cpu_to_le32((assocadj >> 8) |
+ (assocadj & 255) << 8);
+ atoken += 6;
+ atoksize += 7;
+ } else {
+ atoken += 5;
+ atoksize += 6;
}

- token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;
-
- /* Align AAD data towards hash engine */
- token[7].opcode = EIP197_TOKEN_OPCODE_INSERT;
- assoclen &= 15;
- token[7].packet_length = assoclen ? 16 - assoclen : 0;
-
+ /* Process AAD data */
+ aadref = atoken;
+ atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
+ atoken->packet_length = assocadj;
+ atoken->stat = 0;
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
+ atoken++;
+
+ /* For CCM only, align AAD data towards hash engine */
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
+ aadalign = (assocadj + 2) & 15;
+ atoken->packet_length = assocadj && aadalign ?
+ 16 - aadalign :
+ 0;
if (likely(cryptlen)) {
- token[7].instructions = EIP197_TOKEN_INS_TYPE_HASH;
-
- /* Align crypto data towards hash engine */
- token[10].stat = 0;
+ atoken->stat = 0;
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
+ } else {
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
+ atoken->instructions = EIP197_TOKEN_INS_LAST |
+ EIP197_TOKEN_INS_TYPE_HASH;
+ }
+ } else {
+ safexcel_aead_iv(ctx, iv, cdesc);

- token[11].opcode = EIP197_TOKEN_OPCODE_INSERT;
- cryptlen &= 15;
- token[11].packet_length = cryptlen ? 16 - cryptlen : 0;
- token[11].stat = EIP197_TOKEN_STAT_LAST_HASH;
- token[11].instructions = EIP197_TOKEN_INS_TYPE_HASH;
+ /* Process AAD data */
+ aadref = atoken;
+ atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
+ atoken->packet_length = assocadj;
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
+ atoken->instructions = EIP197_TOKEN_INS_LAST |
+ EIP197_TOKEN_INS_TYPE_HASH;
+ }
+ atoken++;
+
+ if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
+ /* For ESP mode (and not GMAC), skip over the IV */
+ atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
+ atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
+ atoken->stat = 0;
+ atoken->instructions = 0;
+ atoken++;
+ atoksize++;
+ } else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
+ direction == SAFEXCEL_DECRYPT)) {
+ /* Poly-chacha decryption needs a dummy NOP here ... */
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
+ atoken->packet_length = 16; /* According to Op Manual */
+ atoken->stat = 0;
+ atoken->instructions = 0;
+ atoken++;
+ atoksize++;
+ }
+
+ if (ctx->xcm) {
+ /* For GCM and CCM, obtain enc(Y0) */
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
+ atoken->packet_length = 0;
+ atoken->stat = 0;
+ atoken->instructions = AES_BLOCK_SIZE;
+ atoken++;
+
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
+ atoken->packet_length = AES_BLOCK_SIZE;
+ atoken->stat = 0;
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
+ EIP197_TOKEN_INS_TYPE_CRYPTO;
+ atoken++;
+ atoksize += 2;
+ }
+
+ if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
+ /* Fixup stat field for AAD direction instruction */
+ aadref->stat = 0;
+
+ /* Process crypto data */
+ atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
+ atoken->packet_length = cryptlen;
+
+ if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
+ /* Fixup instruction field for AAD dir instruction */
+ aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;
+
+ /* Do not send to crypt engine in case of GMAC */
+ atoken->instructions = EIP197_TOKEN_INS_LAST |
+ EIP197_TOKEN_INS_TYPE_HASH |
+ EIP197_TOKEN_INS_TYPE_OUTPUT;
+ } else {
+ atoken->instructions = EIP197_TOKEN_INS_LAST |
+ EIP197_TOKEN_INS_TYPE_CRYPTO |
+ EIP197_TOKEN_INS_TYPE_HASH |
+ EIP197_TOKEN_INS_TYPE_OUTPUT;
+ }
+
+ cryptlen &= 15;
+ if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
+ atoken->stat = 0;
+ /* For CCM only, pad crypto data to the hash engine */
+ atoken++;
+ atoksize++;
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
+ atoken->packet_length = 16 - cryptlen;
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
} else {
- token[7].stat = EIP197_TOKEN_STAT_LAST_HASH;
- token[7].instructions = EIP197_TOKEN_INS_LAST |
- EIP197_TOKEN_INS_TYPE_HASH;
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
}
+ atoken++;
+ atoksize++;
}
+
+ if (direction == SAFEXCEL_ENCRYPT) {
+ /* Append ICV */
+ atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
+ atoken->packet_length = digestsize;
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
+ EIP197_TOKEN_STAT_LAST_PACKET;
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
+ EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
+ } else {
+ /* Extract ICV */
+ atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
+ atoken->packet_length = digestsize;
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
+ EIP197_TOKEN_STAT_LAST_PACKET;
+ atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
+ atoken++;
+ atoksize++;
+
+ /* Verify ICV */
+ atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
+ atoken->packet_length = digestsize |
+ EIP197_TOKEN_HASH_RESULT_VERIFY;
+ atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
+ EIP197_TOKEN_STAT_LAST_PACKET;
+ atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
+ }
+
+ /* Fixup length of the token in the command descriptor */
+ cdesc->additional_cdata_size = atoksize;
}

static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
@@ -277,14 +380,12 @@
int ret, i;

ret = aes_expandkey(&aes, key, len);
- if (ret) {
- crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (ret)
return ret;
- }

if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
for (i = 0; i < len / sizeof(u32); i++) {
- if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+ if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
ctx->base.needs_inv = true;
break;
}
@@ -309,43 +410,57 @@
struct safexcel_crypto_priv *priv = ctx->priv;
struct crypto_authenc_keys keys;
struct crypto_aes_ctx aes;
- int err = -EINVAL;
+ int err = -EINVAL, i;

- if (crypto_authenc_extractkeys(&keys, key, len) != 0)
+ if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
goto badkey;

if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
- /* Minimum keysize is minimum AES key size + nonce size */
- if (keys.enckeylen < (AES_MIN_KEY_SIZE +
- CTR_RFC3686_NONCE_SIZE))
+ /* Must have at least space for the nonce here */
+ if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
goto badkey;
/* last 4 bytes of key are the nonce! */
ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
CTR_RFC3686_NONCE_SIZE);
/* exclude the nonce here */
- keys.enckeylen -= CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
+ keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
}

/* Encryption key */
switch (ctx->alg) {
+ case SAFEXCEL_DES:
+ err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
+ if (unlikely(err))
+ goto badkey;
+ break;
case SAFEXCEL_3DES:
err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
if (unlikely(err))
- goto badkey_expflags;
+ goto badkey;
break;
case SAFEXCEL_AES:
err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
if (unlikely(err))
goto badkey;
break;
+ case SAFEXCEL_SM4:
+ if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
+ goto badkey;
+ break;
default:
dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
goto badkey;
}

- if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
- memcmp(ctx->key, keys.enckey, keys.enckeylen))
- ctx->base.needs_inv = true;
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
+ for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
+ if (le32_to_cpu(ctx->key[i]) !=
+ ((u32 *)keys.enckey)[i]) {
+ ctx->base.needs_inv = true;
+ break;
+ }
+ }
+ }

/* Auth key */
switch (ctx->hash_alg) {
@@ -374,21 +489,24 @@
keys.authkeylen, &istate, &ostate))
goto badkey;
break;
+ case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
+ if (safexcel_hmac_setkey("safexcel-sm3", keys.authkey,
+ keys.authkeylen, &istate, &ostate))
+ goto badkey;
+ break;
default:
dev_err(priv->dev, "aead: unsupported hash algorithm\n");
goto badkey;
}

- crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
- CRYPTO_TFM_RES_MASK);
-
if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
(memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
memcmp(ctx->opad, ostate.state, ctx->state_sz)))
ctx->base.needs_inv = true;

/* Now copy the keys into the context */
- memcpy(ctx->key, keys.enckey, keys.enckeylen);
+ for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
+ ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
ctx->key_len = keys.enckeylen;

memcpy(ctx->ipad, &istate.state, ctx->state_sz);
@@ -398,8 +516,6 @@
return 0;

badkey:
- crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
-badkey_expflags:
memzero_explicit(&keys, sizeof(keys));
return err;
}
@@ -423,6 +539,17 @@
CONTEXT_CONTROL_DIGEST_XCM |
ctx->hash_alg |
CONTEXT_CONTROL_SIZE(ctrl_size);
+ } else if (ctx->alg == SAFEXCEL_CHACHA20) {
+ /* Chacha20-Poly1305 */
+ cdesc->control_data.control0 =
+ CONTEXT_CONTROL_KEY_EN |
+ CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
+ (sreq->direction == SAFEXCEL_ENCRYPT ?
+ CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
+ CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
+ ctx->hash_alg |
+ CONTEXT_CONTROL_SIZE(ctrl_size);
+ return 0;
} else {
ctrl_size += ctx->state_sz / sizeof(u32) * 2;
cdesc->control_data.control0 =
@@ -431,17 +558,21 @@
ctx->hash_alg |
CONTEXT_CONTROL_SIZE(ctrl_size);
}
- if (sreq->direction == SAFEXCEL_ENCRYPT)
- cdesc->control_data.control0 |=
- (ctx->xcm == EIP197_XCM_MODE_CCM) ?
- CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT :
- CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;

+ if (sreq->direction == SAFEXCEL_ENCRYPT &&
+ (ctx->xcm == EIP197_XCM_MODE_CCM ||
+ ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
+ cdesc->control_data.control0 |=
+ CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
+ else if (sreq->direction == SAFEXCEL_ENCRYPT)
+ cdesc->control_data.control0 |=
+ CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
+ else if (ctx->xcm == EIP197_XCM_MODE_CCM)
+ cdesc->control_data.control0 |=
+ CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
else
cdesc->control_data.control0 |=
- (ctx->xcm == EIP197_XCM_MODE_CCM) ?
- CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN :
- CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
+ CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
} else {
if (sreq->direction == SAFEXCEL_ENCRYPT)
cdesc->control_data.control0 =
@@ -480,6 +611,12 @@
ctx->key_len >> ctx->xts);
return -EINVAL;
}
+ } else if (ctx->alg == SAFEXCEL_CHACHA20) {
+ cdesc->control_data.control0 |=
+ CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
+ } else if (ctx->alg == SAFEXCEL_SM4) {
+ cdesc->control_data.control0 |=
+ CONTEXT_CONTROL_CRYPTO_ALG_SM4;
}

return 0;
@@ -563,6 +700,7 @@
unsigned int totlen;
unsigned int totlen_src = cryptlen + assoclen;
unsigned int totlen_dst = totlen_src;
+ struct safexcel_token *atoken;
int n_cdesc = 0, n_rdesc = 0;
int queued, i, ret = 0;
bool first = true;
@@ -637,56 +775,60 @@

memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

- /* The EIP cannot deal with zero length input packets! */
- if (totlen == 0)
- totlen = 1;
+ if (!totlen) {
+ /*
+ * The EIP97 cannot deal with zero length input packets!
+ * So stuff a dummy command descriptor indicating a 1 byte
+ * (dummy) input packet, using the context record as source.
+ */
+ first_cdesc = safexcel_add_cdesc(priv, ring,
+ 1, 1, ctx->base.ctxr_dma,
+ 1, 1, ctx->base.ctxr_dma,
+ &atoken);
+ if (IS_ERR(first_cdesc)) {
+ /* No space left in the command descriptor ring */
+ ret = PTR_ERR(first_cdesc);
+ goto cdesc_rollback;
+ }
+ n_cdesc = 1;
+ goto skip_cdesc;
+ }

/* command descriptors */
for_each_sg(src, sg, sreq->nr_src, i) {
int len = sg_dma_len(sg);

/* Do not overflow the request */
- if (queued - len < 0)
+ if (queued < len)
len = queued;

cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
!(queued - len),
sg_dma_address(sg), len, totlen,
- ctx->base.ctxr_dma);
+ ctx->base.ctxr_dma, &atoken);
if (IS_ERR(cdesc)) {
/* No space left in the command descriptor ring */
ret = PTR_ERR(cdesc);
goto cdesc_rollback;
}
- n_cdesc++;

- if (n_cdesc == 1) {
+ if (!n_cdesc)
first_cdesc = cdesc;
- }

+ n_cdesc++;
queued -= len;
if (!queued)
break;
}
-
- if (unlikely(!n_cdesc)) {
- /*
- * Special case: zero length input buffer.
- * The engine always needs the 1st command descriptor, however!
- */
- first_cdesc = safexcel_add_cdesc(priv, ring, 1, 1, 0, 0, totlen,
- ctx->base.ctxr_dma);
- n_cdesc = 1;
- }
-
+skip_cdesc:
/* Add context control words and token to first command descriptor */
safexcel_context_control(ctx, base, sreq, first_cdesc);
if (ctx->aead)
- safexcel_aead_token(ctx, iv, first_cdesc,
+ safexcel_aead_token(ctx, iv, first_cdesc, atoken,
sreq->direction, cryptlen,
assoclen, digestsize);
else
- safexcel_skcipher_token(ctx, iv, first_cdesc,
+ safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
cryptlen);

/* result descriptors */
@@ -1073,6 +1215,8 @@

ctx->base.send = safexcel_skcipher_send;
ctx->base.handle_result = safexcel_skcipher_handle_result;
+ ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
+ ctx->ctrinit = 1;
return 0;
}

@@ -1137,6 +1281,8 @@
safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_AES;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
+ ctx->blocksz = 0;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
return 0;
}

@@ -1171,6 +1317,7 @@

safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
return 0;
}
@@ -1207,6 +1354,7 @@

safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
return 0;
}
@@ -1243,6 +1391,7 @@

safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
return 0;
}
@@ -1288,14 +1437,12 @@
/* exclude the nonce here */
keylen = len - CTR_RFC3686_NONCE_SIZE;
ret = aes_expandkey(&aes, key, keylen);
- if (ret) {
- crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
+ if (ret)
return ret;
- }

if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
for (i = 0; i < keylen / sizeof(u32); i++) {
- if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
+ if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
ctx->base.needs_inv = true;
break;
}
@@ -1317,6 +1464,7 @@

safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_AES;
+ ctx->blocksz = AES_BLOCK_SIZE;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
return 0;
}
@@ -1352,6 +1500,7 @@
unsigned int len)
{
struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
+ struct safexcel_crypto_priv *priv = ctx->priv;
int ret;

ret = verify_skcipher_des_key(ctfm, key);
@@ -1359,7 +1508,7 @@
return ret;

/* if context exits and key changed, need to invalidate it */
- if (ctx->base.ctxr_dma)
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
if (memcmp(ctx->key, key, len))
ctx->base.needs_inv = true;

@@ -1375,6 +1524,8 @@

safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_DES;
+ ctx->blocksz = DES_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
return 0;
}
@@ -1412,6 +1563,8 @@
safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_DES;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
+ ctx->blocksz = 0;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
return 0;
}

@@ -1444,6 +1597,7 @@
const u8 *key, unsigned int len)
{
struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
+ struct safexcel_crypto_priv *priv = ctx->priv;
int err;

err = verify_skcipher_des3_key(ctfm, key);
@@ -1451,13 +1605,11 @@
return err;

/* if context exits and key changed, need to invalidate it */
- if (ctx->base.ctxr_dma) {
+ if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
if (memcmp(ctx->key, key, len))
ctx->base.needs_inv = true;
- }

memcpy(ctx->key, key, len);
-
ctx->key_len = len;

return 0;
@@ -1469,6 +1621,8 @@

safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_3DES;
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
return 0;
}
@@ -1506,6 +1660,8 @@
safexcel_skcipher_cra_init(tfm);
ctx->alg = SAFEXCEL_3DES;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
+ ctx->blocksz = 0;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
return 0;
}

@@ -1561,6 +1717,9 @@
ctx->priv = tmpl->priv;

ctx->alg = SAFEXCEL_AES; /* default */
+ ctx->blocksz = AES_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
+ ctx->ctrinit = 1;
ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
ctx->aead = true;
ctx->base.send = safexcel_aead_send;
@@ -1749,6 +1908,8 @@

safexcel_aead_sha1_cra_init(tfm);
ctx->alg = SAFEXCEL_3DES; /* override default */
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
return 0;
}

@@ -1777,6 +1938,330 @@
},
};

+static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sha256_cra_init(tfm);
+ ctx->alg = SAFEXCEL_3DES; /* override default */
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .maxauthsize = SHA256_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sha256_des3_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sha224_cra_init(tfm);
+ ctx->alg = SAFEXCEL_3DES; /* override default */
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .maxauthsize = SHA224_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sha224_des3_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sha512_cra_init(tfm);
+ ctx->alg = SAFEXCEL_3DES; /* override default */
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
+ .maxauthsize = SHA512_DIGEST_SIZE,
+ .base = {
+ .cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
+ .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
+ .cra_priority = SAFEXCEL_CRA_PRIORITY,
+ .cra_flags = CRYPTO_ALG_ASYNC |
+ CRYPTO_ALG_KERN_DRIVER_ONLY,
+ .cra_blocksize = DES3_EDE_BLOCK_SIZE,
+ .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
+ .cra_alignmask = 0,
+ .cra_init = safexcel_aead_sha512_des3_cra_init,
+ .cra_exit = safexcel_aead_cra_exit,
+ .cra_module = THIS_MODULE,
+ },
+ },
+};
+
+static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
+{
+ struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
+
+ safexcel_aead_sha384_cra_init(tfm);
+ ctx->alg = SAFEXCEL_3DES; /* override default */
+ ctx->blocksz = DES3_EDE_BLOCK_SIZE;
+ ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
+ return 0;
+}
+
+struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
+ .type = SAFEXCEL_ALG_TYPE_AEAD,
+ .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
+ .alg.aead = {
+ .setkey = safexcel_aead_setkey,
+ .encrypt = safexcel_aead_encrypt,
+ .decrypt = safexcel_aead_decrypt,
+ .ivsize = DES3_EDE_BLOCK_SIZE,
1771 + .maxauthsize = SHA384_DIGEST_SIZE,
1772 + .base = {
1773 + .cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
1774 + .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
1775 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
1776 + .cra_flags = CRYPTO_ALG_ASYNC |
1777 + CRYPTO_ALG_KERN_DRIVER_ONLY,
1778 + .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1779 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1780 + .cra_alignmask = 0,
1781 + .cra_init = safexcel_aead_sha384_des3_cra_init,
1782 + .cra_exit = safexcel_aead_cra_exit,
1783 + .cra_module = THIS_MODULE,
1784 + },
1785 + },
1786 +};
1787 +
1788 +static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
1789 +{
1790 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1791 +
1792 + safexcel_aead_sha1_cra_init(tfm);
1793 + ctx->alg = SAFEXCEL_DES; /* override default */
1794 + ctx->blocksz = DES_BLOCK_SIZE;
1795 + ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1796 + return 0;
1797 +}
1798 +
1799 +struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
1800 + .type = SAFEXCEL_ALG_TYPE_AEAD,
1801 + .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
1802 + .alg.aead = {
1803 + .setkey = safexcel_aead_setkey,
1804 + .encrypt = safexcel_aead_encrypt,
1805 + .decrypt = safexcel_aead_decrypt,
1806 + .ivsize = DES_BLOCK_SIZE,
1807 + .maxauthsize = SHA1_DIGEST_SIZE,
1808 + .base = {
1809 + .cra_name = "authenc(hmac(sha1),cbc(des))",
1810 + .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
1811 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
1812 + .cra_flags = CRYPTO_ALG_ASYNC |
1813 + CRYPTO_ALG_KERN_DRIVER_ONLY,
1814 + .cra_blocksize = DES_BLOCK_SIZE,
1815 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1816 + .cra_alignmask = 0,
1817 + .cra_init = safexcel_aead_sha1_des_cra_init,
1818 + .cra_exit = safexcel_aead_cra_exit,
1819 + .cra_module = THIS_MODULE,
1820 + },
1821 + },
1822 +};
1823 +
1824 +static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
1825 +{
1826 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1827 +
1828 + safexcel_aead_sha256_cra_init(tfm);
1829 + ctx->alg = SAFEXCEL_DES; /* override default */
1830 + ctx->blocksz = DES_BLOCK_SIZE;
1831 + ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1832 + return 0;
1833 +}
1834 +
1835 +struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
1836 + .type = SAFEXCEL_ALG_TYPE_AEAD,
1837 + .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1838 + .alg.aead = {
1839 + .setkey = safexcel_aead_setkey,
1840 + .encrypt = safexcel_aead_encrypt,
1841 + .decrypt = safexcel_aead_decrypt,
1842 + .ivsize = DES_BLOCK_SIZE,
1843 + .maxauthsize = SHA256_DIGEST_SIZE,
1844 + .base = {
1845 + .cra_name = "authenc(hmac(sha256),cbc(des))",
1846 + .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
1847 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
1848 + .cra_flags = CRYPTO_ALG_ASYNC |
1849 + CRYPTO_ALG_KERN_DRIVER_ONLY,
1850 + .cra_blocksize = DES_BLOCK_SIZE,
1851 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1852 + .cra_alignmask = 0,
1853 + .cra_init = safexcel_aead_sha256_des_cra_init,
1854 + .cra_exit = safexcel_aead_cra_exit,
1855 + .cra_module = THIS_MODULE,
1856 + },
1857 + },
1858 +};
1859 +
1860 +static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
1861 +{
1862 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1863 +
1864 + safexcel_aead_sha224_cra_init(tfm);
1865 + ctx->alg = SAFEXCEL_DES; /* override default */
1866 + ctx->blocksz = DES_BLOCK_SIZE;
1867 + ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1868 + return 0;
1869 +}
1870 +
1871 +struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
1872 + .type = SAFEXCEL_ALG_TYPE_AEAD,
1873 + .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1874 + .alg.aead = {
1875 + .setkey = safexcel_aead_setkey,
1876 + .encrypt = safexcel_aead_encrypt,
1877 + .decrypt = safexcel_aead_decrypt,
1878 + .ivsize = DES_BLOCK_SIZE,
1879 + .maxauthsize = SHA224_DIGEST_SIZE,
1880 + .base = {
1881 + .cra_name = "authenc(hmac(sha224),cbc(des))",
1882 + .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
1883 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
1884 + .cra_flags = CRYPTO_ALG_ASYNC |
1885 + CRYPTO_ALG_KERN_DRIVER_ONLY,
1886 + .cra_blocksize = DES_BLOCK_SIZE,
1887 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1888 + .cra_alignmask = 0,
1889 + .cra_init = safexcel_aead_sha224_des_cra_init,
1890 + .cra_exit = safexcel_aead_cra_exit,
1891 + .cra_module = THIS_MODULE,
1892 + },
1893 + },
1894 +};
1895 +
1896 +static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
1897 +{
1898 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1899 +
1900 + safexcel_aead_sha512_cra_init(tfm);
1901 + ctx->alg = SAFEXCEL_DES; /* override default */
1902 + ctx->blocksz = DES_BLOCK_SIZE;
1903 + ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1904 + return 0;
1905 +}
1906 +
1907 +struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
1908 + .type = SAFEXCEL_ALG_TYPE_AEAD,
1909 + .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
1910 + .alg.aead = {
1911 + .setkey = safexcel_aead_setkey,
1912 + .encrypt = safexcel_aead_encrypt,
1913 + .decrypt = safexcel_aead_decrypt,
1914 + .ivsize = DES_BLOCK_SIZE,
1915 + .maxauthsize = SHA512_DIGEST_SIZE,
1916 + .base = {
1917 + .cra_name = "authenc(hmac(sha512),cbc(des))",
1918 + .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
1919 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
1920 + .cra_flags = CRYPTO_ALG_ASYNC |
1921 + CRYPTO_ALG_KERN_DRIVER_ONLY,
1922 + .cra_blocksize = DES_BLOCK_SIZE,
1923 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1924 + .cra_alignmask = 0,
1925 + .cra_init = safexcel_aead_sha512_des_cra_init,
1926 + .cra_exit = safexcel_aead_cra_exit,
1927 + .cra_module = THIS_MODULE,
1928 + },
1929 + },
1930 +};
1931 +
1932 +static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
1933 +{
1934 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1935 +
1936 + safexcel_aead_sha384_cra_init(tfm);
1937 + ctx->alg = SAFEXCEL_DES; /* override default */
1938 + ctx->blocksz = DES_BLOCK_SIZE;
1939 + ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1940 + return 0;
1941 +}
1942 +
1943 +struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
1944 + .type = SAFEXCEL_ALG_TYPE_AEAD,
1945 + .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
1946 + .alg.aead = {
1947 + .setkey = safexcel_aead_setkey,
1948 + .encrypt = safexcel_aead_encrypt,
1949 + .decrypt = safexcel_aead_decrypt,
1950 + .ivsize = DES_BLOCK_SIZE,
1951 + .maxauthsize = SHA384_DIGEST_SIZE,
1952 + .base = {
1953 + .cra_name = "authenc(hmac(sha384),cbc(des))",
1954 + .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
1955 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
1956 + .cra_flags = CRYPTO_ALG_ASYNC |
1957 + CRYPTO_ALG_KERN_DRIVER_ONLY,
1958 + .cra_blocksize = DES_BLOCK_SIZE,
1959 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1960 + .cra_alignmask = 0,
1961 + .cra_init = safexcel_aead_sha384_des_cra_init,
1962 + .cra_exit = safexcel_aead_cra_exit,
1963 + .cra_module = THIS_MODULE,
1964 + },
1965 + },
1966 +};
1967 +
1968 static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
1969 {
1970 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1971 @@ -1965,14 +2450,12 @@
1972 /* Only half of the key data is cipher key */
1973 keylen = (len >> 1);
1974 ret = aes_expandkey(&aes, key, keylen);
1975 - if (ret) {
1976 - crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1977 + if (ret)
1978 return ret;
1979 - }
1980
1981 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1982 for (i = 0; i < keylen / sizeof(u32); i++) {
1983 - if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
1984 + if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1985 ctx->base.needs_inv = true;
1986 break;
1987 }
1988 @@ -1984,15 +2467,13 @@
1989
1990 /* The other half is the tweak key */
1991 ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
1992 - if (ret) {
1993 - crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1994 + if (ret)
1995 return ret;
1996 - }
1997
1998 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1999 for (i = 0; i < keylen / sizeof(u32); i++) {
2000 - if (ctx->key[i + keylen / sizeof(u32)] !=
2001 - cpu_to_le32(aes.key_enc[i])) {
2002 + if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2003 + aes.key_enc[i]) {
2004 ctx->base.needs_inv = true;
2005 break;
2006 }
2007 @@ -2015,6 +2496,7 @@
2008
2009 safexcel_skcipher_cra_init(tfm);
2010 ctx->alg = SAFEXCEL_AES;
2011 + ctx->blocksz = AES_BLOCK_SIZE;
2012 ctx->xts = 1;
2013 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2014 return 0;
2015 @@ -2075,14 +2557,13 @@
2016
2017 ret = aes_expandkey(&aes, key, len);
2018 if (ret) {
2019 - crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2020 memzero_explicit(&aes, sizeof(aes));
2021 return ret;
2022 }
2023
2024 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2025 for (i = 0; i < len / sizeof(u32); i++) {
2026 - if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
2027 + if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2028 ctx->base.needs_inv = true;
2029 break;
2030 }
2031 @@ -2099,8 +2580,6 @@
2032 crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
2033 CRYPTO_TFM_REQ_MASK);
2034 ret = crypto_cipher_setkey(ctx->hkaes, key, len);
2035 - crypto_aead_set_flags(ctfm, crypto_cipher_get_flags(ctx->hkaes) &
2036 - CRYPTO_TFM_RES_MASK);
2037 if (ret)
2038 return ret;
2039
2040 @@ -2109,7 +2588,7 @@
2041
2042 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2043 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2044 - if (ctx->ipad[i] != cpu_to_be32(hashkey[i])) {
2045 + if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) {
2046 ctx->base.needs_inv = true;
2047 break;
2048 }
2049 @@ -2135,10 +2614,7 @@
2050 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2051
2052 ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
2053 - if (IS_ERR(ctx->hkaes))
2054 - return PTR_ERR(ctx->hkaes);
2055 -
2056 - return 0;
2057 + return PTR_ERR_OR_ZERO(ctx->hkaes);
2058 }
2059
2060 static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2061 @@ -2192,14 +2668,13 @@
2062
2063 ret = aes_expandkey(&aes, key, len);
2064 if (ret) {
2065 - crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2066 memzero_explicit(&aes, sizeof(aes));
2067 return ret;
2068 }
2069
2070 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2071 for (i = 0; i < len / sizeof(u32); i++) {
2072 - if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
2073 + if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2074 ctx->base.needs_inv = true;
2075 break;
2076 }
2077 @@ -2235,6 +2710,7 @@
2078 ctx->state_sz = 3 * AES_BLOCK_SIZE;
2079 ctx->xcm = EIP197_XCM_MODE_CCM;
2080 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2081 + ctx->ctrinit = 0;
2082 return 0;
2083 }
2084
2085 @@ -2301,5 +2777,949 @@
2086 .cra_exit = safexcel_aead_cra_exit,
2087 .cra_module = THIS_MODULE,
2088 },
2089 + },
2090 +};
2091 +
2092 +static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2093 + const u8 *key)
2094 +{
2095 + struct safexcel_crypto_priv *priv = ctx->priv;
2096 +
2097 + if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2098 + if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2099 + ctx->base.needs_inv = true;
2100 +
2101 + memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2102 + ctx->key_len = CHACHA_KEY_SIZE;
2103 +}
2104 +
2105 +static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2106 + const u8 *key, unsigned int len)
2107 +{
2108 + struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2109 +
2110 + if (len != CHACHA_KEY_SIZE)
2111 + return -EINVAL;
2112 +
2113 + safexcel_chacha20_setkey(ctx, key);
2114 +
2115 + return 0;
2116 +}
2117 +
2118 +static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2119 +{
2120 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2121 +
2122 + safexcel_skcipher_cra_init(tfm);
2123 + ctx->alg = SAFEXCEL_CHACHA20;
2124 + ctx->ctrinit = 0;
2125 + ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2126 + return 0;
2127 +}
2128 +
2129 +struct safexcel_alg_template safexcel_alg_chacha20 = {
2130 + .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2131 + .algo_mask = SAFEXCEL_ALG_CHACHA20,
2132 + .alg.skcipher = {
2133 + .setkey = safexcel_skcipher_chacha20_setkey,
2134 + .encrypt = safexcel_encrypt,
2135 + .decrypt = safexcel_decrypt,
2136 + .min_keysize = CHACHA_KEY_SIZE,
2137 + .max_keysize = CHACHA_KEY_SIZE,
2138 + .ivsize = CHACHA_IV_SIZE,
2139 + .base = {
2140 + .cra_name = "chacha20",
2141 + .cra_driver_name = "safexcel-chacha20",
2142 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
2143 + .cra_flags = CRYPTO_ALG_ASYNC |
2144 + CRYPTO_ALG_KERN_DRIVER_ONLY,
2145 + .cra_blocksize = 1,
2146 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2147 + .cra_alignmask = 0,
2148 + .cra_init = safexcel_skcipher_chacha20_cra_init,
2149 + .cra_exit = safexcel_skcipher_cra_exit,
2150 + .cra_module = THIS_MODULE,
2151 + },
2152 + },
2153 +};
2154 +
2155 +static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2156 + const u8 *key, unsigned int len)
2157 +{
2158 + struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2159 +
2160 + if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
2161 + len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2162 + /* ESP variant has nonce appended to key */
2163 + len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2164 + ctx->nonce = *(u32 *)(key + len);
2165 + }
2166 + if (len != CHACHA_KEY_SIZE)
2167 + return -EINVAL;
2168 +
2169 + safexcel_chacha20_setkey(ctx, key);
2170 +
2171 + return 0;
2172 +}
2173 +
2174 +static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2175 + unsigned int authsize)
2176 +{
2177 + if (authsize != POLY1305_DIGEST_SIZE)
2178 + return -EINVAL;
2179 + return 0;
2180 +}
2181 +
2182 +static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
2183 + enum safexcel_cipher_direction dir)
2184 +{
2185 + struct safexcel_cipher_req *creq = aead_request_ctx(req);
2186 + struct crypto_aead *aead = crypto_aead_reqtfm(req);
2187 + struct crypto_tfm *tfm = crypto_aead_tfm(aead);
2188 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2189 + struct aead_request *subreq = aead_request_ctx(req);
2190 + u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
2191 + int ret = 0;
2192 +
2193 + /*
2194 + * Instead of wasting time detecting umpteen silly corner cases,
2195 + * just dump all "small" requests to the fallback implementation.
2196 + * HW would not be faster on such small requests anyway.
2197 + */
2198 + if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
2199 + req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
2200 + req->cryptlen > POLY1305_DIGEST_SIZE)) {
2201 + return safexcel_queue_req(&req->base, creq, dir);
2202 + }
2203 +
2204 + /* HW cannot do full (AAD+payload) zero length, use fallback */
2205 + memcpy(key, ctx->key, CHACHA_KEY_SIZE);
2206 + if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
2207 + /* ESP variant has nonce appended to the key */
2208 + key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
2209 + ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2210 + CHACHA_KEY_SIZE +
2211 + EIP197_AEAD_IPSEC_NONCE_SIZE);
2212 + } else {
2213 + ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2214 + CHACHA_KEY_SIZE);
2215 + }
2216 + if (ret) {
2217 + crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
2218 + crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
2219 + CRYPTO_TFM_REQ_MASK);
2220 + return ret;
2221 + }
2222 +
2223 + aead_request_set_tfm(subreq, ctx->fback);
2224 + aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2225 + req->base.data);
2226 + aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
2227 + req->iv);
2228 + aead_request_set_ad(subreq, req->assoclen);
2229 +
2230 + return (dir == SAFEXCEL_ENCRYPT) ?
2231 + crypto_aead_encrypt(subreq) :
2232 + crypto_aead_decrypt(subreq);
2233 +}
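[Editor's note: the corner-case handling above reduces to a simple predicate for choosing the hardware path; a sketch with a hypothetical helper name, using the same fields as the function above:

	/* Sketch: requests with a zero/tiny payload, or ESP requests
	 * without at least the 8-byte IV as AAD, go to the fallback. */
	static bool chachapoly_hw_usable(struct safexcel_cipher_ctx *ctx,
					 struct aead_request *req)
	{
		if (req->cryptlen <= POLY1305_DIGEST_SIZE)
			return false;	/* HW no faster on small requests */
		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
		    req->assoclen < EIP197_AEAD_IPSEC_IV_SIZE)
			return false;	/* short/malformed ESP header */
		return true;
	}]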
2234 +
2235 +static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2236 +{
2237 + return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2238 +}
2239 +
2240 +static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2241 +{
2242 + return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2243 +}
2244 +
2245 +static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
2246 +{
2247 + struct crypto_aead *aead = __crypto_aead_cast(tfm);
2248 + struct aead_alg *alg = crypto_aead_alg(aead);
2249 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2250 +
2251 + safexcel_aead_cra_init(tfm);
2252 +
2253 + /* Allocate fallback implementation */
2254 + ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2255 + CRYPTO_ALG_ASYNC |
2256 + CRYPTO_ALG_NEED_FALLBACK);
2257 + if (IS_ERR(ctx->fback))
2258 + return PTR_ERR(ctx->fback);
2259 +
2260 + crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2261 + sizeof(struct aead_request) +
2262 + crypto_aead_reqsize(ctx->fback)));
2263 +
2264 + return 0;
2265 +}
2266 +
2267 +static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2268 +{
2269 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2270 +
2271 + safexcel_aead_fallback_cra_init(tfm);
2272 + ctx->alg = SAFEXCEL_CHACHA20;
2273 + ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2274 + CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2275 + ctx->ctrinit = 0;
2276 + ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2277 + ctx->state_sz = 0; /* Precomputed by HW */
2278 + return 0;
2279 +}
2280 +
2281 +static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2282 +{
2283 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2284 +
2285 + crypto_free_aead(ctx->fback);
2286 + safexcel_aead_cra_exit(tfm);
2287 +}
2288 +
2289 +struct safexcel_alg_template safexcel_alg_chachapoly = {
2290 + .type = SAFEXCEL_ALG_TYPE_AEAD,
2291 + .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2292 + .alg.aead = {
2293 + .setkey = safexcel_aead_chachapoly_setkey,
2294 + .setauthsize = safexcel_aead_chachapoly_setauthsize,
2295 + .encrypt = safexcel_aead_chachapoly_encrypt,
2296 + .decrypt = safexcel_aead_chachapoly_decrypt,
2297 + .ivsize = CHACHAPOLY_IV_SIZE,
2298 + .maxauthsize = POLY1305_DIGEST_SIZE,
2299 + .base = {
2300 + .cra_name = "rfc7539(chacha20,poly1305)",
2301 + .cra_driver_name = "safexcel-chacha20-poly1305",
2302 + /* +1 to put it above HW chacha + SW poly */
2303 + .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
2304 + .cra_flags = CRYPTO_ALG_ASYNC |
2305 + CRYPTO_ALG_KERN_DRIVER_ONLY |
2306 + CRYPTO_ALG_NEED_FALLBACK,
2307 + .cra_blocksize = 1,
2308 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2309 + .cra_alignmask = 0,
2310 + .cra_init = safexcel_aead_chachapoly_cra_init,
2311 + .cra_exit = safexcel_aead_fallback_cra_exit,
2312 + .cra_module = THIS_MODULE,
2313 + },
2314 + },
2315 +};
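[Editor's note: a caller reaches the template above through the generic kernel AEAD API; a usage sketch with a hypothetical helper name, standard crypto calls only:

	static struct crypto_aead *get_chachapoly(void)
	{
		struct crypto_aead *tfm;

		/* cra_priority + 1 makes this instance win over a
		 * HW-chacha + SW-poly composition of the same cra_name */
		tfm = crypto_alloc_aead("rfc7539(chacha20,poly1305)", 0, 0);
		if (IS_ERR(tfm))
			return tfm;
		if (crypto_aead_setauthsize(tfm, POLY1305_DIGEST_SIZE)) {
			crypto_free_aead(tfm);
			return ERR_PTR(-EINVAL);
		}
		return tfm;	/* key set later via crypto_aead_setkey() */
	}]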
2316 +
2317 +static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
2318 +{
2319 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2320 + int ret;
2321 +
2322 + ret = safexcel_aead_chachapoly_cra_init(tfm);
2323 + ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
2324 + ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
2325 + return ret;
2326 +}
2327 +
2328 +struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
2329 + .type = SAFEXCEL_ALG_TYPE_AEAD,
2330 + .algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2331 + .alg.aead = {
2332 + .setkey = safexcel_aead_chachapoly_setkey,
2333 + .setauthsize = safexcel_aead_chachapoly_setauthsize,
2334 + .encrypt = safexcel_aead_chachapoly_encrypt,
2335 + .decrypt = safexcel_aead_chachapoly_decrypt,
2336 + .ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
2337 + .maxauthsize = POLY1305_DIGEST_SIZE,
2338 + .base = {
2339 + .cra_name = "rfc7539esp(chacha20,poly1305)",
2340 + .cra_driver_name = "safexcel-chacha20-poly1305-esp",
2341 + /* +1 to put it above HW chacha + SW poly */
2342 + .cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
2343 + .cra_flags = CRYPTO_ALG_ASYNC |
2344 + CRYPTO_ALG_KERN_DRIVER_ONLY |
2345 + CRYPTO_ALG_NEED_FALLBACK,
2346 + .cra_blocksize = 1,
2347 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2348 + .cra_alignmask = 0,
2349 + .cra_init = safexcel_aead_chachapolyesp_cra_init,
2350 + .cra_exit = safexcel_aead_fallback_cra_exit,
2351 + .cra_module = THIS_MODULE,
2352 + },
2353 + },
2354 +};
2355 +
2356 +static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
2357 + const u8 *key, unsigned int len)
2358 +{
2359 + struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2360 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2361 + struct safexcel_crypto_priv *priv = ctx->priv;
2362 +
2363 + if (len != SM4_KEY_SIZE)
2364 + return -EINVAL;
2365 +
2366 + if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2367 + if (memcmp(ctx->key, key, SM4_KEY_SIZE))
2368 + ctx->base.needs_inv = true;
2369 +
2370 + memcpy(ctx->key, key, SM4_KEY_SIZE);
2371 + ctx->key_len = SM4_KEY_SIZE;
2372 +
2373 + return 0;
2374 +}
2375 +
2376 +static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
2377 +{
2378 + /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
2379 + if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
2380 + return -EINVAL;
2381 + else
2382 + return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2383 + SAFEXCEL_ENCRYPT);
2384 +}
2385 +
2386 +static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
2387 +{
2388 + /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
2389 + if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
2390 + return -EINVAL;
2391 + else
2392 + return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2393 + SAFEXCEL_DECRYPT);
2394 +}
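[Editor's note: both SM4 block-mode wrappers above guard the same hardware limitation; as a sketch (hypothetical helper):

	/* Sketch: EIP96 4.3 silently accepts partial blocks, so misaligned
	 * lengths must be rejected in software before queueing. */
	static inline bool sm4_blk_len_ok(unsigned int cryptlen)
	{
		return !(cryptlen & (SM4_BLOCK_SIZE - 1));
	}]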
2395 +
2396 +static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
2397 +{
2398 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2399 +
2400 + safexcel_skcipher_cra_init(tfm);
2401 + ctx->alg = SAFEXCEL_SM4;
2402 + ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
2403 + ctx->blocksz = 0;
2404 + ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2405 + return 0;
2406 +}
2407 +
2408 +struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
2409 + .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2410 + .algo_mask = SAFEXCEL_ALG_SM4,
2411 + .alg.skcipher = {
2412 + .setkey = safexcel_skcipher_sm4_setkey,
2413 + .encrypt = safexcel_sm4_blk_encrypt,
2414 + .decrypt = safexcel_sm4_blk_decrypt,
2415 + .min_keysize = SM4_KEY_SIZE,
2416 + .max_keysize = SM4_KEY_SIZE,
2417 + .base = {
2418 + .cra_name = "ecb(sm4)",
2419 + .cra_driver_name = "safexcel-ecb-sm4",
2420 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
2421 + .cra_flags = CRYPTO_ALG_ASYNC |
2422 + CRYPTO_ALG_KERN_DRIVER_ONLY,
2423 + .cra_blocksize = SM4_BLOCK_SIZE,
2424 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2425 + .cra_alignmask = 0,
2426 + .cra_init = safexcel_skcipher_sm4_ecb_cra_init,
2427 + .cra_exit = safexcel_skcipher_cra_exit,
2428 + .cra_module = THIS_MODULE,
2429 + },
2430 + },
2431 +};
2432 +
2433 +static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
2434 +{
2435 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2436 +
2437 + safexcel_skcipher_cra_init(tfm);
2438 + ctx->alg = SAFEXCEL_SM4;
2439 + ctx->blocksz = SM4_BLOCK_SIZE;
2440 + ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
2441 + return 0;
2442 +}
2443 +
2444 +struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
2445 + .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2446 + .algo_mask = SAFEXCEL_ALG_SM4,
2447 + .alg.skcipher = {
2448 + .setkey = safexcel_skcipher_sm4_setkey,
2449 + .encrypt = safexcel_sm4_blk_encrypt,
2450 + .decrypt = safexcel_sm4_blk_decrypt,
2451 + .min_keysize = SM4_KEY_SIZE,
2452 + .max_keysize = SM4_KEY_SIZE,
2453 + .ivsize = SM4_BLOCK_SIZE,
2454 + .base = {
2455 + .cra_name = "cbc(sm4)",
2456 + .cra_driver_name = "safexcel-cbc-sm4",
2457 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
2458 + .cra_flags = CRYPTO_ALG_ASYNC |
2459 + CRYPTO_ALG_KERN_DRIVER_ONLY,
2460 + .cra_blocksize = SM4_BLOCK_SIZE,
2461 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2462 + .cra_alignmask = 0,
2463 + .cra_init = safexcel_skcipher_sm4_cbc_cra_init,
2464 + .cra_exit = safexcel_skcipher_cra_exit,
2465 + .cra_module = THIS_MODULE,
2466 + },
2467 + },
2468 +};
2469 +
2470 +static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
2471 +{
2472 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2473 +
2474 + safexcel_skcipher_cra_init(tfm);
2475 + ctx->alg = SAFEXCEL_SM4;
2476 + ctx->blocksz = SM4_BLOCK_SIZE;
2477 + ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
2478 + return 0;
2479 +}
2480 +
2481 +struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
2482 + .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2483 + .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
2484 + .alg.skcipher = {
2485 + .setkey = safexcel_skcipher_sm4_setkey,
2486 + .encrypt = safexcel_encrypt,
2487 + .decrypt = safexcel_decrypt,
2488 + .min_keysize = SM4_KEY_SIZE,
2489 + .max_keysize = SM4_KEY_SIZE,
2490 + .ivsize = SM4_BLOCK_SIZE,
2491 + .base = {
2492 + .cra_name = "ofb(sm4)",
2493 + .cra_driver_name = "safexcel-ofb-sm4",
2494 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
2495 + .cra_flags = CRYPTO_ALG_ASYNC |
2496 + CRYPTO_ALG_KERN_DRIVER_ONLY,
2497 + .cra_blocksize = 1,
2498 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2499 + .cra_alignmask = 0,
2500 + .cra_init = safexcel_skcipher_sm4_ofb_cra_init,
2501 + .cra_exit = safexcel_skcipher_cra_exit,
2502 + .cra_module = THIS_MODULE,
2503 + },
2504 + },
2505 +};
2506 +
2507 +static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
2508 +{
2509 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2510 +
2511 + safexcel_skcipher_cra_init(tfm);
2512 + ctx->alg = SAFEXCEL_SM4;
2513 + ctx->blocksz = SM4_BLOCK_SIZE;
2514 + ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
2515 + return 0;
2516 +}
2517 +
2518 +struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
2519 + .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2520 + .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
2521 + .alg.skcipher = {
2522 + .setkey = safexcel_skcipher_sm4_setkey,
2523 + .encrypt = safexcel_encrypt,
2524 + .decrypt = safexcel_decrypt,
2525 + .min_keysize = SM4_KEY_SIZE,
2526 + .max_keysize = SM4_KEY_SIZE,
2527 + .ivsize = SM4_BLOCK_SIZE,
2528 + .base = {
2529 + .cra_name = "cfb(sm4)",
2530 + .cra_driver_name = "safexcel-cfb-sm4",
2531 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
2532 + .cra_flags = CRYPTO_ALG_ASYNC |
2533 + CRYPTO_ALG_KERN_DRIVER_ONLY,
2534 + .cra_blocksize = 1,
2535 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2536 + .cra_alignmask = 0,
2537 + .cra_init = safexcel_skcipher_sm4_cfb_cra_init,
2538 + .cra_exit = safexcel_skcipher_cra_exit,
2539 + .cra_module = THIS_MODULE,
2540 + },
2541 + },
2542 +};
2543 +
2544 +static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
2545 + const u8 *key, unsigned int len)
2546 +{
2547 + struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2548 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2549 +
2550 + /* last 4 bytes of key are the nonce! */
2551 + ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
2552 + /* exclude the nonce here */
2553 + len -= CTR_RFC3686_NONCE_SIZE;
2554 +
2555 + return safexcel_skcipher_sm4_setkey(ctfm, key, len);
2556 +}
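[Editor's note: the setkey above splits one key blob into cipher key and nonce; its layout as a sketch (hypothetical struct, for illustration only):

	/* Sketch of the rfc3686 key blob parsed above */
	struct rfc3686_sm4_key_blob {
		u8 key[SM4_KEY_SIZE];			/* SM4 key proper */
		u8 nonce[CTR_RFC3686_NONCE_SIZE];	/* fixed IV prefix */
	} __packed;]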
2557 +
2558 +static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
2559 +{
2560 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2561 +
2562 + safexcel_skcipher_cra_init(tfm);
2563 + ctx->alg = SAFEXCEL_SM4;
2564 + ctx->blocksz = SM4_BLOCK_SIZE;
2565 + ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
2566 + return 0;
2567 +}
2568 +
2569 +struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
2570 + .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2571 + .algo_mask = SAFEXCEL_ALG_SM4,
2572 + .alg.skcipher = {
2573 + .setkey = safexcel_skcipher_sm4ctr_setkey,
2574 + .encrypt = safexcel_encrypt,
2575 + .decrypt = safexcel_decrypt,
2576 + /* Add nonce size */
2577 + .min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
2578 + .max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
2579 + .ivsize = CTR_RFC3686_IV_SIZE,
2580 + .base = {
2581 + .cra_name = "rfc3686(ctr(sm4))",
2582 + .cra_driver_name = "safexcel-ctr-sm4",
2583 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
2584 + .cra_flags = CRYPTO_ALG_ASYNC |
2585 + CRYPTO_ALG_KERN_DRIVER_ONLY,
2586 + .cra_blocksize = 1,
2587 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2588 + .cra_alignmask = 0,
2589 + .cra_init = safexcel_skcipher_sm4_ctr_cra_init,
2590 + .cra_exit = safexcel_skcipher_cra_exit,
2591 + .cra_module = THIS_MODULE,
2592 + },
2593 + },
2594 +};
2595 +
2596 +static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
2597 +{
2598 + /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
2599 + if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
2600 + return -EINVAL;
2601 +
2602 + return safexcel_queue_req(&req->base, aead_request_ctx(req),
2603 + SAFEXCEL_ENCRYPT);
2604 +}
2605 +
2606 +static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
2607 +{
2608 + struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2609 +
2610 + /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
2611 + if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
2612 + return -EINVAL;
2613 +
2614 + return safexcel_queue_req(&req->base, aead_request_ctx(req),
2615 + SAFEXCEL_DECRYPT);
2616 +}
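[Editor's note: on the AEAD decrypt path the authentication tag is included in ->cryptlen, so only the ciphertext proper needs block alignment; a sketch of the check above (hypothetical helper):

	static inline bool sm4_aead_dec_len_ok(struct aead_request *req)
	{
		unsigned int tag = crypto_aead_authsize(crypto_aead_reqtfm(req));

		/* ciphertext minus the trailing tag must be block-aligned */
		return !((req->cryptlen - tag) & (SM4_BLOCK_SIZE - 1));
	}]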
2617 +
2618 +static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
2619 +{
2620 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2621 +
2622 + safexcel_aead_cra_init(tfm);
2623 + ctx->alg = SAFEXCEL_SM4;
2624 + ctx->blocksz = SM4_BLOCK_SIZE;
2625 + ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
2626 + ctx->state_sz = SHA1_DIGEST_SIZE;
2627 + return 0;
2628 +}
2629 +
2630 +struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
2631 + .type = SAFEXCEL_ALG_TYPE_AEAD,
2632 + .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
2633 + .alg.aead = {
2634 + .setkey = safexcel_aead_setkey,
2635 + .encrypt = safexcel_aead_sm4_blk_encrypt,
2636 + .decrypt = safexcel_aead_sm4_blk_decrypt,
2637 + .ivsize = SM4_BLOCK_SIZE,
2638 + .maxauthsize = SHA1_DIGEST_SIZE,
2639 + .base = {
2640 + .cra_name = "authenc(hmac(sha1),cbc(sm4))",
2641 + .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
2642 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
2643 + .cra_flags = CRYPTO_ALG_ASYNC |
2644 + CRYPTO_ALG_KERN_DRIVER_ONLY,
2645 + .cra_blocksize = SM4_BLOCK_SIZE,
2646 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2647 + .cra_alignmask = 0,
2648 + .cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
2649 + .cra_exit = safexcel_aead_cra_exit,
2650 + .cra_module = THIS_MODULE,
2651 + },
2652 + },
2653 +};
2654 +
2655 +static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
2656 + const u8 *key, unsigned int len)
2657 +{
2658 + struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2659 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2660 +
2661 + /* Keep fallback cipher synchronized */
2662 + return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
2663 + safexcel_aead_setkey(ctfm, key, len);
2664 +}
2665 +
2666 +static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
2667 + unsigned int authsize)
2668 +{
2669 + struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2670 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2671 +
2672 + /* Keep fallback cipher synchronized */
2673 + return crypto_aead_setauthsize(ctx->fback, authsize);
2674 +}
2675 +
2676 +static int safexcel_aead_fallback_crypt(struct aead_request *req,
2677 + enum safexcel_cipher_direction dir)
2678 +{
2679 + struct crypto_aead *aead = crypto_aead_reqtfm(req);
2680 + struct crypto_tfm *tfm = crypto_aead_tfm(aead);
2681 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2682 + struct aead_request *subreq = aead_request_ctx(req);
2683 +
2684 + aead_request_set_tfm(subreq, ctx->fback);
2685 + aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2686 + req->base.data);
2687 + aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
2688 + req->iv);
2689 + aead_request_set_ad(subreq, req->assoclen);
2690 +
2691 + return (dir == SAFEXCEL_ENCRYPT) ?
2692 + crypto_aead_encrypt(subreq) :
2693 + crypto_aead_decrypt(subreq);
2694 +}
2695 +
2696 +static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
2697 +{
2698 + struct safexcel_cipher_req *creq = aead_request_ctx(req);
2699 +
2700 + /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
2701 + if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
2702 + return -EINVAL;
2703 + else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
2704 + return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2705 +
2706 + /* HW cannot do full (AAD+payload) zero length, use fallback */
2707 + return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
2708 +}
2709 +
2710 +static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
2711 +{
2712 + struct safexcel_cipher_req *creq = aead_request_ctx(req);
2713 + struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2714 +
2715 + /* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
2716 + if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
2717 + return -EINVAL;
2718 + else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
2719 + /* If input length > 0 only */
2720 + return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2721 +
2722 + /* HW cannot do full (AAD+payload) zero length, use fallback */
2723 + return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
2724 +}
2725 +
2726 +static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
2727 +{
2728 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2729 +
2730 + safexcel_aead_fallback_cra_init(tfm);
2731 + ctx->alg = SAFEXCEL_SM4;
2732 + ctx->blocksz = SM4_BLOCK_SIZE;
2733 + ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2734 + ctx->state_sz = SM3_DIGEST_SIZE;
2735 + return 0;
2736 +}
2737 +
2738 +struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
2739 + .type = SAFEXCEL_ALG_TYPE_AEAD,
2740 + .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
2741 + .alg.aead = {
2742 + .setkey = safexcel_aead_fallback_setkey,
2743 + .setauthsize = safexcel_aead_fallback_setauthsize,
2744 + .encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
2745 + .decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
2746 + .ivsize = SM4_BLOCK_SIZE,
2747 + .maxauthsize = SM3_DIGEST_SIZE,
2748 + .base = {
2749 + .cra_name = "authenc(hmac(sm3),cbc(sm4))",
2750 + .cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
2751 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
2752 + .cra_flags = CRYPTO_ALG_ASYNC |
2753 + CRYPTO_ALG_KERN_DRIVER_ONLY |
2754 + CRYPTO_ALG_NEED_FALLBACK,
2755 + .cra_blocksize = SM4_BLOCK_SIZE,
2756 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2757 + .cra_alignmask = 0,
2758 + .cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
2759 + .cra_exit = safexcel_aead_fallback_cra_exit,
2760 + .cra_module = THIS_MODULE,
2761 + },
2762 + },
2763 +};
2764 +
2765 +static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
2766 +{
2767 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2768 +
2769 + safexcel_aead_sm4cbc_sha1_cra_init(tfm);
2770 + ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
2771 + return 0;
2772 +}
2773 +
2774 +struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
2775 + .type = SAFEXCEL_ALG_TYPE_AEAD,
2776 + .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
2777 + .alg.aead = {
2778 + .setkey = safexcel_aead_setkey,
2779 + .encrypt = safexcel_aead_encrypt,
2780 + .decrypt = safexcel_aead_decrypt,
2781 + .ivsize = CTR_RFC3686_IV_SIZE,
2782 + .maxauthsize = SHA1_DIGEST_SIZE,
2783 + .base = {
2784 + .cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
2785 + .cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
2786 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
2787 + .cra_flags = CRYPTO_ALG_ASYNC |
2788 + CRYPTO_ALG_KERN_DRIVER_ONLY,
2789 + .cra_blocksize = 1,
2790 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2791 + .cra_alignmask = 0,
2792 + .cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
2793 + .cra_exit = safexcel_aead_cra_exit,
2794 + .cra_module = THIS_MODULE,
2795 + },
2796 + },
2797 +};
2798 +
2799 +static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
2800 +{
2801 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2802 +
2803 + safexcel_aead_sm4cbc_sm3_cra_init(tfm);
2804 + ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
2805 + return 0;
2806 +}
2807 +
2808 +struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
2809 + .type = SAFEXCEL_ALG_TYPE_AEAD,
2810 + .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
2811 + .alg.aead = {
2812 + .setkey = safexcel_aead_setkey,
2813 + .encrypt = safexcel_aead_encrypt,
2814 + .decrypt = safexcel_aead_decrypt,
2815 + .ivsize = CTR_RFC3686_IV_SIZE,
2816 + .maxauthsize = SM3_DIGEST_SIZE,
2817 + .base = {
2818 + .cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
2819 + .cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
2820 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
2821 + .cra_flags = CRYPTO_ALG_ASYNC |
2822 + CRYPTO_ALG_KERN_DRIVER_ONLY,
2823 + .cra_blocksize = 1,
2824 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2825 + .cra_alignmask = 0,
2826 + .cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
2827 + .cra_exit = safexcel_aead_cra_exit,
2828 + .cra_module = THIS_MODULE,
2829 + },
2830 + },
2831 +};
2832 +
2833 +static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2834 + unsigned int len)
2835 +{
2836 + struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2837 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2838 +
2839 + /* last 4 bytes of key are the nonce! */
2840 + ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
2841 +
2842 + len -= CTR_RFC3686_NONCE_SIZE;
2843 + return safexcel_aead_gcm_setkey(ctfm, key, len);
2844 +}
2845 +
2846 +static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
2847 + unsigned int authsize)
2848 +{
2849 + return crypto_rfc4106_check_authsize(authsize);
2850 +}
2851 +
2852 +static int safexcel_rfc4106_encrypt(struct aead_request *req)
2853 +{
2854 + return crypto_ipsec_check_assoclen(req->assoclen) ?:
2855 + safexcel_aead_encrypt(req);
2856 +}
2857 +
2858 +static int safexcel_rfc4106_decrypt(struct aead_request *req)
2859 +{
2860 + return crypto_ipsec_check_assoclen(req->assoclen) ?:
2861 + safexcel_aead_decrypt(req);
2862 +}
2863 +
2864 +static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
2865 +{
2866 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2867 + int ret;
2868 +
2869 + ret = safexcel_aead_gcm_cra_init(tfm);
2870 + ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
2871 + ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
2872 + return ret;
2873 +}
2874 +
2875 +struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
2876 + .type = SAFEXCEL_ALG_TYPE_AEAD,
2877 + .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2878 + .alg.aead = {
2879 + .setkey = safexcel_rfc4106_gcm_setkey,
2880 + .setauthsize = safexcel_rfc4106_gcm_setauthsize,
2881 + .encrypt = safexcel_rfc4106_encrypt,
2882 + .decrypt = safexcel_rfc4106_decrypt,
2883 + .ivsize = GCM_RFC4106_IV_SIZE,
2884 + .maxauthsize = GHASH_DIGEST_SIZE,
2885 + .base = {
2886 + .cra_name = "rfc4106(gcm(aes))",
2887 + .cra_driver_name = "safexcel-rfc4106-gcm-aes",
2888 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
2889 + .cra_flags = CRYPTO_ALG_ASYNC |
2890 + CRYPTO_ALG_KERN_DRIVER_ONLY,
2891 + .cra_blocksize = 1,
2892 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2893 + .cra_alignmask = 0,
2894 + .cra_init = safexcel_rfc4106_gcm_cra_init,
2895 + .cra_exit = safexcel_aead_gcm_cra_exit,
2896 + },
2897 + },
2898 +};
2899 +
2900 +static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
2901 + unsigned int authsize)
2902 +{
2903 + if (authsize != GHASH_DIGEST_SIZE)
2904 + return -EINVAL;
2905 +
2906 + return 0;
2907 +}
2908 +
2909 +static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
2910 +{
2911 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2912 + int ret;
2913 +
2914 + ret = safexcel_aead_gcm_cra_init(tfm);
2915 + ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
2916 + return ret;
2917 +}
2918 +
2919 +struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
2920 + .type = SAFEXCEL_ALG_TYPE_AEAD,
2921 + .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2922 + .alg.aead = {
2923 + .setkey = safexcel_rfc4106_gcm_setkey,
2924 + .setauthsize = safexcel_rfc4543_gcm_setauthsize,
2925 + .encrypt = safexcel_rfc4106_encrypt,
2926 + .decrypt = safexcel_rfc4106_decrypt,
2927 + .ivsize = GCM_RFC4543_IV_SIZE,
2928 + .maxauthsize = GHASH_DIGEST_SIZE,
2929 + .base = {
2930 + .cra_name = "rfc4543(gcm(aes))",
2931 + .cra_driver_name = "safexcel-rfc4543-gcm-aes",
2932 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
2933 + .cra_flags = CRYPTO_ALG_ASYNC |
2934 + CRYPTO_ALG_KERN_DRIVER_ONLY,
2935 + .cra_blocksize = 1,
2936 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2937 + .cra_alignmask = 0,
2938 + .cra_init = safexcel_rfc4543_gcm_cra_init,
2939 + .cra_exit = safexcel_aead_gcm_cra_exit,
2940 + },
2941 + },
2942 +};
2943 +
2944 +static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2945 + unsigned int len)
2946 +{
2947 + struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2948 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2949 +
2950 +	/* First byte of the nonce = L - 1 = always 3 for RFC4309 (4 byte ctr) */
2951 + *(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
2952 + /* last 3 bytes of key are the nonce! */
2953 + memcpy((u8 *)&ctx->nonce + 1, key + len -
2954 + EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
2955 + EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
2956 +
2957 + len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
2958 + return safexcel_aead_ccm_setkey(ctfm, key, len);
2959 +}
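[Editor's note: the nonce word assembled above packs the CCM length-field byte and the 3-byte salt from the key tail; as a sketch (hypothetical helper, not part of the patch):

	static u32 rfc4309_pack_nonce(const u8 *key, unsigned int len)
	{
		u32 nonce = 0;
		u8 *n = (u8 *)&nonce;

		n[0] = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;	/* L - 1 = 3 */
		memcpy(&n[1], key + len - EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
		       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);	/* trailing salt */
		return nonce;	/* same in-memory layout as ctx->nonce */
	}]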
2960 +
2961 +static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
2962 + unsigned int authsize)
2963 +{
2964 + /* Borrowed from crypto/ccm.c */
2965 + switch (authsize) {
2966 + case 8:
2967 + case 12:
2968 + case 16:
2969 + break;
2970 + default:
2971 + return -EINVAL;
2972 + }
2973 +
2974 + return 0;
2975 +}
2976 +
2977 +static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
2978 +{
2979 + struct safexcel_cipher_req *creq = aead_request_ctx(req);
2980 +
2981 + /* Borrowed from crypto/ccm.c */
2982 + if (req->assoclen != 16 && req->assoclen != 20)
2983 + return -EINVAL;
2984 +
2985 + return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2986 +}
2987 +
2988 +static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
2989 +{
2990 + struct safexcel_cipher_req *creq = aead_request_ctx(req);
2991 +
2992 + /* Borrowed from crypto/ccm.c */
2993 + if (req->assoclen != 16 && req->assoclen != 20)
2994 + return -EINVAL;
2995 +
2996 + return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2997 +}
2998 +
2999 +static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3000 +{
3001 + struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3002 + int ret;
3003 +
3004 + ret = safexcel_aead_ccm_cra_init(tfm);
3005 + ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
3006 + ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3007 + return ret;
3008 +}
3009 +
3010 +struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
3011 + .type = SAFEXCEL_ALG_TYPE_AEAD,
3012 + .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
3013 + .alg.aead = {
3014 + .setkey = safexcel_rfc4309_ccm_setkey,
3015 + .setauthsize = safexcel_rfc4309_ccm_setauthsize,
3016 + .encrypt = safexcel_rfc4309_ccm_encrypt,
3017 + .decrypt = safexcel_rfc4309_ccm_decrypt,
3018 + .ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
3019 + .maxauthsize = AES_BLOCK_SIZE,
3020 + .base = {
3021 + .cra_name = "rfc4309(ccm(aes))",
3022 + .cra_driver_name = "safexcel-rfc4309-ccm-aes",
3023 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
3024 + .cra_flags = CRYPTO_ALG_ASYNC |
3025 + CRYPTO_ALG_KERN_DRIVER_ONLY,
3026 + .cra_blocksize = 1,
3027 + .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3028 + .cra_alignmask = 0,
3029 + .cra_init = safexcel_rfc4309_ccm_cra_init,
3030 + .cra_exit = safexcel_aead_cra_exit,
3031 + .cra_module = THIS_MODULE,
3032 + },
3033 },
3034 };
3035 --- a/drivers/crypto/inside-secure/safexcel.h
3036 +++ b/drivers/crypto/inside-secure/safexcel.h
3037 @@ -17,8 +17,11 @@
3038 #define EIP197_HIA_VERSION_BE 0xca35
3039 #define EIP197_HIA_VERSION_LE 0x35ca
3040 #define EIP97_VERSION_LE 0x9e61
3041 +#define EIP196_VERSION_LE 0x3bc4
3042 #define EIP197_VERSION_LE 0x3ac5
3043 #define EIP96_VERSION_LE 0x9f60
3044 +#define EIP201_VERSION_LE 0x36c9
3045 +#define EIP206_VERSION_LE 0x31ce
3046 #define EIP197_REG_LO16(reg) (reg & 0xffff)
3047 #define EIP197_REG_HI16(reg) ((reg >> 16) & 0xffff)
3048 #define EIP197_VERSION_MASK(reg) ((reg >> 16) & 0xfff)
3049 @@ -26,12 +29,23 @@
3050 ((reg >> 4) & 0xf0) | \
3051 ((reg >> 12) & 0xf))
3052
3053 +/* EIP197 HIA OPTIONS ENCODING */
3054 +#define EIP197_HIA_OPT_HAS_PE_ARB BIT(29)
3055 +
3056 +/* EIP206 OPTIONS ENCODING */
3057 +#define EIP206_OPT_ICE_TYPE(n) ((n>>8)&3)
3058 +
3059 +/* EIP197 OPTIONS ENCODING */
3060 +#define EIP197_OPT_HAS_TRC BIT(31)
3061 +
3062 /* Static configuration */
3063 #define EIP197_DEFAULT_RING_SIZE 400
3064 -#define EIP197_MAX_TOKENS 18
3065 +#define EIP197_EMB_TOKENS 4 /* Pad CD to 16 dwords */
3066 +#define EIP197_MAX_TOKENS 16
3067 #define EIP197_MAX_RINGS 4
3068 #define EIP197_FETCH_DEPTH 2
3069 #define EIP197_MAX_BATCH_SZ 64
3070 +#define EIP197_MAX_RING_AIC 14
3071
3072 #define EIP197_GFP_FLAGS(base) ((base).flags & CRYPTO_TFM_REQ_MAY_SLEEP ? \
3073 GFP_KERNEL : GFP_ATOMIC)
3074 @@ -138,6 +152,7 @@
3075 #define EIP197_HIA_AIC_R_ENABLED_STAT(r) (0xe010 - EIP197_HIA_AIC_R_OFF(r))
3076 #define EIP197_HIA_AIC_R_ACK(r) (0xe010 - EIP197_HIA_AIC_R_OFF(r))
3077 #define EIP197_HIA_AIC_R_ENABLE_CLR(r) (0xe014 - EIP197_HIA_AIC_R_OFF(r))
3078 +#define EIP197_HIA_AIC_R_VERSION(r) (0xe01c - EIP197_HIA_AIC_R_OFF(r))
3079 #define EIP197_HIA_AIC_G_ENABLE_CTRL 0xf808
3080 #define EIP197_HIA_AIC_G_ENABLED_STAT 0xf810
3081 #define EIP197_HIA_AIC_G_ACK 0xf810
3082 @@ -157,12 +172,16 @@
3083 #define EIP197_PE_EIP96_FUNCTION_EN(n) (0x1004 + (0x2000 * (n)))
3084 #define EIP197_PE_EIP96_CONTEXT_CTRL(n) (0x1008 + (0x2000 * (n)))
3085 #define EIP197_PE_EIP96_CONTEXT_STAT(n) (0x100c + (0x2000 * (n)))
3086 +#define EIP197_PE_EIP96_TOKEN_CTRL2(n) (0x102c + (0x2000 * (n)))
3087 #define EIP197_PE_EIP96_FUNCTION2_EN(n) (0x1030 + (0x2000 * (n)))
3088 #define EIP197_PE_EIP96_OPTIONS(n) (0x13f8 + (0x2000 * (n)))
3089 #define EIP197_PE_EIP96_VERSION(n) (0x13fc + (0x2000 * (n)))
3090 #define EIP197_PE_OUT_DBUF_THRES(n) (0x1c00 + (0x2000 * (n)))
3091 #define EIP197_PE_OUT_TBUF_THRES(n) (0x1d00 + (0x2000 * (n)))
3092 +#define EIP197_PE_OPTIONS(n) (0x1ff8 + (0x2000 * (n)))
3093 +#define EIP197_PE_VERSION(n) (0x1ffc + (0x2000 * (n)))
3094 #define EIP197_MST_CTRL 0xfff4
3095 +#define EIP197_OPTIONS 0xfff8
3096 #define EIP197_VERSION 0xfffc
3097
3098 /* EIP197-specific registers, no indirection */
3099 @@ -178,6 +197,7 @@
3100 #define EIP197_TRC_ECCADMINSTAT 0xf0838
3101 #define EIP197_TRC_ECCDATASTAT 0xf083c
3102 #define EIP197_TRC_ECCDATA 0xf0840
3103 +#define EIP197_STRC_CONFIG 0xf43f0
3104 #define EIP197_FLUE_CACHEBASE_LO(n) (0xf6000 + (32 * (n)))
3105 #define EIP197_FLUE_CACHEBASE_HI(n) (0xf6004 + (32 * (n)))
3106 #define EIP197_FLUE_CONFIG(n) (0xf6010 + (32 * (n)))
3107 @@ -188,6 +208,7 @@
3108
3109 /* EIP197_HIA_xDR_DESC_SIZE */
3110 #define EIP197_xDR_DESC_MODE_64BIT BIT(31)
3111 +#define EIP197_CDR_DESC_MODE_ADCP BIT(30)
3112
3113 /* EIP197_HIA_xDR_DMA_CFG */
3114 #define EIP197_HIA_xDR_WR_RES_BUF BIT(22)
3115 @@ -213,7 +234,6 @@
3116 /* EIP197_HIA_xDR_PROC_COUNT */
3117 #define EIP197_xDR_PROC_xD_PKT_OFFSET 24
3118 #define EIP197_xDR_PROC_xD_PKT_MASK GENMASK(6, 0)
3119 -#define EIP197_xDR_PROC_xD_COUNT(n) ((n) << 2)
3120 #define EIP197_xDR_PROC_xD_PKT(n) ((n) << 24)
3121 #define EIP197_xDR_PROC_CLR_COUNT BIT(31)
3122
3123 @@ -228,6 +248,8 @@
3124 #define EIP197_HIA_RA_PE_CTRL_EN BIT(30)
3125
3126 /* EIP197_HIA_OPTIONS */
3127 +#define EIP197_N_RINGS_OFFSET 0
3128 +#define EIP197_N_RINGS_MASK GENMASK(3, 0)
3129 #define EIP197_N_PES_OFFSET 4
3130 #define EIP197_N_PES_MASK GENMASK(4, 0)
3131 #define EIP97_N_PES_MASK GENMASK(2, 0)
3132 @@ -237,13 +259,13 @@
3133 #define EIP197_CFSIZE_OFFSET 9
3134 #define EIP197_CFSIZE_ADJUST 4
3135 #define EIP97_CFSIZE_OFFSET 8
3136 -#define EIP197_CFSIZE_MASK GENMASK(3, 0)
3137 -#define EIP97_CFSIZE_MASK GENMASK(4, 0)
3138 +#define EIP197_CFSIZE_MASK GENMASK(2, 0)
3139 +#define EIP97_CFSIZE_MASK GENMASK(3, 0)
3140 #define EIP197_RFSIZE_OFFSET 12
3141 #define EIP197_RFSIZE_ADJUST 4
3142 #define EIP97_RFSIZE_OFFSET 12
3143 -#define EIP197_RFSIZE_MASK GENMASK(3, 0)
3144 -#define EIP97_RFSIZE_MASK GENMASK(4, 0)
3145 +#define EIP197_RFSIZE_MASK GENMASK(2, 0)
3146 +#define EIP97_RFSIZE_MASK GENMASK(3, 0)
3147
3148 /* EIP197_HIA_AIC_R_ENABLE_CTRL */
3149 #define EIP197_CDR_IRQ(n) BIT((n) * 2)
3150 @@ -257,9 +279,9 @@
3151 #define EIP197_HIA_DxE_CFG_MIN_CTRL_SIZE(n) ((n) << 16)
3152 #define EIP197_HIA_DxE_CFG_CTRL_CACHE_CTRL(n) (((n) & 0x7) << 20)
3153 #define EIP197_HIA_DxE_CFG_MAX_CTRL_SIZE(n) ((n) << 24)
3154 -#define EIP197_HIA_DFE_CFG_DIS_DEBUG (BIT(31) | BIT(29))
3155 +#define EIP197_HIA_DFE_CFG_DIS_DEBUG GENMASK(31, 29)
3156 #define EIP197_HIA_DSE_CFG_EN_SINGLE_WR BIT(29)
3157 -#define EIP197_HIA_DSE_CFG_DIS_DEBUG BIT(31)
3158 +#define EIP197_HIA_DSE_CFG_DIS_DEBUG GENMASK(31, 30)
3159
3160 /* EIP197_HIA_DFE/DSE_THR_CTRL */
3161 #define EIP197_DxE_THR_CTRL_EN BIT(30)
3162 @@ -327,13 +349,21 @@
3163 #define EIP197_ADDRESS_MODE BIT(8)
3164 #define EIP197_CONTROL_MODE BIT(9)
3165
3166 +/* EIP197_PE_EIP96_TOKEN_CTRL2 */
3167 +#define EIP197_PE_EIP96_TOKEN_CTRL2_CTX_DONE BIT(3)
3168 +
3169 +/* EIP197_STRC_CONFIG */
3170 +#define EIP197_STRC_CONFIG_INIT BIT(31)
3171 +#define EIP197_STRC_CONFIG_LARGE_REC(s) (s<<8)
3172 +#define EIP197_STRC_CONFIG_SMALL_REC(s) (s<<0)
3173 +
3174 /* EIP197_FLUE_CONFIG */
3175 #define EIP197_FLUE_CONFIG_MAGIC 0xc7000004
3176
3177 /* Context Control */
3178 struct safexcel_context_record {
3179 - u32 control0;
3180 - u32 control1;
3181 + __le32 control0;
3182 + __le32 control1;
3183
3184 __le32 data[40];
3185 } __packed;
3186 @@ -358,10 +388,14 @@
3187 #define CONTEXT_CONTROL_CRYPTO_ALG_AES128 (0x5 << 17)
3188 #define CONTEXT_CONTROL_CRYPTO_ALG_AES192 (0x6 << 17)
3189 #define CONTEXT_CONTROL_CRYPTO_ALG_AES256 (0x7 << 17)
3190 +#define CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 (0x8 << 17)
3191 +#define CONTEXT_CONTROL_CRYPTO_ALG_SM4 (0xd << 17)
3192 +#define CONTEXT_CONTROL_DIGEST_INITIAL (0x0 << 21)
3193 #define CONTEXT_CONTROL_DIGEST_PRECOMPUTED (0x1 << 21)
3194 #define CONTEXT_CONTROL_DIGEST_XCM (0x2 << 21)
3195 #define CONTEXT_CONTROL_DIGEST_HMAC (0x3 << 21)
3196 #define CONTEXT_CONTROL_CRYPTO_ALG_MD5 (0x0 << 23)
3197 +#define CONTEXT_CONTROL_CRYPTO_ALG_CRC32 (0x0 << 23)
3198 #define CONTEXT_CONTROL_CRYPTO_ALG_SHA1 (0x2 << 23)
3199 #define CONTEXT_CONTROL_CRYPTO_ALG_SHA224 (0x4 << 23)
3200 #define CONTEXT_CONTROL_CRYPTO_ALG_SHA256 (0x3 << 23)
3201 @@ -371,17 +405,25 @@
3202 #define CONTEXT_CONTROL_CRYPTO_ALG_XCBC128 (0x1 << 23)
3203 #define CONTEXT_CONTROL_CRYPTO_ALG_XCBC192 (0x2 << 23)
3204 #define CONTEXT_CONTROL_CRYPTO_ALG_XCBC256 (0x3 << 23)
3205 +#define CONTEXT_CONTROL_CRYPTO_ALG_SM3 (0x7 << 23)
3206 +#define CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256 (0xb << 23)
3207 +#define CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224 (0xc << 23)
3208 +#define CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512 (0xd << 23)
3209 +#define CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384 (0xe << 23)
3210 +#define CONTEXT_CONTROL_CRYPTO_ALG_POLY1305 (0xf << 23)
3211 #define CONTEXT_CONTROL_INV_FR (0x5 << 24)
3212 #define CONTEXT_CONTROL_INV_TR (0x6 << 24)
3213
3214 /* control1 */
3215 #define CONTEXT_CONTROL_CRYPTO_MODE_ECB (0 << 0)
3216 #define CONTEXT_CONTROL_CRYPTO_MODE_CBC (1 << 0)
3217 +#define CONTEXT_CONTROL_CHACHA20_MODE_256_32 (2 << 0)
3218 #define CONTEXT_CONTROL_CRYPTO_MODE_OFB (4 << 0)
3219 #define CONTEXT_CONTROL_CRYPTO_MODE_CFB (5 << 0)
3220 #define CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD (6 << 0)
3221 #define CONTEXT_CONTROL_CRYPTO_MODE_XTS (7 << 0)
3222 #define CONTEXT_CONTROL_CRYPTO_MODE_XCM ((6 << 0) | BIT(17))
3223 +#define CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK (12 << 0)
3224 #define CONTEXT_CONTROL_IV0 BIT(5)
3225 #define CONTEXT_CONTROL_IV1 BIT(6)
3226 #define CONTEXT_CONTROL_IV2 BIT(7)
3227 @@ -394,6 +436,13 @@
3228 #define EIP197_XCM_MODE_GCM 1
3229 #define EIP197_XCM_MODE_CCM 2
3230
3231 +#define EIP197_AEAD_TYPE_IPSEC_ESP 2
3232 +#define EIP197_AEAD_TYPE_IPSEC_ESP_GMAC 3
3233 +#define EIP197_AEAD_IPSEC_IV_SIZE 8
3234 +#define EIP197_AEAD_IPSEC_NONCE_SIZE 4
3235 +#define EIP197_AEAD_IPSEC_COUNTER_SIZE 4
3236 +#define EIP197_AEAD_IPSEC_CCM_NONCE_SIZE 3
3237 +
3238 /* The hash counter given to the engine in the context has a granularity of
3239 * 64 bits.
3240 */
3241 @@ -423,6 +472,8 @@
3242 #define EIP197_TRC_PARAMS2_RC_SZ_SMALL(n) ((n) << 18)
3243
3244 /* Cache helpers */
3245 +#define EIP197_MIN_DSIZE 1024
3246 +#define EIP197_MIN_ASIZE 8
3247 #define EIP197_CS_TRC_REC_WC 64
3248 #define EIP197_CS_RC_SIZE (4 * sizeof(u32))
3249 #define EIP197_CS_RC_NEXT(x) (x)
3250 @@ -447,7 +498,7 @@
3251 u16 application_id;
3252 u16 rsvd1;
3253
3254 - u32 rsvd2;
3255 + u32 rsvd2[5];
3256 } __packed;
3257
3258
3259 @@ -465,16 +516,15 @@
3260
3261 u32 data_lo;
3262 u32 data_hi;
3263 -
3264 - struct result_data_desc result_data;
3265 } __packed;
3266
3267 /*
3268 * The EIP(1)97 only needs to fetch the descriptor part of
3269 * the result descriptor, not the result token part!
3270 */
3271 -#define EIP197_RD64_FETCH_SIZE ((sizeof(struct safexcel_result_desc) -\
3272 - sizeof(struct result_data_desc)) /\
3273 +#define EIP197_RD64_FETCH_SIZE (sizeof(struct safexcel_result_desc) /\
3274 + sizeof(u32))
3275 +#define EIP197_RD64_RESULT_SIZE (sizeof(struct result_data_desc) /\
3276 sizeof(u32))
3277
3278 struct safexcel_token {
3279 @@ -505,6 +555,8 @@
3280 {
3281 token->opcode = EIP197_TOKEN_OPCODE_NOOP;
3282 token->packet_length = BIT(2);
3283 + token->stat = 0;
3284 + token->instructions = 0;
3285 }
3286
3287 /* Instructions */
3288 @@ -526,14 +578,13 @@
3289 u16 application_id;
3290 u16 rsvd;
3291
3292 - u8 refresh:2;
3293 - u32 context_lo:30;
3294 + u32 context_lo;
3295 u32 context_hi;
3296
3297 u32 control0;
3298 u32 control1;
3299
3300 - u32 token[EIP197_MAX_TOKENS];
3301 + u32 token[EIP197_EMB_TOKENS];
3302 } __packed;
3303
3304 #define EIP197_OPTION_MAGIC_VALUE BIT(0)
3305 @@ -543,7 +594,10 @@
3306 #define EIP197_OPTION_2_TOKEN_IV_CMD GENMASK(11, 10)
3307 #define EIP197_OPTION_4_TOKEN_IV_CMD GENMASK(11, 9)
3308
3309 +#define EIP197_TYPE_BCLA 0x0
3310 #define EIP197_TYPE_EXTENDED 0x3
3311 +#define EIP197_CONTEXT_SMALL 0x2
3312 +#define EIP197_CONTEXT_SIZE_MASK 0x3
3313
3314 /* Basic Command Descriptor format */
3315 struct safexcel_command_desc {
3316 @@ -551,16 +605,22 @@
3317 u8 rsvd0:5;
3318 u8 last_seg:1;
3319 u8 first_seg:1;
3320 - u16 additional_cdata_size:8;
3321 + u8 additional_cdata_size:8;
3322
3323 u32 rsvd1;
3324
3325 u32 data_lo;
3326 u32 data_hi;
3327
3328 + u32 atok_lo;
3329 + u32 atok_hi;
3330 +
3331 struct safexcel_control_data_desc control_data;
3332 } __packed;
3333
3334 +#define EIP197_CD64_FETCH_SIZE (sizeof(struct safexcel_command_desc) /\
3335 + sizeof(u32))
3336 +
3337 /*
3338 * Internal structures & functions
3339 */
3340 @@ -578,15 +638,20 @@
3341
3342 struct safexcel_desc_ring {
3343 void *base;
3344 + void *shbase;
3345 void *base_end;
3346 + void *shbase_end;
3347 dma_addr_t base_dma;
3348 + dma_addr_t shbase_dma;
3349
3350 /* write and read pointers */
3351 void *write;
3352 + void *shwrite;
3353 void *read;
3354
3355 /* descriptor element offset */
3356 - unsigned offset;
3357 + unsigned int offset;
3358 + unsigned int shoffset;
3359 };
3360
3361 enum safexcel_alg_type {
3362 @@ -601,9 +666,11 @@
3363
3364 u32 cd_size;
3365 u32 cd_offset;
3366 + u32 cdsh_offset;
3367
3368 u32 rd_size;
3369 u32 rd_offset;
3370 + u32 res_offset;
3371 };
3372
3373 struct safexcel_work_data {
3374 @@ -654,6 +721,12 @@
3375 /* Priority we use for advertising our algorithms */
3376 #define SAFEXCEL_CRA_PRIORITY 300
3377
3378 +/* SM3 digest result for zero length message */
3379 +#define EIP197_SM3_ZEROM_HASH "\x1A\xB2\x1D\x83\x55\xCF\xA1\x7F" \
3380 + "\x8E\x61\x19\x48\x31\xE8\x1A\x8F" \
3381 + "\x22\xBE\xC8\xC7\x28\xFE\xFB\x74" \
3382 + "\x7E\xD0\x35\xEB\x50\x82\xAA\x2B"
3383 +
3384 /* EIP algorithm presence flags */
3385 enum safexcel_eip_algorithms {
3386 SAFEXCEL_ALG_BC0 = BIT(5),
3387 @@ -697,16 +770,23 @@
3388 enum safexcel_flags {
3389 EIP197_TRC_CACHE = BIT(0),
3390 SAFEXCEL_HW_EIP197 = BIT(1),
3391 + EIP197_PE_ARB = BIT(2),
3392 + EIP197_ICE = BIT(3),
3393 + EIP197_SIMPLE_TRC = BIT(4),
3394 };
3395
3396 struct safexcel_hwconfig {
3397 enum safexcel_eip_algorithms algo_flags;
3398 int hwver;
3399 int hiaver;
3400 + int ppver;
3401 int pever;
3402 int hwdataw;
3403 int hwcfsize;
3404 int hwrfsize;
3405 + int hwnumpes;
3406 + int hwnumrings;
3407 + int hwnumraic;
3408 };
3409
3410 struct safexcel_crypto_priv {
3411 @@ -778,7 +858,7 @@
3412
3413 void safexcel_dequeue(struct safexcel_crypto_priv *priv, int ring);
3414 int safexcel_rdesc_check_errors(struct safexcel_crypto_priv *priv,
3415 - struct safexcel_result_desc *rdesc);
3416 + void *rdp);
3417 void safexcel_complete(struct safexcel_crypto_priv *priv, int ring);
3418 int safexcel_invalidate_cache(struct crypto_async_request *async,
3419 struct safexcel_crypto_priv *priv,
3420 @@ -797,7 +877,8 @@
3421 bool first, bool last,
3422 dma_addr_t data, u32 len,
3423 u32 full_data_len,
3424 - dma_addr_t context);
3425 + dma_addr_t context,
3426 + struct safexcel_token **atoken);
3427 struct safexcel_result_desc *safexcel_add_rdesc(struct safexcel_crypto_priv *priv,
3428 int ring_id,
3429 bool first, bool last,
3430 @@ -853,5 +934,43 @@
3431 extern struct safexcel_alg_template safexcel_alg_xts_aes;
3432 extern struct safexcel_alg_template safexcel_alg_gcm;
3433 extern struct safexcel_alg_template safexcel_alg_ccm;
3434 +extern struct safexcel_alg_template safexcel_alg_crc32;
3435 +extern struct safexcel_alg_template safexcel_alg_cbcmac;
3436 +extern struct safexcel_alg_template safexcel_alg_xcbcmac;
3437 +extern struct safexcel_alg_template safexcel_alg_cmac;
3438 +extern struct safexcel_alg_template safexcel_alg_chacha20;
3439 +extern struct safexcel_alg_template safexcel_alg_chachapoly;
3440 +extern struct safexcel_alg_template safexcel_alg_chachapoly_esp;
3441 +extern struct safexcel_alg_template safexcel_alg_sm3;
3442 +extern struct safexcel_alg_template safexcel_alg_hmac_sm3;
3443 +extern struct safexcel_alg_template safexcel_alg_ecb_sm4;
3444 +extern struct safexcel_alg_template safexcel_alg_cbc_sm4;
3445 +extern struct safexcel_alg_template safexcel_alg_ofb_sm4;
3446 +extern struct safexcel_alg_template safexcel_alg_cfb_sm4;
3447 +extern struct safexcel_alg_template safexcel_alg_ctr_sm4;
3448 +extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4;
3449 +extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4;
3450 +extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4;
3451 +extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4;
3452 +extern struct safexcel_alg_template safexcel_alg_sha3_224;
3453 +extern struct safexcel_alg_template safexcel_alg_sha3_256;
3454 +extern struct safexcel_alg_template safexcel_alg_sha3_384;
3455 +extern struct safexcel_alg_template safexcel_alg_sha3_512;
3456 +extern struct safexcel_alg_template safexcel_alg_hmac_sha3_224;
3457 +extern struct safexcel_alg_template safexcel_alg_hmac_sha3_256;
3458 +extern struct safexcel_alg_template safexcel_alg_hmac_sha3_384;
3459 +extern struct safexcel_alg_template safexcel_alg_hmac_sha3_512;
3460 +extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des;
3461 +extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede;
3462 +extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede;
3463 +extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede;
3464 +extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede;
3465 +extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des;
3466 +extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des;
3467 +extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des;
3468 +extern struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des;
3469 +extern struct safexcel_alg_template safexcel_alg_rfc4106_gcm;
3470 +extern struct safexcel_alg_template safexcel_alg_rfc4543_gcm;
3471 +extern struct safexcel_alg_template safexcel_alg_rfc4309_ccm;
3472
3473 #endif
3474 --- a/drivers/crypto/inside-secure/safexcel_hash.c
3475 +++ b/drivers/crypto/inside-secure/safexcel_hash.c
3476 @@ -5,9 +5,13 @@
3477 * Antoine Tenart <antoine.tenart@free-electrons.com>
3478 */
3479
3480 +#include <crypto/aes.h>
3481 #include <crypto/hmac.h>
3482 #include <crypto/md5.h>
3483 #include <crypto/sha.h>
3484 +#include <crypto/sha3.h>
3485 +#include <crypto/skcipher.h>
3486 +#include <crypto/sm3.h>
3487 #include <linux/device.h>
3488 #include <linux/dma-mapping.h>
3489 #include <linux/dmapool.h>
3490 @@ -19,9 +23,19 @@
3491 struct safexcel_crypto_priv *priv;
3492
3493 u32 alg;
3494 -
3495 - u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
3496 - u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
3497 + u8 key_sz;
3498 + bool cbcmac;
3499 + bool do_fallback;
3500 + bool fb_init_done;
3501 + bool fb_do_setkey;
3502 +
3503 + __le32 ipad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
3504 + __le32 opad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
3505 +
3506 + struct crypto_cipher *kaes;
3507 + struct crypto_ahash *fback;
3508 + struct crypto_shash *shpre;
3509 + struct shash_desc *shdesc;
3510 };
3511
3512 struct safexcel_ahash_req {
3513 @@ -31,6 +45,8 @@
3514 bool needs_inv;
3515 bool hmac_zlen;
3516 bool len_is_le;
3517 + bool not_first;
3518 + bool xcbcmac;
3519
3520 int nents;
3521 dma_addr_t result_dma;
3522 @@ -39,7 +55,9 @@
3523
3524 u8 state_sz; /* expected state size, only set once */
3525 u8 block_sz; /* block size, only set once */
3526 - u32 state[SHA512_DIGEST_SIZE / sizeof(u32)] __aligned(sizeof(u32));
3527 + u8 digest_sz; /* output digest size, only set once */
3528 + __le32 state[SHA3_512_BLOCK_SIZE /
3529 + sizeof(__le32)] __aligned(sizeof(__le32));
3530
3531 u64 len;
3532 u64 processed;
3533 @@ -57,22 +75,36 @@
3534 }
3535
3536 static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
3537 - u32 input_length, u32 result_length)
3538 + u32 input_length, u32 result_length,
3539 + bool cbcmac)
3540 {
3541 struct safexcel_token *token =
3542 (struct safexcel_token *)cdesc->control_data.token;
3543
3544 token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
3545 token[0].packet_length = input_length;
3546 - token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
3547 token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
3548
3549 - token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
3550 - token[1].packet_length = result_length;
3551 - token[1].stat = EIP197_TOKEN_STAT_LAST_HASH |
3552 + input_length &= 15;
3553 + if (unlikely(cbcmac && input_length)) {
3554 + token[0].stat = 0;
3555 + token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
3556 + token[1].packet_length = 16 - input_length;
3557 + token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
3558 + token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
3559 + } else {
3560 + token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
3561 + eip197_noop_token(&token[1]);
3562 + }
3563 +
3564 + token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
3565 + token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
3566 EIP197_TOKEN_STAT_LAST_PACKET;
3567 - token[1].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
3568 + token[2].packet_length = result_length;
3569 + token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
3570 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
3571 +
3572 + eip197_noop_token(&token[3]);
3573 }
3574
3575 static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
3576 @@ -82,29 +114,49 @@
3577 struct safexcel_crypto_priv *priv = ctx->priv;
3578 u64 count = 0;
3579
3580 - cdesc->control_data.control0 |= ctx->alg;
3581 + cdesc->control_data.control0 = ctx->alg;
3582 + cdesc->control_data.control1 = 0;
3583
3584 /*
3585 * Copy the input digest if needed, and setup the context
3586 * fields. Do this now as we need it to setup the first command
3587 * descriptor.
3588 */
3589 - if (!req->processed) {
3590 - /* First - and possibly only - block of basic hash only */
3591 - if (req->finish) {
3592 + if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
3593 + if (req->xcbcmac)
3594 + memcpy(ctx->base.ctxr->data, ctx->ipad, ctx->key_sz);
3595 + else
3596 + memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
3597 +
3598 + if (!req->finish && req->xcbcmac)
3599 + cdesc->control_data.control0 |=
3600 + CONTEXT_CONTROL_DIGEST_XCM |
3601 + CONTEXT_CONTROL_TYPE_HASH_OUT |
3602 + CONTEXT_CONTROL_NO_FINISH_HASH |
3603 + CONTEXT_CONTROL_SIZE(req->state_sz /
3604 + sizeof(u32));
3605 + else
3606 cdesc->control_data.control0 |=
3607 + CONTEXT_CONTROL_DIGEST_XCM |
3608 + CONTEXT_CONTROL_TYPE_HASH_OUT |
3609 + CONTEXT_CONTROL_SIZE(req->state_sz /
3610 + sizeof(u32));
3611 + return;
3612 + } else if (!req->processed) {
3613 + /* First - and possibly only - block of basic hash only */
3614 + if (req->finish)
3615 + cdesc->control_data.control0 |= req->digest |
3616 CONTEXT_CONTROL_TYPE_HASH_OUT |
3617 CONTEXT_CONTROL_RESTART_HASH |
3618 /* ensure its not 0! */
3619 CONTEXT_CONTROL_SIZE(1);
3620 - } else {
3621 - cdesc->control_data.control0 |=
3622 + else
3623 + cdesc->control_data.control0 |= req->digest |
3624 CONTEXT_CONTROL_TYPE_HASH_OUT |
3625 CONTEXT_CONTROL_RESTART_HASH |
3626 CONTEXT_CONTROL_NO_FINISH_HASH |
3627 /* ensure its not 0! */
3628 CONTEXT_CONTROL_SIZE(1);
3629 - }
3630 return;
3631 }
3632
3633 @@ -204,7 +256,7 @@
3634 }
3635
3636 if (sreq->result_dma) {
3637 - dma_unmap_single(priv->dev, sreq->result_dma, sreq->state_sz,
3638 + dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
3639 DMA_FROM_DEVICE);
3640 sreq->result_dma = 0;
3641 }
3642 @@ -223,14 +275,15 @@
3643 memcpy(sreq->cache, sreq->state,
3644 crypto_ahash_digestsize(ahash));
3645
3646 - memcpy(sreq->state, ctx->opad, sreq->state_sz);
3647 + memcpy(sreq->state, ctx->opad, sreq->digest_sz);
3648
3649 sreq->len = sreq->block_sz +
3650 crypto_ahash_digestsize(ahash);
3651 sreq->processed = sreq->block_sz;
3652 sreq->hmac = 0;
3653
3654 - ctx->base.needs_inv = true;
3655 + if (priv->flags & EIP197_TRC_CACHE)
3656 + ctx->base.needs_inv = true;
3657 areq->nbytes = 0;
3658 safexcel_ahash_enqueue(areq);
3659
3660 @@ -238,8 +291,14 @@
3661 return 1;
3662 }
3663
3664 - memcpy(areq->result, sreq->state,
3665 - crypto_ahash_digestsize(ahash));
3666 + if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
3667 + ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
3668 + /* Undo final XOR with 0xffffffff ...*/
3669 + *(__le32 *)areq->result = ~sreq->state[0];
3670 + } else {
3671 + memcpy(areq->result, sreq->state,
3672 + crypto_ahash_digestsize(ahash));
3673 + }
3674 }
3675
3676 cache_len = safexcel_queued_len(sreq);
3677 @@ -261,10 +320,11 @@
3678 struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
3679 struct safexcel_result_desc *rdesc;
3680 struct scatterlist *sg;
3681 - int i, extra = 0, n_cdesc = 0, ret = 0;
3682 - u64 queued, len, cache_len;
3683 + struct safexcel_token *dmmy;
3684 + int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
3685 + u64 queued, len;
3686
3687 - queued = len = safexcel_queued_len(req);
3688 + queued = safexcel_queued_len(req);
3689 if (queued <= HASH_CACHE_SIZE)
3690 cache_len = queued;
3691 else
3692 @@ -287,15 +347,52 @@
3693 areq->nbytes - extra);
3694
3695 queued -= extra;
3696 - len -= extra;
3697
3698 if (!queued) {
3699 *commands = 0;
3700 *results = 0;
3701 return 0;
3702 }
3703 +
3704 + extra = 0;
3705 + }
3706 +
3707 + if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
3708 + if (unlikely(cache_len < AES_BLOCK_SIZE)) {
3709 + /*
3710 + * Cache contains less than 1 full block, complete.
3711 + */
3712 + extra = AES_BLOCK_SIZE - cache_len;
3713 + if (queued > cache_len) {
3714 + /* More data follows: borrow bytes */
3715 + u64 tmp = queued - cache_len;
3716 +
3717 + skip = min_t(u64, tmp, extra);
3718 + sg_pcopy_to_buffer(areq->src,
3719 + sg_nents(areq->src),
3720 + req->cache + cache_len,
3721 + skip, 0);
3722 + }
3723 + extra -= skip;
3724 + memset(req->cache + cache_len + skip, 0, extra);
3725 + if (!ctx->cbcmac && extra) {
3726 + // 10- padding for XCBCMAC & CMAC
3727 + req->cache[cache_len + skip] = 0x80;
3728 + // HW will use K2 instead of K3 - compensate!
3729 + for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
3730 + ((__be32 *)req->cache)[i] ^=
3731 + cpu_to_be32(le32_to_cpu(
3732 + ctx->ipad[i] ^ ctx->ipad[i + 4]));
3733 + }
3734 + cache_len = AES_BLOCK_SIZE;
3735 + queued = queued + extra;
3736 + }
3737 +
3738 + /* XCBC continue: XOR previous result into 1st word */
3739 + crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
3740 }
3741
3742 + len = queued;
3743 /* Add a command descriptor for the cached data, if any */
3744 if (cache_len) {
3745 req->cache_dma = dma_map_single(priv->dev, req->cache,
3746 @@ -306,8 +403,9 @@
3747 req->cache_sz = cache_len;
3748 first_cdesc = safexcel_add_cdesc(priv, ring, 1,
3749 (cache_len == len),
3750 - req->cache_dma, cache_len, len,
3751 - ctx->base.ctxr_dma);
3752 + req->cache_dma, cache_len,
3753 + len, ctx->base.ctxr_dma,
3754 + &dmmy);
3755 if (IS_ERR(first_cdesc)) {
3756 ret = PTR_ERR(first_cdesc);
3757 goto unmap_cache;
3758 @@ -319,10 +417,6 @@
3759 goto send_command;
3760 }
3761
3762 - /* Skip descriptor generation for zero-length requests */
3763 - if (!areq->nbytes)
3764 - goto send_command;
3765 -
3766 /* Now handle the current ahash request buffer(s) */
3767 req->nents = dma_map_sg(priv->dev, areq->src,
3768 sg_nents_for_len(areq->src,
3769 @@ -336,26 +430,34 @@
3770 for_each_sg(areq->src, sg, req->nents, i) {
3771 int sglen = sg_dma_len(sg);
3772
3773 + if (unlikely(sglen <= skip)) {
3774 + skip -= sglen;
3775 + continue;
3776 + }
3777 +
3778 /* Do not overflow the request */
3779 - if (queued < sglen)
3780 + if ((queued + skip) <= sglen)
3781 sglen = queued;
3782 + else
3783 + sglen -= skip;
3784
3785 cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
3786 !(queued - sglen),
3787 - sg_dma_address(sg),
3788 - sglen, len, ctx->base.ctxr_dma);
3789 + sg_dma_address(sg) + skip, sglen,
3790 + len, ctx->base.ctxr_dma, &dmmy);
3791 if (IS_ERR(cdesc)) {
3792 ret = PTR_ERR(cdesc);
3793 goto unmap_sg;
3794 }
3795 - n_cdesc++;
3796
3797 - if (n_cdesc == 1)
3798 + if (!n_cdesc)
3799 first_cdesc = cdesc;
3800 + n_cdesc++;
3801
3802 queued -= sglen;
3803 if (!queued)
3804 break;
3805 + skip = 0;
3806 }
3807
3808 send_command:
3809 @@ -363,9 +465,9 @@
3810 safexcel_context_control(ctx, req, first_cdesc);
3811
3812 /* Add the token */
3813 - safexcel_hash_token(first_cdesc, len, req->state_sz);
3814 + safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);
3815
3816 - req->result_dma = dma_map_single(priv->dev, req->state, req->state_sz,
3817 + req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
3818 DMA_FROM_DEVICE);
3819 if (dma_mapping_error(priv->dev, req->result_dma)) {
3820 ret = -EINVAL;
3821 @@ -374,7 +476,7 @@
3822
3823 /* Add a result descriptor */
3824 rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
3825 - req->state_sz);
3826 + req->digest_sz);
3827 if (IS_ERR(rdesc)) {
3828 ret = PTR_ERR(rdesc);
3829 goto unmap_result;
3830 @@ -382,17 +484,20 @@
3831
3832 safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);
3833
3834 - req->processed += len;
3835 + req->processed += len - extra;
3836
3837 *commands = n_cdesc;
3838 *results = 1;
3839 return 0;
3840
3841 unmap_result:
3842 - dma_unmap_single(priv->dev, req->result_dma, req->state_sz,
3843 + dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
3844 DMA_FROM_DEVICE);
3845 unmap_sg:
3846 - dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
3847 + if (req->nents) {
3848 + dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
3849 + req->nents = 0;
3850 + }
3851 cdesc_rollback:
3852 for (i = 0; i < n_cdesc; i++)
3853 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
3854 @@ -590,16 +695,12 @@
3855
3856 if (ctx->base.ctxr) {
3857 if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
3858 - req->processed &&
3859 - (/* invalidate for basic hash continuation finish */
3860 - (req->finish &&
3861 - (req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED)) ||
3862 + /* invalidate for *any* non-XCBC continuation */
3863 + ((req->not_first && !req->xcbcmac) ||
3864 /* invalidate if (i)digest changed */
3865 memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
3866 - /* invalidate for HMAC continuation finish */
3867 - (req->finish && (req->processed != req->block_sz)) ||
3868 /* invalidate for HMAC finish with odigest changed */
3869 - (req->finish &&
3870 + (req->finish && req->hmac &&
3871 memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
3872 ctx->opad, req->state_sz))))
3873 /*
3874 @@ -622,6 +723,7 @@
3875 if (!ctx->base.ctxr)
3876 return -ENOMEM;
3877 }
3878 + req->not_first = true;
3879
3880 ring = ctx->base.ring;
3881
3882 @@ -691,8 +793,34 @@
3883 else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
3884 memcpy(areq->result, sha512_zero_message_hash,
3885 SHA512_DIGEST_SIZE);
3886 + else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
3887 + memcpy(areq->result,
3888 + EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
3889 + }
3890
3891 return 0;
3892 + } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
3893 + ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
3894 + req->len == sizeof(u32) && !areq->nbytes)) {
3895 + /* Zero length CRC32 */
3896 + memcpy(areq->result, ctx->ipad, sizeof(u32));
3897 + return 0;
3898 + } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
3899 + !areq->nbytes)) {
3900 + /* Zero length CBC MAC */
3901 + memset(areq->result, 0, AES_BLOCK_SIZE);
3902 + return 0;
3903 + } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
3904 + !areq->nbytes)) {
3905 + /* Zero length (X)CBC/CMAC */
3906 + int i;
3907 +
3908 + for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
3909 + ((__be32 *)areq->result)[i] =
3910 + cpu_to_be32(le32_to_cpu(ctx->ipad[i + 4]));//K3
3911 + areq->result[0] ^= 0x80; // 10- padding
3912 + crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
3913 + return 0;
3914 } else if (unlikely(req->hmac &&
3915 (req->len == req->block_sz) &&
3916 !areq->nbytes)) {
3917 @@ -792,6 +920,7 @@
3918 ctx->priv = tmpl->priv;
3919 ctx->base.send = safexcel_ahash_send;
3920 ctx->base.handle_result = safexcel_handle_result;
3921 + ctx->fb_do_setkey = false;
3922
3923 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3924 sizeof(struct safexcel_ahash_req));
3925 @@ -808,6 +937,7 @@
3926 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3927 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
3928 req->state_sz = SHA1_DIGEST_SIZE;
3929 + req->digest_sz = SHA1_DIGEST_SIZE;
3930 req->block_sz = SHA1_BLOCK_SIZE;
3931
3932 return 0;
3933 @@ -889,6 +1019,7 @@
3934 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3935 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
3936 req->state_sz = SHA1_DIGEST_SIZE;
3937 + req->digest_sz = SHA1_DIGEST_SIZE;
3938 req->block_sz = SHA1_BLOCK_SIZE;
3939 req->hmac = true;
3940
3941 @@ -1125,6 +1256,7 @@
3942 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
3943 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
3944 req->state_sz = SHA256_DIGEST_SIZE;
3945 + req->digest_sz = SHA256_DIGEST_SIZE;
3946 req->block_sz = SHA256_BLOCK_SIZE;
3947
3948 return 0;
3949 @@ -1180,6 +1312,7 @@
3950 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
3951 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
3952 req->state_sz = SHA256_DIGEST_SIZE;
3953 + req->digest_sz = SHA256_DIGEST_SIZE;
3954 req->block_sz = SHA256_BLOCK_SIZE;
3955
3956 return 0;
3957 @@ -1248,6 +1381,7 @@
3958 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
3959 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
3960 req->state_sz = SHA256_DIGEST_SIZE;
3961 + req->digest_sz = SHA256_DIGEST_SIZE;
3962 req->block_sz = SHA256_BLOCK_SIZE;
3963 req->hmac = true;
3964
3965 @@ -1318,6 +1452,7 @@
3966 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
3967 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
3968 req->state_sz = SHA256_DIGEST_SIZE;
3969 + req->digest_sz = SHA256_DIGEST_SIZE;
3970 req->block_sz = SHA256_BLOCK_SIZE;
3971 req->hmac = true;
3972
3973 @@ -1375,6 +1510,7 @@
3974 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
3975 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
3976 req->state_sz = SHA512_DIGEST_SIZE;
3977 + req->digest_sz = SHA512_DIGEST_SIZE;
3978 req->block_sz = SHA512_BLOCK_SIZE;
3979
3980 return 0;
3981 @@ -1430,6 +1566,7 @@
3982 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
3983 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
3984 req->state_sz = SHA512_DIGEST_SIZE;
3985 + req->digest_sz = SHA512_DIGEST_SIZE;
3986 req->block_sz = SHA512_BLOCK_SIZE;
3987
3988 return 0;
3989 @@ -1498,6 +1635,7 @@
3990 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
3991 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
3992 req->state_sz = SHA512_DIGEST_SIZE;
3993 + req->digest_sz = SHA512_DIGEST_SIZE;
3994 req->block_sz = SHA512_BLOCK_SIZE;
3995 req->hmac = true;
3996
3997 @@ -1568,6 +1706,7 @@
3998 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
3999 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
4000 req->state_sz = SHA512_DIGEST_SIZE;
4001 + req->digest_sz = SHA512_DIGEST_SIZE;
4002 req->block_sz = SHA512_BLOCK_SIZE;
4003 req->hmac = true;
4004
4005 @@ -1625,6 +1764,7 @@
4006 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
4007 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
4008 req->state_sz = MD5_DIGEST_SIZE;
4009 + req->digest_sz = MD5_DIGEST_SIZE;
4010 req->block_sz = MD5_HMAC_BLOCK_SIZE;
4011
4012 return 0;
4013 @@ -1686,6 +1826,7 @@
4014 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
4015 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
4016 req->state_sz = MD5_DIGEST_SIZE;
4017 + req->digest_sz = MD5_DIGEST_SIZE;
4018 req->block_sz = MD5_HMAC_BLOCK_SIZE;
4019 req->len_is_le = true; /* MD5 is little endian! ... */
4020 req->hmac = true;
4021 @@ -1738,5 +1879,1235 @@
4022 .cra_module = THIS_MODULE,
4023 },
4024 },
4025 + },
4026 +};
4027 +
4028 +static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
4029 +{
4030 + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
4031 + int ret = safexcel_ahash_cra_init(tfm);
4032 +
4033 + /* Default 'key' is all zeroes */
4034 + memset(ctx->ipad, 0, sizeof(u32));
4035 + return ret;
4036 +}
4037 +
4038 +static int safexcel_crc32_init(struct ahash_request *areq)
4039 +{
4040 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
4041 + struct safexcel_ahash_req *req = ahash_request_ctx(areq);
4042 +
4043 + memset(req, 0, sizeof(*req));
4044 +
4045 + /* Start from loaded key */
4046 + req->state[0] = (__force __le32)le32_to_cpu(~ctx->ipad[0]);
4047 + /* Set processed to non-zero to enable invalidation detection */
4048 + req->len = sizeof(u32);
4049 + req->processed = sizeof(u32);
4050 +
4051 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
4052 + req->digest = CONTEXT_CONTROL_DIGEST_XCM;
4053 + req->state_sz = sizeof(u32);
4054 + req->digest_sz = sizeof(u32);
4055 + req->block_sz = sizeof(u32);
4056 +
4057 + return 0;
4058 +}
4059 +
4060 +static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
4061 + unsigned int keylen)
4062 +{
4063 + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
4064 +
4065 + if (keylen != sizeof(u32))
4066 + return -EINVAL;
4067 +
4068 + memcpy(ctx->ipad, key, sizeof(u32));
4069 + return 0;
4070 +}
4071 +
4072 +static int safexcel_crc32_digest(struct ahash_request *areq)
4073 +{
4074 + return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
4075 +}
4076 +
4077 +struct safexcel_alg_template safexcel_alg_crc32 = {
4078 + .type = SAFEXCEL_ALG_TYPE_AHASH,
4079 + .algo_mask = 0,
4080 + .alg.ahash = {
4081 + .init = safexcel_crc32_init,
4082 + .update = safexcel_ahash_update,
4083 + .final = safexcel_ahash_final,
4084 + .finup = safexcel_ahash_finup,
4085 + .digest = safexcel_crc32_digest,
4086 + .setkey = safexcel_crc32_setkey,
4087 + .export = safexcel_ahash_export,
4088 + .import = safexcel_ahash_import,
4089 + .halg = {
4090 + .digestsize = sizeof(u32),
4091 + .statesize = sizeof(struct safexcel_ahash_export_state),
4092 + .base = {
4093 + .cra_name = "crc32",
4094 + .cra_driver_name = "safexcel-crc32",
4095 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
4096 + .cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
4097 + CRYPTO_ALG_ASYNC |
4098 + CRYPTO_ALG_KERN_DRIVER_ONLY,
4099 + .cra_blocksize = 1,
4100 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
4101 + .cra_init = safexcel_crc32_cra_init,
4102 + .cra_exit = safexcel_ahash_cra_exit,
4103 + .cra_module = THIS_MODULE,
4104 + },
4105 + },
4106 + },
4107 +};
4108 +
4109 +static int safexcel_cbcmac_init(struct ahash_request *areq)
4110 +{
4111 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
4112 + struct safexcel_ahash_req *req = ahash_request_ctx(areq);
4113 +
4114 + memset(req, 0, sizeof(*req));
4115 +
4116 + /* Start from loaded keys */
4117 + memcpy(req->state, ctx->ipad, ctx->key_sz);
4118 + /* Set processed to non-zero to enable invalidation detection */
4119 + req->len = AES_BLOCK_SIZE;
4120 + req->processed = AES_BLOCK_SIZE;
4121 +
4122 + req->digest = CONTEXT_CONTROL_DIGEST_XCM;
4123 + req->state_sz = ctx->key_sz;
4124 + req->digest_sz = AES_BLOCK_SIZE;
4125 + req->block_sz = AES_BLOCK_SIZE;
4126 + req->xcbcmac = true;
4127 +
4128 + return 0;
4129 +}
4130 +
4131 +static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
4132 + unsigned int len)
4133 +{
4134 + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
4135 + struct crypto_aes_ctx aes;
4136 + int ret, i;
4137 +
4138 + ret = aes_expandkey(&aes, key, len);
4139 + if (ret)
4140 + return ret;
4141 +
4142 + memset(ctx->ipad, 0, 2 * AES_BLOCK_SIZE);
4143 + for (i = 0; i < len / sizeof(u32); i++)
4144 + ctx->ipad[i + 8] = (__force __le32)cpu_to_be32(aes.key_enc[i]);
4145 +
4146 + if (len == AES_KEYSIZE_192) {
4147 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
4148 + ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
4149 + } else if (len == AES_KEYSIZE_256) {
4150 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
4151 + ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
4152 + } else {
4153 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
4154 + ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
4155 + }
4156 + ctx->cbcmac = true;
4157 +
4158 + memzero_explicit(&aes, sizeof(aes));
4159 + return 0;
4160 +}
4161 +
4162 +static int safexcel_cbcmac_digest(struct ahash_request *areq)
4163 +{
4164 + return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
4165 +}
4166 +
4167 +struct safexcel_alg_template safexcel_alg_cbcmac = {
4168 + .type = SAFEXCEL_ALG_TYPE_AHASH,
4169 + .algo_mask = 0,
4170 + .alg.ahash = {
4171 + .init = safexcel_cbcmac_init,
4172 + .update = safexcel_ahash_update,
4173 + .final = safexcel_ahash_final,
4174 + .finup = safexcel_ahash_finup,
4175 + .digest = safexcel_cbcmac_digest,
4176 + .setkey = safexcel_cbcmac_setkey,
4177 + .export = safexcel_ahash_export,
4178 + .import = safexcel_ahash_import,
4179 + .halg = {
4180 + .digestsize = AES_BLOCK_SIZE,
4181 + .statesize = sizeof(struct safexcel_ahash_export_state),
4182 + .base = {
4183 + .cra_name = "cbcmac(aes)",
4184 + .cra_driver_name = "safexcel-cbcmac-aes",
4185 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
4186 + .cra_flags = CRYPTO_ALG_ASYNC |
4187 + CRYPTO_ALG_KERN_DRIVER_ONLY,
4188 + .cra_blocksize = 1,
4189 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
4190 + .cra_init = safexcel_ahash_cra_init,
4191 + .cra_exit = safexcel_ahash_cra_exit,
4192 + .cra_module = THIS_MODULE,
4193 + },
4194 + },
4195 + },
4196 +};
4197 +
4198 +static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
4199 + unsigned int len)
4200 +{
4201 + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
4202 + struct crypto_aes_ctx aes;
4203 + u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
4204 + int ret, i;
4205 +
4206 + ret = aes_expandkey(&aes, key, len);
4207 + if (ret)
4208 + return ret;
4209 +
4210 + /* precompute the XCBC key material */
4211 + crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
4212 + crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
4213 + CRYPTO_TFM_REQ_MASK);
4214 + ret = crypto_cipher_setkey(ctx->kaes, key, len);
4215 + if (ret)
4216 + return ret;
4217 +
4218 + crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
4219 + "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
4220 + crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
4221 + "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
4222 + crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
4223 + "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
4224 + for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
4225 + ctx->ipad[i] =
4226 + cpu_to_le32((__force u32)cpu_to_be32(key_tmp[i]));
4227 +
4228 + crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
4229 + crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
4230 + CRYPTO_TFM_REQ_MASK);
4231 + ret = crypto_cipher_setkey(ctx->kaes,
4232 + (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
4233 + AES_MIN_KEY_SIZE);
4234 + if (ret)
4235 + return ret;
4236 +
4237 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
4238 + ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
4239 + ctx->cbcmac = false;
4240 +
4241 + memzero_explicit(&aes, sizeof(aes));
4242 + return 0;
4243 +}
4244 +
4245 +static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
4246 +{
4247 + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
4248 +
4249 + safexcel_ahash_cra_init(tfm);
4250 + ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
4251 + return PTR_ERR_OR_ZERO(ctx->kaes);
4252 +}
4253 +
4254 +static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
4255 +{
4256 + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
4257 +
4258 + crypto_free_cipher(ctx->kaes);
4259 + safexcel_ahash_cra_exit(tfm);
4260 +}
4261 +
4262 +struct safexcel_alg_template safexcel_alg_xcbcmac = {
4263 + .type = SAFEXCEL_ALG_TYPE_AHASH,
4264 + .algo_mask = 0,
4265 + .alg.ahash = {
4266 + .init = safexcel_cbcmac_init,
4267 + .update = safexcel_ahash_update,
4268 + .final = safexcel_ahash_final,
4269 + .finup = safexcel_ahash_finup,
4270 + .digest = safexcel_cbcmac_digest,
4271 + .setkey = safexcel_xcbcmac_setkey,
4272 + .export = safexcel_ahash_export,
4273 + .import = safexcel_ahash_import,
4274 + .halg = {
4275 + .digestsize = AES_BLOCK_SIZE,
4276 + .statesize = sizeof(struct safexcel_ahash_export_state),
4277 + .base = {
4278 + .cra_name = "xcbc(aes)",
4279 + .cra_driver_name = "safexcel-xcbc-aes",
4280 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
4281 + .cra_flags = CRYPTO_ALG_ASYNC |
4282 + CRYPTO_ALG_KERN_DRIVER_ONLY,
4283 + .cra_blocksize = AES_BLOCK_SIZE,
4284 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
4285 + .cra_init = safexcel_xcbcmac_cra_init,
4286 + .cra_exit = safexcel_xcbcmac_cra_exit,
4287 + .cra_module = THIS_MODULE,
4288 + },
4289 + },
4290 + },
4291 +};
4292 +
4293 +static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
4294 + unsigned int len)
4295 +{
4296 + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
4297 + struct crypto_aes_ctx aes;
4298 + __be64 consts[4];
4299 + u64 _const[2];
4300 + u8 msb_mask, gfmask;
4301 + int ret, i;
4302 +
4303 + ret = aes_expandkey(&aes, key, len);
4304 + if (ret)
4305 + return ret;
4306 +
4307 + for (i = 0; i < len / sizeof(u32); i++)
4308 + ctx->ipad[i + 8] =
4309 + cpu_to_le32((__force u32)cpu_to_be32(aes.key_enc[i]));
4310 +
4311 + /* precompute the CMAC key material */
4312 + crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
4313 + crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
4314 + CRYPTO_TFM_REQ_MASK);
4315 + ret = crypto_cipher_setkey(ctx->kaes, key, len);
4316 + if (ret)
4317 + return ret;
4318 +
4319 + /* code below borrowed from crypto/cmac.c */
4320 + /* encrypt the zero block */
4321 + memset(consts, 0, AES_BLOCK_SIZE);
4322 + crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
4323 +
4324 + gfmask = 0x87;
4325 + _const[0] = be64_to_cpu(consts[1]);
4326 + _const[1] = be64_to_cpu(consts[0]);
4327 +
4328 + /* gf(2^128) multiply zero-ciphertext with u and u^2 */
4329 + for (i = 0; i < 4; i += 2) {
4330 + msb_mask = ((s64)_const[1] >> 63) & gfmask;
4331 + _const[1] = (_const[1] << 1) | (_const[0] >> 63);
4332 + _const[0] = (_const[0] << 1) ^ msb_mask;
4333 +
4334 + consts[i + 0] = cpu_to_be64(_const[1]);
4335 + consts[i + 1] = cpu_to_be64(_const[0]);
4336 + }
4337 + /* end of code borrowed from crypto/cmac.c */
4338 +
4339 + for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
4340 + ctx->ipad[i] = (__force __le32)cpu_to_be32(((u32 *)consts)[i]);
4341 +
4342 + if (len == AES_KEYSIZE_192) {
4343 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
4344 + ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
4345 + } else if (len == AES_KEYSIZE_256) {
4346 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
4347 + ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
4348 + } else {
4349 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
4350 + ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
4351 + }
4352 + ctx->cbcmac = false;
4353 +
4354 + memzero_explicit(&aes, sizeof(aes));
4355 + return 0;
4356 +}
4357 +
4358 +struct safexcel_alg_template safexcel_alg_cmac = {
4359 + .type = SAFEXCEL_ALG_TYPE_AHASH,
4360 + .algo_mask = 0,
4361 + .alg.ahash = {
4362 + .init = safexcel_cbcmac_init,
4363 + .update = safexcel_ahash_update,
4364 + .final = safexcel_ahash_final,
4365 + .finup = safexcel_ahash_finup,
4366 + .digest = safexcel_cbcmac_digest,
4367 + .setkey = safexcel_cmac_setkey,
4368 + .export = safexcel_ahash_export,
4369 + .import = safexcel_ahash_import,
4370 + .halg = {
4371 + .digestsize = AES_BLOCK_SIZE,
4372 + .statesize = sizeof(struct safexcel_ahash_export_state),
4373 + .base = {
4374 + .cra_name = "cmac(aes)",
4375 + .cra_driver_name = "safexcel-cmac-aes",
4376 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
4377 + .cra_flags = CRYPTO_ALG_ASYNC |
4378 + CRYPTO_ALG_KERN_DRIVER_ONLY,
4379 + .cra_blocksize = AES_BLOCK_SIZE,
4380 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
4381 + .cra_init = safexcel_xcbcmac_cra_init,
4382 + .cra_exit = safexcel_xcbcmac_cra_exit,
4383 + .cra_module = THIS_MODULE,
4384 + },
4385 + },
4386 + },
4387 +};
4388 +
4389 +static int safexcel_sm3_init(struct ahash_request *areq)
4390 +{
4391 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
4392 + struct safexcel_ahash_req *req = ahash_request_ctx(areq);
4393 +
4394 + memset(req, 0, sizeof(*req));
4395 +
4396 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
4397 + req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
4398 + req->state_sz = SM3_DIGEST_SIZE;
4399 + req->digest_sz = SM3_DIGEST_SIZE;
4400 + req->block_sz = SM3_BLOCK_SIZE;
4401 +
4402 + return 0;
4403 +}
4404 +
4405 +static int safexcel_sm3_digest(struct ahash_request *areq)
4406 +{
4407 + int ret = safexcel_sm3_init(areq);
4408 +
4409 + if (ret)
4410 + return ret;
4411 +
4412 + return safexcel_ahash_finup(areq);
4413 +}
4414 +
4415 +struct safexcel_alg_template safexcel_alg_sm3 = {
4416 + .type = SAFEXCEL_ALG_TYPE_AHASH,
4417 + .algo_mask = SAFEXCEL_ALG_SM3,
4418 + .alg.ahash = {
4419 + .init = safexcel_sm3_init,
4420 + .update = safexcel_ahash_update,
4421 + .final = safexcel_ahash_final,
4422 + .finup = safexcel_ahash_finup,
4423 + .digest = safexcel_sm3_digest,
4424 + .export = safexcel_ahash_export,
4425 + .import = safexcel_ahash_import,
4426 + .halg = {
4427 + .digestsize = SM3_DIGEST_SIZE,
4428 + .statesize = sizeof(struct safexcel_ahash_export_state),
4429 + .base = {
4430 + .cra_name = "sm3",
4431 + .cra_driver_name = "safexcel-sm3",
4432 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
4433 + .cra_flags = CRYPTO_ALG_ASYNC |
4434 + CRYPTO_ALG_KERN_DRIVER_ONLY,
4435 + .cra_blocksize = SM3_BLOCK_SIZE,
4436 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
4437 + .cra_init = safexcel_ahash_cra_init,
4438 + .cra_exit = safexcel_ahash_cra_exit,
4439 + .cra_module = THIS_MODULE,
4440 + },
4441 + },
4442 + },
4443 +};
4444 +
4445 +static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
4446 + unsigned int keylen)
4447 +{
4448 + return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
4449 + SM3_DIGEST_SIZE);
4450 +}
4451 +
4452 +static int safexcel_hmac_sm3_init(struct ahash_request *areq)
4453 +{
4454 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
4455 + struct safexcel_ahash_req *req = ahash_request_ctx(areq);
4456 +
4457 + memset(req, 0, sizeof(*req));
4458 +
4459 + /* Start from ipad precompute */
4460 + memcpy(req->state, ctx->ipad, SM3_DIGEST_SIZE);
4461 + /* Already processed the key^ipad part now! */
4462 + req->len = SM3_BLOCK_SIZE;
4463 + req->processed = SM3_BLOCK_SIZE;
4464 +
4465 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
4466 + req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
4467 + req->state_sz = SM3_DIGEST_SIZE;
4468 + req->digest_sz = SM3_DIGEST_SIZE;
4469 + req->block_sz = SM3_BLOCK_SIZE;
4470 + req->hmac = true;
4471 +
4472 + return 0;
4473 +}
4474 +
4475 +static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
4476 +{
4477 + int ret = safexcel_hmac_sm3_init(areq);
4478 +
4479 + if (ret)
4480 + return ret;
4481 +
4482 + return safexcel_ahash_finup(areq);
4483 +}
4484 +
4485 +struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
4486 + .type = SAFEXCEL_ALG_TYPE_AHASH,
4487 + .algo_mask = SAFEXCEL_ALG_SM3,
4488 + .alg.ahash = {
4489 + .init = safexcel_hmac_sm3_init,
4490 + .update = safexcel_ahash_update,
4491 + .final = safexcel_ahash_final,
4492 + .finup = safexcel_ahash_finup,
4493 + .digest = safexcel_hmac_sm3_digest,
4494 + .setkey = safexcel_hmac_sm3_setkey,
4495 + .export = safexcel_ahash_export,
4496 + .import = safexcel_ahash_import,
4497 + .halg = {
4498 + .digestsize = SM3_DIGEST_SIZE,
4499 + .statesize = sizeof(struct safexcel_ahash_export_state),
4500 + .base = {
4501 + .cra_name = "hmac(sm3)",
4502 + .cra_driver_name = "safexcel-hmac-sm3",
4503 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
4504 + .cra_flags = CRYPTO_ALG_ASYNC |
4505 + CRYPTO_ALG_KERN_DRIVER_ONLY,
4506 + .cra_blocksize = SM3_BLOCK_SIZE,
4507 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
4508 + .cra_init = safexcel_ahash_cra_init,
4509 + .cra_exit = safexcel_ahash_cra_exit,
4510 + .cra_module = THIS_MODULE,
4511 + },
4512 + },
4513 + },
4514 +};
4515 +
4516 +static int safexcel_sha3_224_init(struct ahash_request *areq)
4517 +{
4518 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
4519 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
4520 + struct safexcel_ahash_req *req = ahash_request_ctx(areq);
4521 +
4522 + memset(req, 0, sizeof(*req));
4523 +
4524 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
4525 + req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
4526 + req->state_sz = SHA3_224_DIGEST_SIZE;
4527 + req->digest_sz = SHA3_224_DIGEST_SIZE;
4528 + req->block_sz = SHA3_224_BLOCK_SIZE;
4529 + ctx->do_fallback = false;
4530 + ctx->fb_init_done = false;
4531 + return 0;
4532 +}
4533 +
4534 +static int safexcel_sha3_fbcheck(struct ahash_request *req)
4535 +{
4536 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
4537 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
4538 + struct ahash_request *subreq = ahash_request_ctx(req);
4539 + int ret = 0;
4540 +
4541 + if (ctx->do_fallback) {
4542 + ahash_request_set_tfm(subreq, ctx->fback);
4543 + ahash_request_set_callback(subreq, req->base.flags,
4544 + req->base.complete, req->base.data);
4545 + ahash_request_set_crypt(subreq, req->src, req->result,
4546 + req->nbytes);
4547 + if (!ctx->fb_init_done) {
4548 + if (ctx->fb_do_setkey) {
4549 + /* Set fallback cipher HMAC key */
4550 + u8 key[SHA3_224_BLOCK_SIZE];
4551 +
4552 + memcpy(key, ctx->ipad,
4553 + crypto_ahash_blocksize(ctx->fback) / 2);
4554 + memcpy(key +
4555 + crypto_ahash_blocksize(ctx->fback) / 2,
4556 + ctx->opad,
4557 + crypto_ahash_blocksize(ctx->fback) / 2);
4558 + ret = crypto_ahash_setkey(ctx->fback, key,
4559 + crypto_ahash_blocksize(ctx->fback));
4560 + memzero_explicit(key,
4561 + crypto_ahash_blocksize(ctx->fback));
4562 + ctx->fb_do_setkey = false;
4563 + }
4564 + ret = ret ?: crypto_ahash_init(subreq);
4565 + ctx->fb_init_done = true;
4566 + }
4567 + }
4568 + return ret;
4569 +}
4570 +
4571 +static int safexcel_sha3_update(struct ahash_request *req)
4572 +{
4573 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
4574 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
4575 + struct ahash_request *subreq = ahash_request_ctx(req);
4576 +
4577 + ctx->do_fallback = true;
4578 + return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
4579 +}
4580 +
4581 +static int safexcel_sha3_final(struct ahash_request *req)
4582 +{
4583 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
4584 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
4585 + struct ahash_request *subreq = ahash_request_ctx(req);
4586 +
4587 + ctx->do_fallback = true;
4588 + return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
4589 +}
4590 +
4591 +static int safexcel_sha3_finup(struct ahash_request *req)
4592 +{
4593 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
4594 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
4595 + struct ahash_request *subreq = ahash_request_ctx(req);
4596 +
4597 + ctx->do_fallback |= !req->nbytes;
4598 + if (ctx->do_fallback)
4599 + /* Update or ex/import happened or len 0, cannot use the HW */
4600 + return safexcel_sha3_fbcheck(req) ?:
4601 + crypto_ahash_finup(subreq);
4602 + else
4603 + return safexcel_ahash_finup(req);
4604 +}
4605 +
4606 +static int safexcel_sha3_digest_fallback(struct ahash_request *req)
4607 +{
4608 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
4609 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
4610 + struct ahash_request *subreq = ahash_request_ctx(req);
4611 +
4612 + ctx->do_fallback = true;
4613 + ctx->fb_init_done = false;
4614 + return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
4615 +}
4616 +
4617 +static int safexcel_sha3_224_digest(struct ahash_request *req)
4618 +{
4619 + if (req->nbytes)
4620 + return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
4621 +
4622 + /* HW cannot do zero length hash, use fallback instead */
4623 + return safexcel_sha3_digest_fallback(req);
4624 +}
4625 +
4626 +static int safexcel_sha3_export(struct ahash_request *req, void *out)
4627 +{
4628 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
4629 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
4630 + struct ahash_request *subreq = ahash_request_ctx(req);
4631 +
4632 + ctx->do_fallback = true;
4633 + return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
4634 +}
4635 +
4636 +static int safexcel_sha3_import(struct ahash_request *req, const void *in)
4637 +{
4638 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
4639 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
4640 + struct ahash_request *subreq = ahash_request_ctx(req);
4641 +
4642 + ctx->do_fallback = true;
4643 + return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
4644 + // return safexcel_ahash_import(req, in);
4645 +}
4646 +
4647 +static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
4648 +{
4649 + struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
4650 + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
4651 +
4652 + safexcel_ahash_cra_init(tfm);
4653 +
4654 + /* Allocate fallback implementation */
4655 + ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
4656 + CRYPTO_ALG_ASYNC |
4657 + CRYPTO_ALG_NEED_FALLBACK);
4658 + if (IS_ERR(ctx->fback))
4659 + return PTR_ERR(ctx->fback);
4660 +
4661 + /* Update statesize from fallback algorithm! */
4662 + crypto_hash_alg_common(ahash)->statesize =
4663 + crypto_ahash_statesize(ctx->fback);
4664 + crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
4665 + sizeof(struct ahash_request) +
4666 + crypto_ahash_reqsize(ctx->fback)));
4667 + return 0;
4668 +}
4669 +
4670 +static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
4671 +{
4672 + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
4673 +
4674 + crypto_free_ahash(ctx->fback);
4675 + safexcel_ahash_cra_exit(tfm);
4676 +}
4677 +
4678 +struct safexcel_alg_template safexcel_alg_sha3_224 = {
4679 + .type = SAFEXCEL_ALG_TYPE_AHASH,
4680 + .algo_mask = SAFEXCEL_ALG_SHA3,
4681 + .alg.ahash = {
4682 + .init = safexcel_sha3_224_init,
4683 + .update = safexcel_sha3_update,
4684 + .final = safexcel_sha3_final,
4685 + .finup = safexcel_sha3_finup,
4686 + .digest = safexcel_sha3_224_digest,
4687 + .export = safexcel_sha3_export,
4688 + .import = safexcel_sha3_import,
4689 + .halg = {
4690 + .digestsize = SHA3_224_DIGEST_SIZE,
4691 + .statesize = sizeof(struct safexcel_ahash_export_state),
4692 + .base = {
4693 + .cra_name = "sha3-224",
4694 + .cra_driver_name = "safexcel-sha3-224",
4695 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
4696 + .cra_flags = CRYPTO_ALG_ASYNC |
4697 + CRYPTO_ALG_KERN_DRIVER_ONLY |
4698 + CRYPTO_ALG_NEED_FALLBACK,
4699 + .cra_blocksize = SHA3_224_BLOCK_SIZE,
4700 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
4701 + .cra_init = safexcel_sha3_cra_init,
4702 + .cra_exit = safexcel_sha3_cra_exit,
4703 + .cra_module = THIS_MODULE,
4704 + },
4705 + },
4706 + },
4707 +};
4708 +
4709 +static int safexcel_sha3_256_init(struct ahash_request *areq)
4710 +{
4711 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
4712 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
4713 + struct safexcel_ahash_req *req = ahash_request_ctx(areq);
4714 +
4715 + memset(req, 0, sizeof(*req));
4716 +
4717 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
4718 + req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
4719 + req->state_sz = SHA3_256_DIGEST_SIZE;
4720 + req->digest_sz = SHA3_256_DIGEST_SIZE;
4721 + req->block_sz = SHA3_256_BLOCK_SIZE;
4722 + ctx->do_fallback = false;
4723 + ctx->fb_init_done = false;
4724 + return 0;
4725 +}
4726 +
4727 +static int safexcel_sha3_256_digest(struct ahash_request *req)
4728 +{
4729 + if (req->nbytes)
4730 + return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
4731 +
4732 + /* HW cannot do zero length hash, use fallback instead */
4733 + return safexcel_sha3_digest_fallback(req);
4734 +}
4735 +
4736 +struct safexcel_alg_template safexcel_alg_sha3_256 = {
4737 + .type = SAFEXCEL_ALG_TYPE_AHASH,
4738 + .algo_mask = SAFEXCEL_ALG_SHA3,
4739 + .alg.ahash = {
4740 + .init = safexcel_sha3_256_init,
4741 + .update = safexcel_sha3_update,
4742 + .final = safexcel_sha3_final,
4743 + .finup = safexcel_sha3_finup,
4744 + .digest = safexcel_sha3_256_digest,
4745 + .export = safexcel_sha3_export,
4746 + .import = safexcel_sha3_import,
4747 + .halg = {
4748 + .digestsize = SHA3_256_DIGEST_SIZE,
4749 + .statesize = sizeof(struct safexcel_ahash_export_state),
4750 + .base = {
4751 + .cra_name = "sha3-256",
4752 + .cra_driver_name = "safexcel-sha3-256",
4753 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
4754 + .cra_flags = CRYPTO_ALG_ASYNC |
4755 + CRYPTO_ALG_KERN_DRIVER_ONLY |
4756 + CRYPTO_ALG_NEED_FALLBACK,
4757 + .cra_blocksize = SHA3_256_BLOCK_SIZE,
4758 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
4759 + .cra_init = safexcel_sha3_cra_init,
4760 + .cra_exit = safexcel_sha3_cra_exit,
4761 + .cra_module = THIS_MODULE,
4762 + },
4763 + },
4764 + },
4765 +};
4766 +
4767 +static int safexcel_sha3_384_init(struct ahash_request *areq)
4768 +{
4769 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
4770 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
4771 + struct safexcel_ahash_req *req = ahash_request_ctx(areq);
4772 +
4773 + memset(req, 0, sizeof(*req));
4774 +
4775 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
4776 + req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
4777 + req->state_sz = SHA3_384_DIGEST_SIZE;
4778 + req->digest_sz = SHA3_384_DIGEST_SIZE;
4779 + req->block_sz = SHA3_384_BLOCK_SIZE;
4780 + ctx->do_fallback = false;
4781 + ctx->fb_init_done = false;
4782 + return 0;
4783 +}
4784 +
4785 +static int safexcel_sha3_384_digest(struct ahash_request *req)
4786 +{
4787 + if (req->nbytes)
4788 + return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
4789 +
4790 + /* HW cannot do zero length hash, use fallback instead */
4791 + return safexcel_sha3_digest_fallback(req);
4792 +}
4793 +
4794 +struct safexcel_alg_template safexcel_alg_sha3_384 = {
4795 + .type = SAFEXCEL_ALG_TYPE_AHASH,
4796 + .algo_mask = SAFEXCEL_ALG_SHA3,
4797 + .alg.ahash = {
4798 + .init = safexcel_sha3_384_init,
4799 + .update = safexcel_sha3_update,
4800 + .final = safexcel_sha3_final,
4801 + .finup = safexcel_sha3_finup,
4802 + .digest = safexcel_sha3_384_digest,
4803 + .export = safexcel_sha3_export,
4804 + .import = safexcel_sha3_import,
4805 + .halg = {
4806 + .digestsize = SHA3_384_DIGEST_SIZE,
4807 + .statesize = sizeof(struct safexcel_ahash_export_state),
4808 + .base = {
4809 + .cra_name = "sha3-384",
4810 + .cra_driver_name = "safexcel-sha3-384",
4811 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
4812 + .cra_flags = CRYPTO_ALG_ASYNC |
4813 + CRYPTO_ALG_KERN_DRIVER_ONLY |
4814 + CRYPTO_ALG_NEED_FALLBACK,
4815 + .cra_blocksize = SHA3_384_BLOCK_SIZE,
4816 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
4817 + .cra_init = safexcel_sha3_cra_init,
4818 + .cra_exit = safexcel_sha3_cra_exit,
4819 + .cra_module = THIS_MODULE,
4820 + },
4821 + },
4822 + },
4823 +};
4824 +
4825 +static int safexcel_sha3_512_init(struct ahash_request *areq)
4826 +{
4827 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
4828 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
4829 + struct safexcel_ahash_req *req = ahash_request_ctx(areq);
4830 +
4831 + memset(req, 0, sizeof(*req));
4832 +
4833 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
4834 + req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
4835 + req->state_sz = SHA3_512_DIGEST_SIZE;
4836 + req->digest_sz = SHA3_512_DIGEST_SIZE;
4837 + req->block_sz = SHA3_512_BLOCK_SIZE;
4838 + ctx->do_fallback = false;
4839 + ctx->fb_init_done = false;
4840 + return 0;
4841 +}
4842 +
4843 +static int safexcel_sha3_512_digest(struct ahash_request *req)
4844 +{
4845 + if (req->nbytes)
4846 + return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
4847 +
4848 + /* HW cannot do zero length hash, use fallback instead */
4849 + return safexcel_sha3_digest_fallback(req);
4850 +}
4851 +
4852 +struct safexcel_alg_template safexcel_alg_sha3_512 = {
4853 + .type = SAFEXCEL_ALG_TYPE_AHASH,
4854 + .algo_mask = SAFEXCEL_ALG_SHA3,
4855 + .alg.ahash = {
4856 + .init = safexcel_sha3_512_init,
4857 + .update = safexcel_sha3_update,
4858 + .final = safexcel_sha3_final,
4859 + .finup = safexcel_sha3_finup,
4860 + .digest = safexcel_sha3_512_digest,
4861 + .export = safexcel_sha3_export,
4862 + .import = safexcel_sha3_import,
4863 + .halg = {
4864 + .digestsize = SHA3_512_DIGEST_SIZE,
4865 + .statesize = sizeof(struct safexcel_ahash_export_state),
4866 + .base = {
4867 + .cra_name = "sha3-512",
4868 + .cra_driver_name = "safexcel-sha3-512",
4869 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
4870 + .cra_flags = CRYPTO_ALG_ASYNC |
4871 + CRYPTO_ALG_KERN_DRIVER_ONLY |
4872 + CRYPTO_ALG_NEED_FALLBACK,
4873 + .cra_blocksize = SHA3_512_BLOCK_SIZE,
4874 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
4875 + .cra_init = safexcel_sha3_cra_init,
4876 + .cra_exit = safexcel_sha3_cra_exit,
4877 + .cra_module = THIS_MODULE,
4878 + },
4879 + },
4880 + },
4881 +};
4882 +
4883 +static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
4884 +{
4885 + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
4886 + int ret;
4887 +
4888 + ret = safexcel_sha3_cra_init(tfm);
4889 + if (ret)
4890 + return ret;
4891 +
4892 + /* Allocate precalc basic digest implementation */
4893 + ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
4894 + if (IS_ERR(ctx->shpre))
4895 + return PTR_ERR(ctx->shpre);
4896 +
4897 + ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
4898 + crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
4899 + if (!ctx->shdesc) {
4900 + crypto_free_shash(ctx->shpre);
4901 + return -ENOMEM;
4902 + }
4903 + ctx->shdesc->tfm = ctx->shpre;
4904 + return 0;
4905 +}
4906 +
4907 +static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
4908 +{
4909 + struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
4910 +
4911 + crypto_free_ahash(ctx->fback);
4912 + crypto_free_shash(ctx->shpre);
4913 + kfree(ctx->shdesc);
4914 + safexcel_ahash_cra_exit(tfm);
4915 +}
4916 +
4917 +static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
4918 + unsigned int keylen)
4919 +{
4920 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
4921 + int ret = 0;
4922 +
4923 + if (keylen > crypto_ahash_blocksize(tfm)) {
4924 + /*
4925 + * If the key is larger than the blocksize, then hash it
4926 + * first using our fallback cipher
4927 + */
4928 + ret = crypto_shash_digest(ctx->shdesc, key, keylen,
4929 + (u8 *)ctx->ipad);
4930 + keylen = crypto_shash_digestsize(ctx->shpre);
4931 +
4932 + /*
4933 + * If the digest is larger than half the blocksize, we need to
4934 + * move the rest to opad due to the way our HMAC infra works.
4935 + */
4936 + if (keylen > crypto_ahash_blocksize(tfm) / 2)
4937 + /* Buffers overlap, need to use memmove instead of memcpy! */
4938 + memmove(ctx->opad,
4939 + (u8 *)ctx->ipad +
4940 + crypto_ahash_blocksize(tfm) / 2,
4941 + keylen - crypto_ahash_blocksize(tfm) / 2);
4942 + } else {
4943 + /*
4944 + * Copy the key to our ipad & opad buffers
4945 + * Note that ipad and opad each contain one half of the key,
4946 + * to match the existing HMAC driver infrastructure.
4947 + */
4948 + if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
4949 + memcpy(ctx->ipad, key, keylen);
4950 + } else {
4951 + memcpy(ctx->ipad, key,
4952 + crypto_ahash_blocksize(tfm) / 2);
4953 + memcpy(ctx->opad,
4954 + key + crypto_ahash_blocksize(tfm) / 2,
4955 + keylen - crypto_ahash_blocksize(tfm) / 2);
4956 + }
4957 + }
4958 +
4959 + /* Pad key with zeroes */
4960 + if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
4961 + memset((u8 *)ctx->ipad + keylen, 0,
4962 + crypto_ahash_blocksize(tfm) / 2 - keylen);
4963 + memset(ctx->opad, 0, crypto_ahash_blocksize(tfm) / 2);
4964 + } else {
4965 + memset((u8 *)ctx->opad + keylen -
4966 + crypto_ahash_blocksize(tfm) / 2, 0,
4967 + crypto_ahash_blocksize(tfm) - keylen);
4968 + }
4969 +
4970 + /* If doing fallback, still need to set the new key! */
4971 + ctx->fb_do_setkey = true;
4972 + return ret;
4973 +}
4974 +
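To make the copy/pad arithmetic of the setkey above concrete: for SHA3-256 the block size is 136 bytes, so ipad and opad hold 68 bytes each, and a key of up to one block is spread across them and zero-padded. A standalone sketch of just the layout step (the long-key digest path is omitted; assumes keylen was already reduced to at most one block):

#include <stdint.h>
#include <string.h>

#define BLOCK	136u		/* SHA3_256_BLOCK_SIZE */
#define HALF	(BLOCK / 2)	/* 68 bytes per pad buffer */

static void layout_key(const uint8_t *key, size_t keylen,
		       uint8_t ipad[HALF], uint8_t opad[HALF])
{
	if (keylen <= HALF) {
		memcpy(ipad, key, keylen);		/* whole key fits in ipad */
		memset(ipad + keylen, 0, HALF - keylen);
		memset(opad, 0, HALF);			/* opad is all padding */
	} else {
		memcpy(ipad, key, HALF);		/* first half -> ipad */
		memcpy(opad, key + HALF, keylen - HALF);/* rest -> opad */
		memset(opad + keylen - HALF, 0, BLOCK - keylen);
	}
}

int main(void)
{
	uint8_t key[100] = { 0x42 }, ipad[HALF], opad[HALF];

	/* 68 key bytes land in ipad, 32 in opad, then 36 zero pad bytes */
	layout_key(key, sizeof(key), ipad, opad);
	return 0;
}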
4975 +static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
4976 +{
4977 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
4978 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
4979 + struct safexcel_ahash_req *req = ahash_request_ctx(areq);
4980 +
4981 + memset(req, 0, sizeof(*req));
4982 +
4983 + /* Copy (half of) the key */
4984 + memcpy(req->state, ctx->ipad, SHA3_224_BLOCK_SIZE / 2);
4985 + /* Start of HMAC should have len == processed == blocksize */
4986 + req->len = SHA3_224_BLOCK_SIZE;
4987 + req->processed = SHA3_224_BLOCK_SIZE;
4988 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
4989 + req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
4990 + req->state_sz = SHA3_224_BLOCK_SIZE / 2;
4991 + req->digest_sz = SHA3_224_DIGEST_SIZE;
4992 + req->block_sz = SHA3_224_BLOCK_SIZE;
4993 + req->hmac = true;
4994 + ctx->do_fallback = false;
4995 + ctx->fb_init_done = false;
4996 + return 0;
4997 +}
4998 +
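The four HMAC init variants (SHA3-224 above, 256/384/512 below) are identical except for their Keccak constants: the Keccak state is 200 bytes and the block size (rate) is 200 minus twice the digest size, which is where the otherwise odd-looking block sizes come from. A quick sanity sketch of that relationship:

#include <assert.h>

struct sha3_params {
	unsigned int digest;	/* bytes */
	unsigned int block;	/* bytes (the Keccak "rate") */
};

static const struct sha3_params sha3[] = {
	{ 28, 144 },	/* SHA3-224 */
	{ 32, 136 },	/* SHA3-256 */
	{ 48, 104 },	/* SHA3-384 */
	{ 64,  72 },	/* SHA3-512 */
};

int main(void)
{
	for (unsigned int i = 0; i < 4; i++)
		assert(sha3[i].block == 200 - 2 * sha3[i].digest);
	return 0;
}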
4999 +static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
5000 +{
5001 + if (req->nbytes)
5002 + return safexcel_hmac_sha3_224_init(req) ?:
5003 + safexcel_ahash_finup(req);
5004 +
5005 + /* HW cannot do zero-length HMAC, use fallback instead */
5006 + return safexcel_sha3_digest_fallback(req);
5007 +}
5008 +
5009 +static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
5010 +{
5011 + return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
5012 +}
5013 +
5014 +struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
5015 + .type = SAFEXCEL_ALG_TYPE_AHASH,
5016 + .algo_mask = SAFEXCEL_ALG_SHA3,
5017 + .alg.ahash = {
5018 + .init = safexcel_hmac_sha3_224_init,
5019 + .update = safexcel_sha3_update,
5020 + .final = safexcel_sha3_final,
5021 + .finup = safexcel_sha3_finup,
5022 + .digest = safexcel_hmac_sha3_224_digest,
5023 + .setkey = safexcel_hmac_sha3_setkey,
5024 + .export = safexcel_sha3_export,
5025 + .import = safexcel_sha3_import,
5026 + .halg = {
5027 + .digestsize = SHA3_224_DIGEST_SIZE,
5028 + .statesize = sizeof(struct safexcel_ahash_export_state),
5029 + .base = {
5030 + .cra_name = "hmac(sha3-224)",
5031 + .cra_driver_name = "safexcel-hmac-sha3-224",
5032 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
5033 + .cra_flags = CRYPTO_ALG_ASYNC |
5034 + CRYPTO_ALG_KERN_DRIVER_ONLY |
5035 + CRYPTO_ALG_NEED_FALLBACK,
5036 + .cra_blocksize = SHA3_224_BLOCK_SIZE,
5037 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
5038 + .cra_init = safexcel_hmac_sha3_224_cra_init,
5039 + .cra_exit = safexcel_hmac_sha3_cra_exit,
5040 + .cra_module = THIS_MODULE,
5041 + },
5042 + },
5043 + },
5044 +};
5045 +
5046 +static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
5047 +{
5048 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
5049 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
5050 + struct safexcel_ahash_req *req = ahash_request_ctx(areq);
5051 +
5052 + memset(req, 0, sizeof(*req));
5053 +
5054 + /* Copy (half of) the key */
5055 + memcpy(req->state, ctx->ipad, SHA3_256_BLOCK_SIZE / 2);
5056 + /* Start of HMAC should have len == processed == blocksize */
5057 + req->len = SHA3_256_BLOCK_SIZE;
5058 + req->processed = SHA3_256_BLOCK_SIZE;
5059 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
5060 + req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
5061 + req->state_sz = SHA3_256_BLOCK_SIZE / 2;
5062 + req->digest_sz = SHA3_256_DIGEST_SIZE;
5063 + req->block_sz = SHA3_256_BLOCK_SIZE;
5064 + req->hmac = true;
5065 + ctx->do_fallback = false;
5066 + ctx->fb_init_done = false;
5067 + return 0;
5068 +}
5069 +
5070 +static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
5071 +{
5072 + if (req->nbytes)
5073 + return safexcel_hmac_sha3_256_init(req) ?:
5074 + safexcel_ahash_finup(req);
5075 +
5076 + /* HW cannot do zero-length HMAC, use fallback instead */
5077 + return safexcel_sha3_digest_fallback(req);
5078 +}
5079 +
5080 +static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
5081 +{
5082 + return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
5083 +}
5084 +
5085 +struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
5086 + .type = SAFEXCEL_ALG_TYPE_AHASH,
5087 + .algo_mask = SAFEXCEL_ALG_SHA3,
5088 + .alg.ahash = {
5089 + .init = safexcel_hmac_sha3_256_init,
5090 + .update = safexcel_sha3_update,
5091 + .final = safexcel_sha3_final,
5092 + .finup = safexcel_sha3_finup,
5093 + .digest = safexcel_hmac_sha3_256_digest,
5094 + .setkey = safexcel_hmac_sha3_setkey,
5095 + .export = safexcel_sha3_export,
5096 + .import = safexcel_sha3_import,
5097 + .halg = {
5098 + .digestsize = SHA3_256_DIGEST_SIZE,
5099 + .statesize = sizeof(struct safexcel_ahash_export_state),
5100 + .base = {
5101 + .cra_name = "hmac(sha3-256)",
5102 + .cra_driver_name = "safexcel-hmac-sha3-256",
5103 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
5104 + .cra_flags = CRYPTO_ALG_ASYNC |
5105 + CRYPTO_ALG_KERN_DRIVER_ONLY |
5106 + CRYPTO_ALG_NEED_FALLBACK,
5107 + .cra_blocksize = SHA3_256_BLOCK_SIZE,
5108 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
5109 + .cra_init = safexcel_hmac_sha3_256_cra_init,
5110 + .cra_exit = safexcel_hmac_sha3_cra_exit,
5111 + .cra_module = THIS_MODULE,
5112 + },
5113 + },
5114 + },
5115 +};
5116 +
5117 +static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
5118 +{
5119 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
5120 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
5121 + struct safexcel_ahash_req *req = ahash_request_ctx(areq);
5122 +
5123 + memset(req, 0, sizeof(*req));
5124 +
5125 + /* Copy (half of) the key */
5126 + memcpy(req->state, ctx->ipad, SHA3_384_BLOCK_SIZE / 2);
5127 + /* Start of HMAC should have len == processed == blocksize */
5128 + req->len = SHA3_384_BLOCK_SIZE;
5129 + req->processed = SHA3_384_BLOCK_SIZE;
5130 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
5131 + req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
5132 + req->state_sz = SHA3_384_BLOCK_SIZE / 2;
5133 + req->digest_sz = SHA3_384_DIGEST_SIZE;
5134 + req->block_sz = SHA3_384_BLOCK_SIZE;
5135 + req->hmac = true;
5136 + ctx->do_fallback = false;
5137 + ctx->fb_init_done = false;
5138 + return 0;
5139 +}
5140 +
5141 +static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
5142 +{
5143 + if (req->nbytes)
5144 + return safexcel_hmac_sha3_384_init(req) ?:
5145 + safexcel_ahash_finup(req);
5146 +
5147 + /* HW cannot do zero-length HMAC, use fallback instead */
5148 + return safexcel_sha3_digest_fallback(req);
5149 +}
5150 +
5151 +static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
5152 +{
5153 + return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
5154 +}
5155 +
5156 +struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
5157 + .type = SAFEXCEL_ALG_TYPE_AHASH,
5158 + .algo_mask = SAFEXCEL_ALG_SHA3,
5159 + .alg.ahash = {
5160 + .init = safexcel_hmac_sha3_384_init,
5161 + .update = safexcel_sha3_update,
5162 + .final = safexcel_sha3_final,
5163 + .finup = safexcel_sha3_finup,
5164 + .digest = safexcel_hmac_sha3_384_digest,
5165 + .setkey = safexcel_hmac_sha3_setkey,
5166 + .export = safexcel_sha3_export,
5167 + .import = safexcel_sha3_import,
5168 + .halg = {
5169 + .digestsize = SHA3_384_DIGEST_SIZE,
5170 + .statesize = sizeof(struct safexcel_ahash_export_state),
5171 + .base = {
5172 + .cra_name = "hmac(sha3-384)",
5173 + .cra_driver_name = "safexcel-hmac-sha3-384",
5174 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
5175 + .cra_flags = CRYPTO_ALG_ASYNC |
5176 + CRYPTO_ALG_KERN_DRIVER_ONLY |
5177 + CRYPTO_ALG_NEED_FALLBACK,
5178 + .cra_blocksize = SHA3_384_BLOCK_SIZE,
5179 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
5180 + .cra_init = safexcel_hmac_sha3_384_cra_init,
5181 + .cra_exit = safexcel_hmac_sha3_cra_exit,
5182 + .cra_module = THIS_MODULE,
5183 + },
5184 + },
5185 + },
5186 +};
5187 +
5188 +static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
5189 +{
5190 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
5191 + struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
5192 + struct safexcel_ahash_req *req = ahash_request_ctx(areq);
5193 +
5194 + memset(req, 0, sizeof(*req));
5195 +
5196 + /* Copy (half of) the key */
5197 + memcpy(req->state, ctx->ipad, SHA3_512_BLOCK_SIZE / 2);
5198 + /* Start of HMAC should have len == processed == blocksize */
5199 + req->len = SHA3_512_BLOCK_SIZE;
5200 + req->processed = SHA3_512_BLOCK_SIZE;
5201 + ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
5202 + req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
5203 + req->state_sz = SHA3_512_BLOCK_SIZE / 2;
5204 + req->digest_sz = SHA3_512_DIGEST_SIZE;
5205 + req->block_sz = SHA3_512_BLOCK_SIZE;
5206 + req->hmac = true;
5207 + ctx->do_fallback = false;
5208 + ctx->fb_init_done = false;
5209 + return 0;
5210 +}
5211 +
5212 +static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
5213 +{
5214 + if (req->nbytes)
5215 + return safexcel_hmac_sha3_512_init(req) ?:
5216 + safexcel_ahash_finup(req);
5217 +
5218 + /* HW cannot do zero-length HMAC, use fallback instead */
5219 + return safexcel_sha3_digest_fallback(req);
5220 +}
5221 +
5222 +static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
5223 +{
5224 + return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
5225 +}
5226 +struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
5227 + .type = SAFEXCEL_ALG_TYPE_AHASH,
5228 + .algo_mask = SAFEXCEL_ALG_SHA3,
5229 + .alg.ahash = {
5230 + .init = safexcel_hmac_sha3_512_init,
5231 + .update = safexcel_sha3_update,
5232 + .final = safexcel_sha3_final,
5233 + .finup = safexcel_sha3_finup,
5234 + .digest = safexcel_hmac_sha3_512_digest,
5235 + .setkey = safexcel_hmac_sha3_setkey,
5236 + .export = safexcel_sha3_export,
5237 + .import = safexcel_sha3_import,
5238 + .halg = {
5239 + .digestsize = SHA3_512_DIGEST_SIZE,
5240 + .statesize = sizeof(struct safexcel_ahash_export_state),
5241 + .base = {
5242 + .cra_name = "hmac(sha3-512)",
5243 + .cra_driver_name = "safexcel-hmac-sha3-512",
5244 + .cra_priority = SAFEXCEL_CRA_PRIORITY,
5245 + .cra_flags = CRYPTO_ALG_ASYNC |
5246 + CRYPTO_ALG_KERN_DRIVER_ONLY |
5247 + CRYPTO_ALG_NEED_FALLBACK,
5248 + .cra_blocksize = SHA3_512_BLOCK_SIZE,
5249 + .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
5250 + .cra_init = safexcel_hmac_sha3_512_cra_init,
5251 + .cra_exit = safexcel_hmac_sha3_cra_exit,
5252 + .cra_module = THIS_MODULE,
5253 + },
5254 + },
5255 },
5256 };
5257 --- a/drivers/crypto/inside-secure/safexcel_ring.c
5258 +++ b/drivers/crypto/inside-secure/safexcel_ring.c
5259 @@ -14,7 +14,12 @@
5260 struct safexcel_desc_ring *cdr,
5261 struct safexcel_desc_ring *rdr)
5262 {
5263 - cdr->offset = sizeof(u32) * priv->config.cd_offset;
5264 + int i;
5265 + struct safexcel_command_desc *cdesc;
5266 + dma_addr_t atok;
5267 +
5268 + /* Actual command descriptor ring */
5269 + cdr->offset = priv->config.cd_offset;
5270 cdr->base = dmam_alloc_coherent(priv->dev,
5271 cdr->offset * EIP197_DEFAULT_RING_SIZE,
5272 &cdr->base_dma, GFP_KERNEL);
5273 @@ -24,7 +29,34 @@
5274 cdr->base_end = cdr->base + cdr->offset * (EIP197_DEFAULT_RING_SIZE - 1);
5275 cdr->read = cdr->base;
5276
5277 - rdr->offset = sizeof(u32) * priv->config.rd_offset;
5278 + /* Command descriptor shadow ring for storing additional token data */
5279 + cdr->shoffset = priv->config.cdsh_offset;
5280 + cdr->shbase = dmam_alloc_coherent(priv->dev,
5281 + cdr->shoffset *
5282 + EIP197_DEFAULT_RING_SIZE,
5283 + &cdr->shbase_dma, GFP_KERNEL);
5284 + if (!cdr->shbase)
5285 + return -ENOMEM;
5286 + cdr->shwrite = cdr->shbase;
5287 + cdr->shbase_end = cdr->shbase + cdr->shoffset *
5288 + (EIP197_DEFAULT_RING_SIZE - 1);
5289 +
5290 + /*
5291 + * Populate command descriptors with physical pointers to shadow descs.
5292 + * Note that this only needs to be done once, as we never overwrite them.
5293 + */
5294 + cdesc = cdr->base;
5295 + atok = cdr->shbase_dma;
5296 + for (i = 0; i < EIP197_DEFAULT_RING_SIZE; i++) {
5297 + cdesc->atok_lo = lower_32_bits(atok);
5298 + cdesc->atok_hi = upper_32_bits(atok);
5299 + cdesc = (void *)cdesc + cdr->offset;
5300 + atok += cdr->shoffset;
5301 + }
5302 +
5303 + rdr->offset = priv->config.rd_offset;
5304 + /* Use shoffset for result token offset here */
5305 + rdr->shoffset = priv->config.res_offset;
5306 rdr->base = dmam_alloc_coherent(priv->dev,
5307 rdr->offset * EIP197_DEFAULT_RING_SIZE,
5308 &rdr->base_dma, GFP_KERNEL);
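The population loop above ties the command ring and its shadow ring together permanently: entry i's descriptor points at shadow slot shbase_dma + i * shoffset, so the pointers never need rewriting at runtime. A miniature userspace sketch with plain integers standing in for DMA addresses (all values hypothetical):

#include <inttypes.h>
#include <stdint.h>
#include <stdio.h>

#define RING_SIZE 4	/* stand-in for EIP197_DEFAULT_RING_SIZE */

struct cdesc {
	uint32_t atok_lo, atok_hi;	/* split 64-bit token pointer */
};

int main(void)
{
	struct cdesc ring[RING_SIZE];
	uint64_t shbase_dma = 0x80001000, shoffset = 64;	/* hypothetical */
	uint64_t atok = shbase_dma;

	for (int i = 0; i < RING_SIZE; i++) {
		ring[i].atok_lo = (uint32_t)atok;		/* lower_32_bits() */
		ring[i].atok_hi = (uint32_t)(atok >> 32);	/* upper_32_bits() */
		atok += shoffset;				/* next shadow slot */
	}
	printf("slot 3 token at 0x%08" PRIx32 "%08" PRIx32 "\n",
	       ring[3].atok_hi, ring[3].atok_lo);
	return 0;
}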
5309 @@ -42,11 +74,40 @@
5310 return (atomic_inc_return(&priv->ring_used) % priv->config.rings);
5311 }
5312
5313 -static void *safexcel_ring_next_wptr(struct safexcel_crypto_priv *priv,
5314 - struct safexcel_desc_ring *ring)
5315 +static void *safexcel_ring_next_cwptr(struct safexcel_crypto_priv *priv,
5316 + struct safexcel_desc_ring *ring,
5317 + bool first,
5318 + struct safexcel_token **atoken)
5319 {
5320 void *ptr = ring->write;
5321
5322 + if (first)
5323 + *atoken = ring->shwrite;
5324 +
5325 + if ((ring->write == ring->read - ring->offset) ||
5326 + (ring->read == ring->base && ring->write == ring->base_end))
5327 + return ERR_PTR(-ENOMEM);
5328 +
5329 + if (ring->write == ring->base_end) {
5330 + ring->write = ring->base;
5331 + ring->shwrite = ring->shbase;
5332 + } else {
5333 + ring->write += ring->offset;
5334 + ring->shwrite += ring->shoffset;
5335 + }
5336 +
5337 + return ptr;
5338 +}
5339 +
5340 +static void *safexcel_ring_next_rwptr(struct safexcel_crypto_priv *priv,
5341 + struct safexcel_desc_ring *ring,
5342 + struct result_data_desc **rtoken)
5343 +{
5344 + void *ptr = ring->write;
5345 +
5346 + /* Result token at relative offset shoffset */
5347 + *rtoken = ring->write + ring->shoffset;
5348 +
5349 if ((ring->write == ring->read - ring->offset) ||
5350 (ring->read == ring->base && ring->write == ring->base_end))
5351 return ERR_PTR(-ENOMEM);
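Both wptr helpers share the same two-clause full test: the ring is full when the write pointer is one entry behind the read pointer, either directly (write == read - offset) or across the wrap (read at base while write at base_end). One entry is deliberately sacrificed so that a full ring is distinguishable from an empty one (write == read). In slot-index terms the test collapses to a single modular comparison; a small sketch with a hypothetical ring size:

#include <assert.h>
#include <stdbool.h>

#define N 8	/* ring entries; stand-in for EIP197_DEFAULT_RING_SIZE */

/* write/read are slot indices 0..N-1; slot N-1 plays the role of base_end */
static bool ring_full(unsigned int write, unsigned int read)
{
	/* covers both clauses: write == read - 1, or read == 0 && write == N - 1 */
	return write == (read + N - 1) % N;
}

int main(void)
{
	assert(ring_full(3, 4));	/* write directly behind read */
	assert(ring_full(N - 1, 0));	/* wrapped case */
	assert(!ring_full(2, 2));	/* equal pointers mean empty, not full */
	return 0;
}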
5352 @@ -106,10 +167,13 @@
5353 if (ring->write == ring->read)
5354 return;
5355
5356 - if (ring->write == ring->base)
5357 + if (ring->write == ring->base) {
5358 ring->write = ring->base_end;
5359 - else
5360 + ring->shwrite = ring->shbase_end;
5361 + } else {
5362 ring->write -= ring->offset;
5363 + ring->shwrite -= ring->shoffset;
5364 + }
5365 }
5366
5367 struct safexcel_command_desc *safexcel_add_cdesc(struct safexcel_crypto_priv *priv,
5368 @@ -117,26 +181,26 @@
5369 bool first, bool last,
5370 dma_addr_t data, u32 data_len,
5371 u32 full_data_len,
5372 - dma_addr_t context) {
5373 + dma_addr_t context,
5374 + struct safexcel_token **atoken)
5375 +{
5376 struct safexcel_command_desc *cdesc;
5377 - int i;
5378
5379 - cdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].cdr);
5380 + cdesc = safexcel_ring_next_cwptr(priv, &priv->ring[ring_id].cdr,
5381 + first, atoken);
5382 if (IS_ERR(cdesc))
5383 return cdesc;
5384
5385 - memset(cdesc, 0, sizeof(struct safexcel_command_desc));
5386 -
5387 - cdesc->first_seg = first;
5388 - cdesc->last_seg = last;
5389 cdesc->particle_size = data_len;
5390 + cdesc->rsvd0 = 0;
5391 + cdesc->last_seg = last;
5392 + cdesc->first_seg = first;
5393 + cdesc->additional_cdata_size = 0;
5394 + cdesc->rsvd1 = 0;
5395 cdesc->data_lo = lower_32_bits(data);
5396 cdesc->data_hi = upper_32_bits(data);
5397
5398 - if (first && context) {
5399 - struct safexcel_token *token =
5400 - (struct safexcel_token *)cdesc->control_data.token;
5401 -
5402 + if (first) {
5403 /*
5404 * Note that the length here MUST be >0 or else the EIP(1)97
5405 * may hang. Newer EIP197 firmware actually incorporates this
5406 @@ -146,20 +210,12 @@
5407 cdesc->control_data.packet_length = full_data_len ?: 1;
5408 cdesc->control_data.options = EIP197_OPTION_MAGIC_VALUE |
5409 EIP197_OPTION_64BIT_CTX |
5410 - EIP197_OPTION_CTX_CTRL_IN_CMD;
5411 - cdesc->control_data.context_lo =
5412 - (lower_32_bits(context) & GENMASK(31, 2)) >> 2;
5413 + EIP197_OPTION_CTX_CTRL_IN_CMD |
5414 + EIP197_OPTION_RC_AUTO;
5415 + cdesc->control_data.type = EIP197_TYPE_BCLA;
5416 + cdesc->control_data.context_lo = lower_32_bits(context) |
5417 + EIP197_CONTEXT_SMALL;
5418 cdesc->control_data.context_hi = upper_32_bits(context);
5419 -
5420 - if (priv->version == EIP197B_MRVL ||
5421 - priv->version == EIP197D_MRVL)
5422 - cdesc->control_data.options |= EIP197_OPTION_RC_AUTO;
5423 -
5424 - /* TODO: large xform HMAC with SHA-384/512 uses refresh = 3 */
5425 - cdesc->control_data.refresh = 2;
5426 -
5427 - for (i = 0; i < EIP197_MAX_TOKENS; i++)
5428 - eip197_noop_token(&token[i]);
5429 }
5430
5431 return cdesc;
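The first-segment branch above replaces the old shift-by-2 context encoding with a plain 32/64-bit split, OR-ing a record-size flag into the low bits of the low word; the context record is word-aligned, so those bits are free to carry flags. A sketch of the packing; the flag value here is a hypothetical stand-in, not the real EIP197_CONTEXT_SMALL definition:

#include <stdint.h>

#define CONTEXT_SMALL 0x2u	/* hypothetical stand-in flag value */

struct ctrl {
	uint32_t context_lo;
	uint32_t context_hi;
};

static void set_context(struct ctrl *c, uint64_t dma)
{
	/* dma is at least 4-byte aligned, so bits 1:0 can carry flags */
	c->context_lo = (uint32_t)dma | CONTEXT_SMALL;
	c->context_hi = (uint32_t)(dma >> 32);
}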
5432 @@ -171,18 +227,27 @@
5433 dma_addr_t data, u32 len)
5434 {
5435 struct safexcel_result_desc *rdesc;
5436 + struct result_data_desc *rtoken;
5437
5438 - rdesc = safexcel_ring_next_wptr(priv, &priv->ring[ring_id].rdr);
5439 + rdesc = safexcel_ring_next_rwptr(priv, &priv->ring[ring_id].rdr,
5440 + &rtoken);
5441 if (IS_ERR(rdesc))
5442 return rdesc;
5443
5444 - memset(rdesc, 0, sizeof(struct safexcel_result_desc));
5445 -
5446 - rdesc->first_seg = first;
5447 - rdesc->last_seg = last;
5448 rdesc->particle_size = len;
5449 + rdesc->rsvd0 = 0;
5450 + rdesc->descriptor_overflow = 0;
5451 + rdesc->buffer_overflow = 0;
5452 + rdesc->last_seg = last;
5453 + rdesc->first_seg = first;
5454 + rdesc->result_size = EIP197_RD64_RESULT_SIZE;
5455 + rdesc->rsvd1 = 0;
5456 rdesc->data_lo = lower_32_bits(data);
5457 rdesc->data_hi = upper_32_bits(data);
5458
5459 + /* Clear length & error code in result token */
5460 + rtoken->packet_length = 0;
5461 + rtoken->error_code = 0;
5462 +
5463 return rdesc;
5464 }