ltq-deu: add aes_ofb and aes_cfb algorithms
[openwrt/staging/dedeckeh.git] / package / kernel / lantiq / ltq-deu / src / ifxmips_aes.c
1 /******************************************************************************
2 **
3 ** FILE NAME : ifxmips_aes.c
4 ** PROJECT : IFX UEIP
5 ** MODULES : DEU Module
6 **
7 ** DATE : September 8, 2009
8 ** AUTHOR : Mohammad Firdaus
9 ** DESCRIPTION : Data Encryption Unit Driver for AES Algorithm
10 ** COPYRIGHT : Copyright (c) 2009
11 ** Infineon Technologies AG
12 ** Am Campeon 1-12, 85579 Neubiberg, Germany
13 **
14 ** This program is free software; you can redistribute it and/or modify
15 ** it under the terms of the GNU General Public License as published by
16 ** the Free Software Foundation; either version 2 of the License, or
17 ** (at your option) any later version.
18 **
19 ** HISTORY
20 ** $Date $Author $Comment
21 ** 08,Sept 2009 Mohammad Firdaus Initial UEIP release
22 *******************************************************************************/
23 /*!
24 \defgroup IFX_DEU IFX_DEU_DRIVERS
25 \ingroup API
26 \brief ifx DEU driver module
27 */
28
29 /*!
30 \file ifxmips_aes.c
31 \ingroup IFX_DEU
32 \brief AES Encryption Driver main file
33 */
34
35 /*!
36 \defgroup IFX_AES_FUNCTIONS IFX_AES_FUNCTIONS
37 \ingroup IFX_DEU
38 \brief IFX AES driver Functions
39 */
40
41
42 /* Project Header Files */
/* Pull in per-symbol version information when module versioning is on.
 * Fix: the header is <linux/modversions.h>; "<linux/modeversions>" does
 * not exist and would fail to compile with CONFIG_MODVERSIONS set. */
#if defined(CONFIG_MODVERSIONS)
#define MODVERSIONS
#include <linux/modversions.h>
#endif
47
48 #include <linux/version.h>
49 #include <linux/module.h>
50 #include <linux/init.h>
51 #include <linux/proc_fs.h>
52 #include <linux/fs.h>
53 #include <linux/types.h>
54 #include <linux/errno.h>
55 #include <linux/crypto.h>
56 #include <linux/interrupt.h>
57 #include <linux/delay.h>
58 #include <asm/byteorder.h>
59 #include <crypto/algapi.h>
60 #include <crypto/internal/skcipher.h>
61
62 #include "ifxmips_deu.h"
63
64 #if defined(CONFIG_DANUBE)
65 #include "ifxmips_deu_danube.h"
66 extern int ifx_danube_pre_1_4;
67 #elif defined(CONFIG_AR9)
68 #include "ifxmips_deu_ar9.h"
69 #elif defined(CONFIG_VR9) || defined(CONFIG_AR10)
70 #include "ifxmips_deu_vr9.h"
71 #else
72 #error "Unkown platform"
73 #endif
74
75 /* DMA related header and variables */
76
77 spinlock_t aes_lock;
78 #define CRTCL_SECT_INIT spin_lock_init(&aes_lock)
79 #define CRTCL_SECT_START spin_lock_irqsave(&aes_lock, flag)
80 #define CRTCL_SECT_END spin_unlock_irqrestore(&aes_lock, flag)
81
82 /* Definition of constants */
83 #define AES_START IFX_AES_CON
84 #define AES_MIN_KEY_SIZE 16
85 #define AES_MAX_KEY_SIZE 32
86 #define AES_BLOCK_SIZE 16
87 #define CTR_RFC3686_NONCE_SIZE 4
88 #define CTR_RFC3686_IV_SIZE 8
89 #define CTR_RFC3686_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE)
90
91 #ifdef CRYPTO_DEBUG
92 extern char debug_level;
93 #define DPRINTF(level, format, args...) if (level < debug_level) printk(KERN_INFO "[%s %s %d]: " format, __FILE__, __func__, __LINE__, ##args);
94 #else
95 #define DPRINTF(level, format, args...)
96 #endif /* CRYPTO_DEBUG */
97
/* Function declaration */
99 int aes_chip_init(void);
100 u32 endian_swap(u32 input);
101 u32 input_swap(u32 input);
102 u32* memory_alignment(const u8 *arg, u32 *buff_alloc, int in_out, int nbytes);
103 void aes_dma_memory_copy(u32 *outcopy, u32 *out_dma, u8 *out_arg, int nbytes);
104 void des_dma_memory_copy(u32 *outcopy, u32 *out_dma, u8 *out_arg, int nbytes);
105 int aes_memory_allocate(int value);
106 int des_memory_allocate(int value);
107 void memory_release(u32 *addr);
108
109
110 extern void ifx_deu_aes (void *ctx_arg, uint8_t *out_arg, const uint8_t *in_arg,
111 uint8_t *iv_arg, size_t nbytes, int encdec, int mode);
/* End of function declaration */
113
114 struct aes_ctx {
115 int key_length;
116 u32 buf[AES_MAX_KEY_SIZE];
117 u8 nonce[CTR_RFC3686_NONCE_SIZE];
118 };
119
120 extern int disable_deudma;
121 extern int disable_multiblock;
122
123 /*! \fn int aes_set_key (struct crypto_tfm *tfm, const uint8_t *in_key, unsigned int key_len)
124 * \ingroup IFX_AES_FUNCTIONS
125 * \brief sets the AES keys
126 * \param tfm linux crypto algo transform
127 * \param in_key input key
128 * \param key_len key lengths of 16, 24 and 32 bytes supported
129 * \return -EINVAL - bad key length, 0 - SUCCESS
130 */
131 int aes_set_key (struct crypto_tfm *tfm, const u8 *in_key, unsigned int key_len)
132 {
133 struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
134
135 //printk("set_key in %s\n", __FILE__);
136
137 //aes_chip_init();
138
139 if (key_len != 16 && key_len != 24 && key_len != 32) {
140 return -EINVAL;
141 }
142
143 ctx->key_length = key_len;
144 DPRINTF(0, "ctx @%p, key_len %d, ctx->key_length %d\n", ctx, key_len, ctx->key_length);
145 memcpy ((u8 *) (ctx->buf), in_key, key_len);
146
147 return 0;
148 }
149
150
151 /*! \fn int aes_set_key_skcipher (struct crypto_skcipher *tfm, const uint8_t *in_key, unsigned int key_len)
152 * \ingroup IFX_AES_FUNCTIONS
153 * \brief sets the AES keys for skcipher
154 * \param tfm linux crypto skcipher
155 * \param in_key input key
156 * \param key_len key lengths of 16, 24 and 32 bytes supported
157 * \return -EINVAL - bad key length, 0 - SUCCESS
158 */
159 int aes_set_key_skcipher (struct crypto_skcipher *tfm, const u8 *in_key, unsigned int key_len)
160 {
161 return aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
162 }
163
164
165 /*! \fn void ifx_deu_aes (void *ctx_arg, u8 *out_arg, const u8 *in_arg, u8 *iv_arg, size_t nbytes, int encdec, int mode)
166 * \ingroup IFX_AES_FUNCTIONS
167 * \brief main interface to AES hardware
168 * \param ctx_arg crypto algo context
169 * \param out_arg output bytestream
170 * \param in_arg input bytestream
171 * \param iv_arg initialization vector
172 * \param nbytes length of bytestream
173 * \param encdec 1 for encrypt; 0 for decrypt
174 * \param mode operation mode such as ebc, cbc, ctr
175 *
176 */
void ifx_deu_aes (void *ctx_arg, u8 *out_arg, const u8 *in_arg,
        u8 *iv_arg, size_t nbytes, int encdec, int mode)

{
    /* Drives the DEU AES engine through its memory-mapped registers.
       The whole operation (key load, IV load, per-block feed) runs under
       aes_lock with interrupts disabled; register access order is
       hardware-mandated, so do not reorder statements here. */
    /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
    volatile struct aes_t *aes = (volatile struct aes_t *) AES_START;
    struct aes_ctx *ctx = (struct aes_ctx *)ctx_arg;
    u32 *in_key = ctx->buf;
    unsigned long flag;
    /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
    int key_len = ctx->key_length;

    int i = 0;
    int byte_cnt = nbytes;


    CRTCL_SECT_START;
    /* 128, 192 or 256 bit key length */
    aes->controlr.K = key_len / 8 - 2;
    /* Key words are written high-register-first (K3..K0 for 128-bit),
       each byte-swapped for the engine's endianness. */
    if (key_len == 128 / 8) {
        aes->K3R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 0));
        aes->K2R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 1));
        aes->K1R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 2));
        aes->K0R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 3));
    }
    else if (key_len == 192 / 8) {
        aes->K5R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 0));
        aes->K4R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 1));
        aes->K3R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 2));
        aes->K2R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 3));
        aes->K1R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 4));
        aes->K0R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 5));
    }
    else if (key_len == 256 / 8) {
        aes->K7R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 0));
        aes->K6R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 1));
        aes->K5R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 2));
        aes->K4R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 3));
        aes->K3R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 4));
        aes->K2R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 5));
        aes->K1R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 6));
        aes->K0R = DEU_ENDIAN_SWAP(*((u32 *) in_key + 7));
    }
    else {
        printk (KERN_ERR "[%s %s %d]: Invalid key_len : %d\n", __FILE__, __func__, __LINE__, key_len);
        CRTCL_SECT_END;
        return;// -EINVAL;
    }

    /* let HW pre-process DEcryption key in any case (even if
       ENcryption is used). Key Valid (KV) bit is then only
       checked in decryption routine! */
    aes->controlr.PNK = 1;


    aes->controlr.E_D = !encdec;    //encryption
    aes->controlr.O = mode;         //0 ECB 1 CBC 2 OFB 3 CFB 4 CTR

    //aes->controlr.F = 128; //default; only for CFB and OFB modes; change only for customer-specific apps
    /* All non-ECB modes (mode > 0) take a 16-byte IV / counter block. */
    if (mode > 0) {
        aes->IV3R = DEU_ENDIAN_SWAP(*(u32 *) iv_arg);
        aes->IV2R = DEU_ENDIAN_SWAP(*((u32 *) iv_arg + 1));
        aes->IV1R = DEU_ENDIAN_SWAP(*((u32 *) iv_arg + 2));
        aes->IV0R = DEU_ENDIAN_SWAP(*((u32 *) iv_arg + 3));
    };


    i = 0;
    /* Feed full 16-byte blocks; writing ID0R kicks off the engine. */
    while (byte_cnt >= 16) {

        aes->ID3R = INPUT_ENDIAN_SWAP(*((u32 *) in_arg + (i * 4) + 0));
        aes->ID2R = INPUT_ENDIAN_SWAP(*((u32 *) in_arg + (i * 4) + 1));
        aes->ID1R = INPUT_ENDIAN_SWAP(*((u32 *) in_arg + (i * 4) + 2));
        aes->ID0R = INPUT_ENDIAN_SWAP(*((u32 *) in_arg + (i * 4) + 3)); /* start crypto */

        while (aes->controlr.BUS) {
            // this will not take long
        }

        *((volatile u32 *) out_arg + (i * 4) + 0) = aes->OD3R;
        *((volatile u32 *) out_arg + (i * 4) + 1) = aes->OD2R;
        *((volatile u32 *) out_arg + (i * 4) + 2) = aes->OD1R;
        *((volatile u32 *) out_arg + (i * 4) + 3) = aes->OD0R;

        i++;
        byte_cnt -= 16;
    }

    /* To handle all non-aligned bytes (not aligned to 16B size) */
    /* The tail is zero-padded into a scratch block, processed as one
       full block, and only byte_cnt output bytes are copied back. */
    if (byte_cnt) {
        u8 temparea[16] = {0,};

        memcpy(temparea, ((u32 *) in_arg + (i * 4)), byte_cnt);

        aes->ID3R = INPUT_ENDIAN_SWAP(*((u32 *) temparea + 0));
        aes->ID2R = INPUT_ENDIAN_SWAP(*((u32 *) temparea + 1));
        aes->ID1R = INPUT_ENDIAN_SWAP(*((u32 *) temparea + 2));
        aes->ID0R = INPUT_ENDIAN_SWAP(*((u32 *) temparea + 3)); /* start crypto */

        while (aes->controlr.BUS) {
        }

        *((volatile u32 *) temparea + 0) = aes->OD3R;
        *((volatile u32 *) temparea + 1) = aes->OD2R;
        *((volatile u32 *) temparea + 2) = aes->OD1R;
        *((volatile u32 *) temparea + 3) = aes->OD0R;

        memcpy(((u32 *) out_arg + (i * 4)), temparea, byte_cnt);
    }

    //tc.chen : copy iv_arg back
    /* Return the updated chaining value so callers can continue a
       multi-call operation with the same IV buffer. */
    if (mode > 0) {
        *((u32 *) iv_arg) = DEU_ENDIAN_SWAP(aes->IV3R);
        *((u32 *) iv_arg + 1) = DEU_ENDIAN_SWAP(aes->IV2R);
        *((u32 *) iv_arg + 2) = DEU_ENDIAN_SWAP(aes->IV1R);
        *((u32 *) iv_arg + 3) = DEU_ENDIAN_SWAP(aes->IV0R);
    }

    CRTCL_SECT_END;
}
297
298 /*!
299 * \fn int ctr_rfc3686_aes_set_key (struct crypto_tfm *tfm, const uint8_t *in_key, unsigned int key_len)
300 * \ingroup IFX_AES_FUNCTIONS
301 * \brief sets RFC3686 key
302 * \param tfm linux crypto algo transform
303 * \param in_key input key
304 * \param key_len key lengths of 20, 28 and 36 bytes supported; last 4 bytes is nonce
305 * \return 0 - SUCCESS
306 * -EINVAL - bad key length
307 */
308 int ctr_rfc3686_aes_set_key (struct crypto_tfm *tfm, const uint8_t *in_key, unsigned int key_len)
309 {
310 struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
311
312 //printk("ctr_rfc3686_aes_set_key in %s\n", __FILE__);
313
314 memcpy(ctx->nonce, in_key + (key_len - CTR_RFC3686_NONCE_SIZE),
315 CTR_RFC3686_NONCE_SIZE);
316
317 key_len -= CTR_RFC3686_NONCE_SIZE; // remove 4 bytes of nonce
318
319 if (key_len != 16 && key_len != 24 && key_len != 32) {
320 return -EINVAL;
321 }
322
323 ctx->key_length = key_len;
324
325 memcpy ((u8 *) (ctx->buf), in_key, key_len);
326
327 return 0;
328 }
329
330 /*!
331 * \fn int ctr_rfc3686_aes_set_key_skcipher (struct crypto_skcipher *tfm, const uint8_t *in_key, unsigned int key_len)
332 * \ingroup IFX_AES_FUNCTIONS
333 * \brief sets RFC3686 key for skcipher
334 * \param tfm linux crypto skcipher
335 * \param in_key input key
336 * \param key_len key lengths of 20, 28 and 36 bytes supported; last 4 bytes is nonce
337 * \return 0 - SUCCESS
338 * -EINVAL - bad key length
339 */
340 int ctr_rfc3686_aes_set_key_skcipher (struct crypto_skcipher *tfm, const uint8_t *in_key, unsigned int key_len)
341 {
342 return ctr_rfc3686_aes_set_key(crypto_skcipher_tfm(tfm), in_key, key_len);
343 }
344
345
346 /*! \fn void ifx_deu_aes (void *ctx_arg, u8 *out_arg, const u8 *in_arg, u8 *iv_arg, u32 nbytes, int encdec, int mode)
347 * \ingroup IFX_AES_FUNCTIONS
348 * \brief main interface with deu hardware in DMA mode
349 * \param ctx_arg crypto algo context
350 * \param out_arg output bytestream
351 * \param in_arg input bytestream
352 * \param iv_arg initialization vector
353 * \param nbytes length of bytestream
354 * \param encdec 1 for encrypt; 0 for decrypt
355 * \param mode operation mode such as ebc, cbc, ctr
356 */
357
358
359 //definitions from linux/include/crypto.h:
360 //#define CRYPTO_TFM_MODE_ECB 0x00000001
361 //#define CRYPTO_TFM_MODE_CBC 0x00000002
362 //#define CRYPTO_TFM_MODE_CFB 0x00000004
363 //#define CRYPTO_TFM_MODE_CTR 0x00000008
364 //#define CRYPTO_TFM_MODE_OFB 0x00000010 // not even defined
365 //but hardware definition: 0 ECB 1 CBC 2 OFB 3 CFB 4 CTR
366
367 /*! \fn void ifx_deu_aes_ecb (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
368 * \ingroup IFX_AES_FUNCTIONS
369 * \brief sets AES hardware to ECB mode
370 * \param ctx crypto algo context
371 * \param dst output bytestream
372 * \param src input bytestream
373 * \param iv initialization vector
374 * \param nbytes length of bytestream
375 * \param encdec 1 for encrypt; 0 for decrypt
376 * \param inplace not used
377 */
378 void ifx_deu_aes_ecb (void *ctx, uint8_t *dst, const uint8_t *src,
379 uint8_t *iv, size_t nbytes, int encdec, int inplace)
380 {
381 ifx_deu_aes (ctx, dst, src, NULL, nbytes, encdec, 0);
382 }
383
384 /*! \fn void ifx_deu_aes_cbc (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
385 * \ingroup IFX_AES_FUNCTIONS
386 * \brief sets AES hardware to CBC mode
387 * \param ctx crypto algo context
388 * \param dst output bytestream
389 * \param src input bytestream
390 * \param iv initialization vector
391 * \param nbytes length of bytestream
392 * \param encdec 1 for encrypt; 0 for decrypt
393 * \param inplace not used
394 */
395 void ifx_deu_aes_cbc (void *ctx, uint8_t *dst, const uint8_t *src,
396 uint8_t *iv, size_t nbytes, int encdec, int inplace)
397 {
398 ifx_deu_aes (ctx, dst, src, iv, nbytes, encdec, 1);
399 }
400
401 /*! \fn void ifx_deu_aes_ofb (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
402 * \ingroup IFX_AES_FUNCTIONS
403 * \brief sets AES hardware to OFB mode
404 * \param ctx crypto algo context
405 * \param dst output bytestream
406 * \param src input bytestream
407 * \param iv initialization vector
408 * \param nbytes length of bytestream
409 * \param encdec 1 for encrypt; 0 for decrypt
410 * \param inplace not used
411 */
412 void ifx_deu_aes_ofb (void *ctx, uint8_t *dst, const uint8_t *src,
413 uint8_t *iv, size_t nbytes, int encdec, int inplace)
414 {
415 ifx_deu_aes (ctx, dst, src, iv, nbytes, encdec, 2);
416 }
417
418 /*! \fn void ifx_deu_aes_cfb (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
419 * \ingroup IFX_AES_FUNCTIONS
420 * \brief sets AES hardware to CFB mode
421 * \param ctx crypto algo context
422 * \param dst output bytestream
423 * \param src input bytestream
424 * \param iv initialization vector
425 * \param nbytes length of bytestream
426 * \param encdec 1 for encrypt; 0 for decrypt
427 * \param inplace not used
428 */
429 void ifx_deu_aes_cfb (void *ctx, uint8_t *dst, const uint8_t *src,
430 uint8_t *iv, size_t nbytes, int encdec, int inplace)
431 {
432 ifx_deu_aes (ctx, dst, src, iv, nbytes, encdec, 3);
433 }
434
435 /*! \fn void ifx_deu_aes_ctr (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
436 * \ingroup IFX_AES_FUNCTIONS
437 * \brief sets AES hardware to CTR mode
438 * \param ctx crypto algo context
439 * \param dst output bytestream
440 * \param src input bytestream
441 * \param iv initialization vector
442 * \param nbytes length of bytestream
443 * \param encdec 1 for encrypt; 0 for decrypt
444 * \param inplace not used
445 */
446 void ifx_deu_aes_ctr (void *ctx, uint8_t *dst, const uint8_t *src,
447 uint8_t *iv, size_t nbytes, int encdec, int inplace)
448 {
449 ifx_deu_aes (ctx, dst, src, iv, nbytes, encdec, 4);
450 }
451
452 /*! \fn void aes_encrypt (struct crypto_tfm *tfm, uint8_t *out, const uint8_t *in)
453 * \ingroup IFX_AES_FUNCTIONS
454 * \brief encrypt AES_BLOCK_SIZE of data
455 * \param tfm linux crypto algo transform
456 * \param out output bytestream
457 * \param in input bytestream
458 */
459 void aes_encrypt (struct crypto_tfm *tfm, uint8_t *out, const uint8_t *in)
460 {
461 struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
462 ifx_deu_aes (ctx, out, in, NULL, AES_BLOCK_SIZE,
463 CRYPTO_DIR_ENCRYPT, 0);
464 }
465
466 /*! \fn void aes_decrypt (struct crypto_tfm *tfm, uint8_t *out, const uint8_t *in)
467 * \ingroup IFX_AES_FUNCTIONS
468 * \brief decrypt AES_BLOCK_SIZE of data
469 * \param tfm linux crypto algo transform
470 * \param out output bytestream
471 * \param in input bytestream
472 */
473 void aes_decrypt (struct crypto_tfm *tfm, uint8_t *out, const uint8_t *in)
474 {
475 struct aes_ctx *ctx = crypto_tfm_ctx(tfm);
476 ifx_deu_aes (ctx, out, in, NULL, AES_BLOCK_SIZE,
477 CRYPTO_DIR_DECRYPT, 0);
478 }
479
480 /*
481 * \brief AES function mappings
482 */
/* Single-block "aes" cipher registration backed by the DEU hardware. */
struct crypto_alg ifxdeu_aes_alg = {
    .cra_name = "aes",
    .cra_driver_name = "ifxdeu-aes",
    .cra_priority = 300,
    .cra_flags = CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .cra_blocksize = AES_BLOCK_SIZE,
    .cra_ctxsize = sizeof(struct aes_ctx),
    .cra_module = THIS_MODULE,
    .cra_list = LIST_HEAD_INIT(ifxdeu_aes_alg.cra_list),
    .cra_u = {
        .cipher = {
            .cia_min_keysize = AES_MIN_KEY_SIZE,  /* 16 bytes */
            .cia_max_keysize = AES_MAX_KEY_SIZE,  /* 32 bytes */
            .cia_setkey = aes_set_key,
            .cia_encrypt = aes_encrypt,
            .cia_decrypt = aes_decrypt,
        }
    }
};
502
503 /*! \fn int ecb_aes_encrypt(struct skcipher_req *req)
504 * \ingroup IFX_AES_FUNCTIONS
505 * \brief ECB AES encrypt using linux crypto skcipher
506 * \param req skcipher request
507 * \return err
508 */
509 int ecb_aes_encrypt(struct skcipher_request *req)
510 {
511 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
512 struct skcipher_walk walk;
513 int err;
514 unsigned int enc_bytes, nbytes;
515
516 err = skcipher_walk_virt(&walk, req, false);
517
518 while ((nbytes = enc_bytes = walk.nbytes)) {
519 enc_bytes -= (nbytes % AES_BLOCK_SIZE);
520 ifx_deu_aes_ecb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
521 NULL, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
522 nbytes &= AES_BLOCK_SIZE - 1;
523 err = skcipher_walk_done(&walk, nbytes);
524 }
525
526 return err;
527 }
528
529 /*! \fn int ecb_aes_decrypt(struct skcipher_req *req)
530 * \ingroup IFX_AES_FUNCTIONS
531 * \brief ECB AES decrypt using linux crypto skcipher
532 * \param req skcipher request
533 * \return err
534 */
535 int ecb_aes_decrypt(struct skcipher_request *req)
536 {
537 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
538 struct skcipher_walk walk;
539 int err;
540 unsigned int dec_bytes, nbytes;
541
542 err = skcipher_walk_virt(&walk, req, false);
543
544 while ((nbytes = dec_bytes = walk.nbytes)) {
545 dec_bytes -= (nbytes % AES_BLOCK_SIZE);
546 ifx_deu_aes_ecb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
547 NULL, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
548 nbytes &= AES_BLOCK_SIZE - 1;
549 err = skcipher_walk_done(&walk, nbytes);
550 }
551
552 return err;
553 }
554
555 /*
556 * \brief AES function mappings
557 */
/* skcipher registration for "ecb(aes)" backed by the DEU hardware. */
struct skcipher_alg ifxdeu_ecb_aes_alg = {
    .base.cra_name = "ecb(aes)",
    .base.cra_driver_name = "ifxdeu-ecb(aes)",
    .base.cra_priority = 400,
    .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize = AES_BLOCK_SIZE,
    .base.cra_ctxsize = sizeof(struct aes_ctx),
    .base.cra_module = THIS_MODULE,
    .base.cra_list = LIST_HEAD_INIT(ifxdeu_ecb_aes_alg.base.cra_list),
    .min_keysize = AES_MIN_KEY_SIZE,
    .max_keysize = AES_MAX_KEY_SIZE,
    .setkey = aes_set_key_skcipher,
    .encrypt = ecb_aes_encrypt,
    .decrypt = ecb_aes_decrypt,
};
573
574
/*! \fn int cbc_aes_encrypt(struct skcipher_request *req)
576 * \ingroup IFX_AES_FUNCTIONS
577 * \brief CBC AES encrypt using linux crypto skcipher
578 * \param req skcipher request
579 * \return err
580 */
581 int cbc_aes_encrypt(struct skcipher_request *req)
582 {
583 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
584 struct skcipher_walk walk;
585 int err;
586 unsigned int enc_bytes, nbytes;
587
588 err = skcipher_walk_virt(&walk, req, false);
589
590 while ((nbytes = enc_bytes = walk.nbytes)) {
591 u8 *iv = walk.iv;
592 enc_bytes -= (nbytes % AES_BLOCK_SIZE);
593 ifx_deu_aes_cbc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
594 iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
595 nbytes &= AES_BLOCK_SIZE - 1;
596 err = skcipher_walk_done(&walk, nbytes);
597 }
598
599 return err;
600 }
601
602 /*! \fn int cbc_aes_decrypt(struct skcipher_req *req)
603 * \ingroup IFX_AES_FUNCTIONS
604 * \brief CBC AES decrypt using linux crypto skcipher
605 * \param req skcipher request
606 * \return err
607 */
608 int cbc_aes_decrypt(struct skcipher_request *req)
609 {
610 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
611 struct skcipher_walk walk;
612 int err;
613 unsigned int dec_bytes, nbytes;
614
615 err = skcipher_walk_virt(&walk, req, false);
616
617 while ((nbytes = dec_bytes = walk.nbytes)) {
618 u8 *iv = walk.iv;
619 dec_bytes -= (nbytes % AES_BLOCK_SIZE);
620 ifx_deu_aes_cbc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
621 iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
622 nbytes &= AES_BLOCK_SIZE - 1;
623 err = skcipher_walk_done(&walk, nbytes);
624 }
625
626 return err;
627 }
628
629 /*
630 * \brief AES function mappings
631 */
/* skcipher registration for "cbc(aes)" backed by the DEU hardware. */
struct skcipher_alg ifxdeu_cbc_aes_alg = {
    .base.cra_name = "cbc(aes)",
    .base.cra_driver_name = "ifxdeu-cbc(aes)",
    .base.cra_priority = 400,
    .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize = AES_BLOCK_SIZE,
    .base.cra_ctxsize = sizeof(struct aes_ctx),
    .base.cra_module = THIS_MODULE,
    .base.cra_list = LIST_HEAD_INIT(ifxdeu_cbc_aes_alg.base.cra_list),
    .min_keysize = AES_MIN_KEY_SIZE,
    .max_keysize = AES_MAX_KEY_SIZE,
    .ivsize = AES_BLOCK_SIZE,  /* CBC requires a full-block IV */
    .setkey = aes_set_key_skcipher,
    .encrypt = cbc_aes_encrypt,
    .decrypt = cbc_aes_decrypt,
};
648
649
650 /*! \fn int ofb_aes_encrypt(struct skcipher_req *req)
651 * \ingroup IFX_AES_FUNCTIONS
652 * \brief OFB AES encrypt using linux crypto skcipher
653 * \param req skcipher request
654 * \return err
655 */
656 int ofb_aes_encrypt(struct skcipher_request *req)
657 {
658 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
659 struct skcipher_walk walk;
660 int err;
661 unsigned int enc_bytes, nbytes;
662
663 err = skcipher_walk_virt(&walk, req, false);
664
665 while ((nbytes = enc_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
666 enc_bytes -= (nbytes % AES_BLOCK_SIZE);
667 ifx_deu_aes_ofb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
668 walk.iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
669 nbytes &= AES_BLOCK_SIZE - 1;
670 err = skcipher_walk_done(&walk, nbytes);
671 }
672
673 /* to handle remaining bytes < AES_BLOCK_SIZE */
674 if (walk.nbytes) {
675 ifx_deu_aes_ofb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
676 walk.iv, walk.nbytes, CRYPTO_DIR_ENCRYPT, 0);
677 err = skcipher_walk_done(&walk, 0);
678 }
679
680 return err;
681 }
682
683 /*! \fn int ofb_aes_decrypt(struct skcipher_req *req)
684 * \ingroup IFX_AES_FUNCTIONS
685 * \brief OFB AES decrypt using linux crypto skcipher
686 * \param req skcipher request
687 * \return err
688 */
689 int ofb_aes_decrypt(struct skcipher_request *req)
690 {
691 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
692 struct skcipher_walk walk;
693 int err;
694 unsigned int dec_bytes, nbytes;
695
696 err = skcipher_walk_virt(&walk, req, false);
697
698 while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
699 dec_bytes -= (nbytes % AES_BLOCK_SIZE);
700 ifx_deu_aes_ofb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
701 walk.iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
702 nbytes &= AES_BLOCK_SIZE - 1;
703 err = skcipher_walk_done(&walk, nbytes);
704 }
705
706 /* to handle remaining bytes < AES_BLOCK_SIZE */
707 if (walk.nbytes) {
708 ifx_deu_aes_ofb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
709 walk.iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
710 err = skcipher_walk_done(&walk, 0);
711 }
712
713 return err;
714 }
715
716 /*
717 * \brief AES function mappings
718 */
/* skcipher registration for "ofb(aes)" backed by the DEU hardware. */
struct skcipher_alg ifxdeu_ofb_aes_alg = {
    .base.cra_name = "ofb(aes)",
    .base.cra_driver_name = "ifxdeu-ofb(aes)",
    .base.cra_priority = 400,
    .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize = 1,  /* stream mode: byte-granular requests */
    .base.cra_ctxsize = sizeof(struct aes_ctx),
    .base.cra_module = THIS_MODULE,
    .base.cra_list = LIST_HEAD_INIT(ifxdeu_ofb_aes_alg.base.cra_list),
    .min_keysize = AES_MIN_KEY_SIZE,
    .max_keysize = AES_MAX_KEY_SIZE,
    .ivsize = AES_BLOCK_SIZE,
    .chunksize = AES_BLOCK_SIZE,
    .walksize = AES_BLOCK_SIZE,
    .setkey = aes_set_key_skcipher,
    .encrypt = ofb_aes_encrypt,
    .decrypt = ofb_aes_decrypt,
};
737
738
739 /*! \fn int cfb_aes_encrypt(struct skcipher_req *req)
740 * \ingroup IFX_AES_FUNCTIONS
741 * \brief CFB AES encrypt using linux crypto skcipher
742 * \param req skcipher request
743 * \return err
744 */
745 int cfb_aes_encrypt(struct skcipher_request *req)
746 {
747 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
748 struct skcipher_walk walk;
749 int err;
750 unsigned int enc_bytes, nbytes;
751
752 err = skcipher_walk_virt(&walk, req, false);
753
754 while ((nbytes = enc_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
755 enc_bytes -= (nbytes % AES_BLOCK_SIZE);
756 ifx_deu_aes_cfb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
757 walk.iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
758 nbytes &= AES_BLOCK_SIZE - 1;
759 err = skcipher_walk_done(&walk, nbytes);
760 }
761
762 /* to handle remaining bytes < AES_BLOCK_SIZE */
763 if (walk.nbytes) {
764 ifx_deu_aes_cfb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
765 walk.iv, walk.nbytes, CRYPTO_DIR_ENCRYPT, 0);
766 err = skcipher_walk_done(&walk, 0);
767 }
768
769 return err;
770 }
771
772 /*! \fn int cfb_aes_decrypt(struct skcipher_req *req)
773 * \ingroup IFX_AES_FUNCTIONS
774 * \brief CFB AES decrypt using linux crypto skcipher
775 * \param req skcipher request
776 * \return err
777 */
778 int cfb_aes_decrypt(struct skcipher_request *req)
779 {
780 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
781 struct skcipher_walk walk;
782 int err;
783 unsigned int dec_bytes, nbytes;
784
785 err = skcipher_walk_virt(&walk, req, false);
786
787 while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
788 dec_bytes -= (nbytes % AES_BLOCK_SIZE);
789 ifx_deu_aes_cfb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
790 walk.iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
791 nbytes &= AES_BLOCK_SIZE - 1;
792 err = skcipher_walk_done(&walk, nbytes);
793 }
794
795 /* to handle remaining bytes < AES_BLOCK_SIZE */
796 if (walk.nbytes) {
797 ifx_deu_aes_cfb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
798 walk.iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
799 err = skcipher_walk_done(&walk, 0);
800 }
801
802 return err;
803 }
804
805 /*
806 * \brief AES function mappings
807 */
/* skcipher registration for "cfb(aes)" backed by the DEU hardware. */
struct skcipher_alg ifxdeu_cfb_aes_alg = {
    .base.cra_name = "cfb(aes)",
    .base.cra_driver_name = "ifxdeu-cfb(aes)",
    .base.cra_priority = 400,
    .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize = 1,  /* stream mode: byte-granular requests */
    .base.cra_ctxsize = sizeof(struct aes_ctx),
    .base.cra_module = THIS_MODULE,
    .base.cra_list = LIST_HEAD_INIT(ifxdeu_cfb_aes_alg.base.cra_list),
    .min_keysize = AES_MIN_KEY_SIZE,
    .max_keysize = AES_MAX_KEY_SIZE,
    .ivsize = AES_BLOCK_SIZE,
    .chunksize = AES_BLOCK_SIZE,
    .walksize = AES_BLOCK_SIZE,
    .setkey = aes_set_key_skcipher,
    .encrypt = cfb_aes_encrypt,
    .decrypt = cfb_aes_decrypt,
};
826
827
828 /*! \fn int ctr_basic_aes_encrypt(struct skcipher_req *req)
829 * \ingroup IFX_AES_FUNCTIONS
830 * \brief Counter mode AES encrypt using linux crypto skcipher
831 * \param req skcipher request
832 * \return err
833 */
834 int ctr_basic_aes_encrypt(struct skcipher_request *req)
835 {
836 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
837 struct skcipher_walk walk;
838 int err;
839 unsigned int enc_bytes, nbytes;
840
841 err = skcipher_walk_virt(&walk, req, false);
842
843 while ((nbytes = enc_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
844 enc_bytes -= (nbytes % AES_BLOCK_SIZE);
845 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
846 walk.iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
847 nbytes &= AES_BLOCK_SIZE - 1;
848 err = skcipher_walk_done(&walk, nbytes);
849 }
850
851 /* to handle remaining bytes < AES_BLOCK_SIZE */
852 if (walk.nbytes) {
853 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
854 walk.iv, walk.nbytes, CRYPTO_DIR_ENCRYPT, 0);
855 err = skcipher_walk_done(&walk, 0);
856 }
857
858 return err;
859 }
860
/*! \fn int ctr_basic_aes_decrypt(struct skcipher_request *req)
862 * \ingroup IFX_AES_FUNCTIONS
863 * \brief Counter mode AES decrypt using linux crypto skcipher
864 * \param req skcipher request
865 * \return err
866 */
867 int ctr_basic_aes_decrypt(struct skcipher_request *req)
868 {
869 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
870 struct skcipher_walk walk;
871 int err;
872 unsigned int dec_bytes, nbytes;
873
874 err = skcipher_walk_virt(&walk, req, false);
875
876 while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
877 dec_bytes -= (nbytes % AES_BLOCK_SIZE);
878 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
879 walk.iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
880 nbytes &= AES_BLOCK_SIZE - 1;
881 err = skcipher_walk_done(&walk, nbytes);
882 }
883
884 /* to handle remaining bytes < AES_BLOCK_SIZE */
885 if (walk.nbytes) {
886 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
887 walk.iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
888 err = skcipher_walk_done(&walk, 0);
889 }
890
891 return err;
892 }
893
894 /*
895 * \brief AES function mappings
896 */
/* skcipher registration for plain "ctr(aes)" backed by the DEU hardware. */
struct skcipher_alg ifxdeu_ctr_basic_aes_alg = {
    .base.cra_name = "ctr(aes)",
    .base.cra_driver_name = "ifxdeu-ctr(aes)",
    .base.cra_priority = 400,
    .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize = 1,  /* stream mode: byte-granular requests */
    .base.cra_ctxsize = sizeof(struct aes_ctx),
    .base.cra_module = THIS_MODULE,
    .base.cra_list = LIST_HEAD_INIT(ifxdeu_ctr_basic_aes_alg.base.cra_list),
    .min_keysize = AES_MIN_KEY_SIZE,
    .max_keysize = AES_MAX_KEY_SIZE,
    .ivsize = AES_BLOCK_SIZE,
    .walksize = AES_BLOCK_SIZE,
    .setkey = aes_set_key_skcipher,
    .encrypt = ctr_basic_aes_encrypt,
    .decrypt = ctr_basic_aes_decrypt,
};
914
915
916 /*! \fn int ctr_rfc3686_aes_encrypt(struct skcipher_req *req)
917 * \ingroup IFX_AES_FUNCTIONS
918 * \brief Counter mode AES (rfc3686) encrypt using linux crypto skcipher
919 * \param req skcipher request
920 * \return err
921 */
922 int ctr_rfc3686_aes_encrypt(struct skcipher_request *req)
923 {
924 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
925 struct skcipher_walk walk;
926 unsigned int nbytes, enc_bytes;
927 int err;
928 u8 rfc3686_iv[16];
929
930 err = skcipher_walk_virt(&walk, req, false);
931 nbytes = walk.nbytes;
932
933 /* set up counter block */
934 memcpy(rfc3686_iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
935 memcpy(rfc3686_iv + CTR_RFC3686_NONCE_SIZE, walk.iv, CTR_RFC3686_IV_SIZE);
936
937 /* initialize counter portion of counter block */
938 *(__be32 *)(rfc3686_iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
939 cpu_to_be32(1);
940
941 while ((nbytes = enc_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
942 enc_bytes -= (nbytes % AES_BLOCK_SIZE);
943 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
944 rfc3686_iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
945 nbytes &= AES_BLOCK_SIZE - 1;
946 err = skcipher_walk_done(&walk, nbytes);
947 }
948
949 /* to handle remaining bytes < AES_BLOCK_SIZE */
950 if (walk.nbytes) {
951 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
952 rfc3686_iv, walk.nbytes, CRYPTO_DIR_ENCRYPT, 0);
953 err = skcipher_walk_done(&walk, 0);
954 }
955
956 return err;
957 }
958
959 /*! \fn int ctr_rfc3686_aes_decrypt(struct skcipher_req *req)
960 * \ingroup IFX_AES_FUNCTIONS
961 * \brief Counter mode AES (rfc3686) decrypt using linux crypto skcipher
962 * \param req skcipher request
963 * \return err
964 */
965 int ctr_rfc3686_aes_decrypt(struct skcipher_request *req)
966 {
967 struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
968 struct skcipher_walk walk;
969 unsigned int nbytes, dec_bytes;
970 int err;
971 u8 rfc3686_iv[16];
972
973 err = skcipher_walk_virt(&walk, req, false);
974 nbytes = walk.nbytes;
975
976 /* set up counter block */
977 memcpy(rfc3686_iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
978 memcpy(rfc3686_iv + CTR_RFC3686_NONCE_SIZE, walk.iv, CTR_RFC3686_IV_SIZE);
979
980 /* initialize counter portion of counter block */
981 *(__be32 *)(rfc3686_iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
982 cpu_to_be32(1);
983
984 while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
985 dec_bytes -= (nbytes % AES_BLOCK_SIZE);
986 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
987 rfc3686_iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
988 nbytes &= AES_BLOCK_SIZE - 1;
989 err = skcipher_walk_done(&walk, nbytes);
990 }
991
992 /* to handle remaining bytes < AES_BLOCK_SIZE */
993 if (walk.nbytes) {
994 ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
995 rfc3686_iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
996 err = skcipher_walk_done(&walk, 0);
997 }
998
999 return err;
1000 }
1001
1002 /*
1003 * \brief AES function mappings
1004 */
1005 struct skcipher_alg ifxdeu_ctr_rfc3686_aes_alg = {
1006 .base.cra_name = "rfc3686(ctr(aes))",
1007 .base.cra_driver_name = "ifxdeu-ctr-rfc3686(aes)",
1008 .base.cra_priority = 400,
1009 .base.cra_flags = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
1010 .base.cra_blocksize = 1,
1011 .base.cra_ctxsize = sizeof(struct aes_ctx),
1012 .base.cra_module = THIS_MODULE,
1013 .base.cra_list = LIST_HEAD_INIT(ifxdeu_ctr_rfc3686_aes_alg.base.cra_list),
1014 .min_keysize = AES_MIN_KEY_SIZE,
1015 .max_keysize = CTR_RFC3686_MAX_KEY_SIZE,
1016 .ivsize = CTR_RFC3686_IV_SIZE,
1017 .walksize = AES_BLOCK_SIZE,
1018 .setkey = ctr_rfc3686_aes_set_key_skcipher,
1019 .encrypt = ctr_rfc3686_aes_encrypt,
1020 .decrypt = ctr_rfc3686_aes_decrypt,
1021 };
1022
1023
1024 /*! \fn int ifxdeu_init_aes (void)
1025 * \ingroup IFX_AES_FUNCTIONS
1026 * \brief function to initialize AES driver
1027 * \return ret
1028 */
1029 int ifxdeu_init_aes (void)
1030 {
1031 int ret = -ENOSYS;
1032
1033 aes_chip_init();
1034
1035 if ((ret = crypto_register_alg(&ifxdeu_aes_alg)))
1036 goto aes_err;
1037
1038 if ((ret = crypto_register_skcipher(&ifxdeu_ecb_aes_alg)))
1039 goto ecb_aes_err;
1040
1041 if ((ret = crypto_register_skcipher(&ifxdeu_cbc_aes_alg)))
1042 goto cbc_aes_err;
1043
1044 if ((ret = crypto_register_skcipher(&ifxdeu_ofb_aes_alg)))
1045 goto ofb_aes_err;
1046
1047 if ((ret = crypto_register_skcipher(&ifxdeu_cfb_aes_alg)))
1048 goto cfb_aes_err;
1049
1050 if ((ret = crypto_register_skcipher(&ifxdeu_ctr_basic_aes_alg)))
1051 goto ctr_basic_aes_err;
1052
1053 if ((ret = crypto_register_skcipher(&ifxdeu_ctr_rfc3686_aes_alg)))
1054 goto ctr_rfc3686_aes_err;
1055
1056 CRTCL_SECT_INIT;
1057
1058
1059 printk (KERN_NOTICE "IFX DEU AES initialized%s%s.\n", disable_multiblock ? "" : " (multiblock)", disable_deudma ? "" : " (DMA)");
1060 return ret;
1061
1062 ctr_rfc3686_aes_err:
1063 crypto_unregister_skcipher(&ifxdeu_ctr_rfc3686_aes_alg);
1064 printk (KERN_ERR "IFX ctr_rfc3686_aes initialization failed!\n");
1065 return ret;
1066 ctr_basic_aes_err:
1067 crypto_unregister_skcipher(&ifxdeu_ctr_basic_aes_alg);
1068 printk (KERN_ERR "IFX ctr_basic_aes initialization failed!\n");
1069 return ret;
1070 cfb_aes_err:
1071 crypto_unregister_skcipher(&ifxdeu_cfb_aes_alg);
1072 printk (KERN_ERR "IFX cfb_aes initialization failed!\n");
1073 return ret;
1074 ofb_aes_err:
1075 crypto_unregister_skcipher(&ifxdeu_ofb_aes_alg);
1076 printk (KERN_ERR "IFX ofb_aes initialization failed!\n");
1077 return ret;
1078 cbc_aes_err:
1079 crypto_unregister_skcipher(&ifxdeu_cbc_aes_alg);
1080 printk (KERN_ERR "IFX cbc_aes initialization failed!\n");
1081 return ret;
1082 ecb_aes_err:
1083 crypto_unregister_skcipher(&ifxdeu_ecb_aes_alg);
1084 printk (KERN_ERR "IFX aes initialization failed!\n");
1085 return ret;
1086 aes_err:
1087 printk(KERN_ERR "IFX DEU AES initialization failed!\n");
1088
1089 return ret;
1090 }
1091
1092 /*! \fn void ifxdeu_fini_aes (void)
1093 * \ingroup IFX_AES_FUNCTIONS
1094 * \brief unregister aes driver
1095 */
1096 void ifxdeu_fini_aes (void)
1097 {
1098 crypto_unregister_alg (&ifxdeu_aes_alg);
1099 crypto_unregister_skcipher (&ifxdeu_ecb_aes_alg);
1100 crypto_unregister_skcipher (&ifxdeu_cbc_aes_alg);
1101 crypto_unregister_skcipher (&ifxdeu_ofb_aes_alg);
1102 crypto_unregister_skcipher (&ifxdeu_cfb_aes_alg);
1103 crypto_unregister_skcipher (&ifxdeu_ctr_basic_aes_alg);
1104 crypto_unregister_skcipher (&ifxdeu_ctr_rfc3686_aes_alg);
1105
1106 }