1 /******************************************************************************
3 ** FILE NAME : ifxmips_aes.c
5 ** MODULES : DEU Module
7 ** DATE : September 8, 2009
8 ** AUTHOR : Mohammad Firdaus
9 ** DESCRIPTION : Data Encryption Unit Driver for AES Algorithm
10 ** COPYRIGHT : Copyright (c) 2009
11 ** Infineon Technologies AG
12 ** Am Campeon 1-12, 85579 Neubiberg, Germany
14 ** This program is free software; you can redistribute it and/or modify
15 ** it under the terms of the GNU General Public License as published by
16 ** the Free Software Foundation; either version 2 of the License, or
17 ** (at your option) any later version.
20 ** $Date $Author $Comment
21 ** 08,Sept 2009 Mohammad Firdaus Initial UEIP release
22 *******************************************************************************/
24 \defgroup IFX_DEU IFX_DEU_DRIVERS
26 \brief ifx DEU driver module
32 \brief AES Encryption Driver main file
36 \defgroup IFX_AES_FUNCTIONS IFX_AES_FUNCTIONS
38 \brief IFX AES driver Functions
42 /* Project Header Files */
43 #if defined(CONFIG_MODVERSIONS)
45 #include <linux/modeversions>
48 #include <linux/version.h>
49 #include <linux/module.h>
50 #include <linux/init.h>
51 #include <linux/proc_fs.h>
53 #include <linux/types.h>
54 #include <linux/errno.h>
55 #include <linux/crypto.h>
56 #include <linux/interrupt.h>
57 #include <linux/delay.h>
58 #include <asm/byteorder.h>
59 #include <crypto/algapi.h>
60 #include <crypto/b128ops.h>
61 #include <crypto/gf128mul.h>
62 #include <crypto/scatterwalk.h>
63 #include <crypto/xts.h>
64 #include <crypto/internal/hash.h>
65 #include <crypto/internal/skcipher.h>
67 #include "ifxmips_deu.h"
69 #if defined(CONFIG_DANUBE)
70 #include "ifxmips_deu_danube.h"
71 extern int ifx_danube_pre_1_4
;
72 #elif defined(CONFIG_AR9)
73 #include "ifxmips_deu_ar9.h"
74 #elif defined(CONFIG_VR9) || defined(CONFIG_AR10)
75 #include "ifxmips_deu_vr9.h"
#error "Unknown platform"
80 /* DMA related header and variables */
83 #define CRTCL_SECT_INIT spin_lock_init(&aes_lock)
84 #define CRTCL_SECT_START spin_lock_irqsave(&aes_lock, flag)
85 #define CRTCL_SECT_END spin_unlock_irqrestore(&aes_lock, flag)
87 /* Definition of constants */
88 #define AES_START IFX_AES_CON
89 #define AES_MIN_KEY_SIZE 16
90 #define AES_MAX_KEY_SIZE 32
91 #define AES_BLOCK_SIZE 16
92 #define AES_BLOCK_WORDS 4
93 #define CTR_RFC3686_NONCE_SIZE 4
94 #define CTR_RFC3686_IV_SIZE 8
95 #define CTR_RFC3686_MIN_KEY_SIZE (AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE)
96 #define CTR_RFC3686_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE)
97 #define AES_CBCMAC_DBN_TEMP_SIZE 128
100 extern char debug_level
;
101 #define DPRINTF(level, format, args...) if (level < debug_level) printk(KERN_INFO "[%s %s %d]: " format, __FILE__, __func__, __LINE__, ##args);
103 #define DPRINTF(level, format, args...)
104 #endif /* CRYPTO_DEBUG */
/* Function declaration */
107 int aes_chip_init(void);
108 u32
endian_swap(u32 input
);
109 u32
input_swap(u32 input
);
110 u32
* memory_alignment(const u8
*arg
, u32
*buff_alloc
, int in_out
, int nbytes
);
111 void aes_dma_memory_copy(u32
*outcopy
, u32
*out_dma
, u8
*out_arg
, int nbytes
);
112 void des_dma_memory_copy(u32
*outcopy
, u32
*out_dma
, u8
*out_arg
, int nbytes
);
113 int aes_memory_allocate(int value
);
114 int des_memory_allocate(int value
);
115 void memory_release(u32
*addr
);
118 extern void ifx_deu_aes (void *ctx_arg
, uint8_t *out_arg
, const uint8_t *in_arg
,
119 uint8_t *iv_arg
, size_t nbytes
, int encdec
, int mode
);
/* End of function declaration */
124 u8 buf
[AES_MAX_KEY_SIZE
];
125 u8 tweakkey
[AES_MAX_KEY_SIZE
];
126 u8 nonce
[CTR_RFC3686_NONCE_SIZE
];
127 u8 lastbuffer
[4 * XTS_BLOCK_SIZE
];
132 u32 (*temp
)[AES_BLOCK_WORDS
];
133 u8 block
[AES_BLOCK_SIZE
];
134 u8 hash
[AES_BLOCK_SIZE
];
137 extern int disable_deudma
;
138 extern int disable_multiblock
;
140 /*! \fn int aes_set_key (struct crypto_tfm *tfm, const uint8_t *in_key, unsigned int key_len)
141 * \ingroup IFX_AES_FUNCTIONS
142 * \brief sets the AES keys
143 * \param tfm linux crypto algo transform
144 * \param in_key input key
145 * \param key_len key lengths of 16, 24 and 32 bytes supported
146 * \return -EINVAL - bad key length, 0 - SUCCESS
148 int aes_set_key (struct crypto_tfm
*tfm
, const u8
*in_key
, unsigned int key_len
)
150 struct aes_ctx
*ctx
= crypto_tfm_ctx(tfm
);
152 //printk("set_key in %s\n", __FILE__);
156 if (key_len
!= 16 && key_len
!= 24 && key_len
!= 32) {
160 ctx
->key_length
= key_len
;
162 DPRINTF(0, "ctx @%p, key_len %d, ctx->key_length %d\n", ctx
, key_len
, ctx
->key_length
);
163 memcpy ((u8
*) (ctx
->buf
), in_key
, key_len
);
169 /*! \fn int aes_set_key_skcipher (struct crypto_skcipher *tfm, const uint8_t *in_key, unsigned int key_len)
170 * \ingroup IFX_AES_FUNCTIONS
171 * \brief sets the AES keys for skcipher
172 * \param tfm linux crypto skcipher
173 * \param in_key input key
174 * \param key_len key lengths of 16, 24 and 32 bytes supported
175 * \return -EINVAL - bad key length, 0 - SUCCESS
177 int aes_set_key_skcipher (struct crypto_skcipher
*tfm
, const u8
*in_key
, unsigned int key_len
)
179 return aes_set_key(crypto_skcipher_tfm(tfm
), in_key
, key_len
);
183 /*! \fn void aes_set_key_skcipher (void *ctx_arg)
184 * \ingroup IFX_AES_FUNCTIONS
185 * \brief sets the AES key to the hardware, requires spinlock to be set by caller
186 * \param ctx_arg crypto algo context
189 void aes_set_key_hw (void *ctx_arg
)
191 /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
192 volatile struct aes_t
*aes
= (volatile struct aes_t
*) AES_START
;
193 struct aes_ctx
*ctx
= (struct aes_ctx
*)ctx_arg
;
194 u8
*in_key
= ctx
->buf
;
195 int key_len
= ctx
->key_length
;
196 /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
198 if (ctx
->use_tweak
) in_key
= ctx
->tweakkey
;
200 /* 128, 192 or 256 bit key length */
201 aes
->controlr
.K
= key_len
/ 8 - 2;
202 if (key_len
== 128 / 8) {
203 aes
->K3R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 0));
204 aes
->K2R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 1));
205 aes
->K1R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 2));
206 aes
->K0R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 3));
208 else if (key_len
== 192 / 8) {
209 aes
->K5R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 0));
210 aes
->K4R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 1));
211 aes
->K3R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 2));
212 aes
->K2R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 3));
213 aes
->K1R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 4));
214 aes
->K0R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 5));
216 else if (key_len
== 256 / 8) {
217 aes
->K7R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 0));
218 aes
->K6R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 1));
219 aes
->K5R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 2));
220 aes
->K4R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 3));
221 aes
->K3R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 4));
222 aes
->K2R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 5));
223 aes
->K1R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 6));
224 aes
->K0R
= DEU_ENDIAN_SWAP(*((u32
*) in_key
+ 7));
227 printk (KERN_ERR
"[%s %s %d]: Invalid key_len : %d\n", __FILE__
, __func__
, __LINE__
, key_len
);
231 /* let HW pre-process DEcryption key in any case (even if
232 ENcryption is used). Key Valid (KV) bit is then only
233 checked in decryption routine! */
234 aes
->controlr
.PNK
= 1;
239 /*! \fn void ifx_deu_aes (void *ctx_arg, u8 *out_arg, const u8 *in_arg, u8 *iv_arg, size_t nbytes, int encdec, int mode)
240 * \ingroup IFX_AES_FUNCTIONS
241 * \brief main interface to AES hardware
242 * \param ctx_arg crypto algo context
243 * \param out_arg output bytestream
244 * \param in_arg input bytestream
245 * \param iv_arg initialization vector
246 * \param nbytes length of bytestream
247 * \param encdec 1 for encrypt; 0 for decrypt
248 * \param mode operation mode such as ebc, cbc, ctr
251 void ifx_deu_aes (void *ctx_arg
, u8
*out_arg
, const u8
*in_arg
,
252 u8
*iv_arg
, size_t nbytes
, int encdec
, int mode
)
255 /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
256 volatile struct aes_t
*aes
= (volatile struct aes_t
*) AES_START
;
257 struct aes_ctx
*ctx
= (struct aes_ctx
*)ctx_arg
;
259 /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
261 int byte_cnt
= nbytes
;
265 aes_set_key_hw (ctx_arg
);
267 aes
->controlr
.E_D
= !encdec
; //encryption
268 aes
->controlr
.O
= mode
; //0 ECB 1 CBC 2 OFB 3 CFB 4 CTR
270 //aes->controlr.F = 128; //default; only for CFB and OFB modes; change only for customer-specific apps
272 aes
->IV3R
= DEU_ENDIAN_SWAP(*(u32
*) iv_arg
);
273 aes
->IV2R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 1));
274 aes
->IV1R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 2));
275 aes
->IV0R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 3));
280 while (byte_cnt
>= 16) {
282 aes
->ID3R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 0));
283 aes
->ID2R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 1));
284 aes
->ID1R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 2));
285 aes
->ID0R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 3)); /* start crypto */
287 while (aes
->controlr
.BUS
) {
288 // this will not take long
291 *((volatile u32
*) out_arg
+ (i
* 4) + 0) = aes
->OD3R
;
292 *((volatile u32
*) out_arg
+ (i
* 4) + 1) = aes
->OD2R
;
293 *((volatile u32
*) out_arg
+ (i
* 4) + 2) = aes
->OD1R
;
294 *((volatile u32
*) out_arg
+ (i
* 4) + 3) = aes
->OD0R
;
300 /* To handle all non-aligned bytes (not aligned to 16B size) */
302 u8 temparea
[16] = {0,};
304 memcpy(temparea
, ((u32
*) in_arg
+ (i
* 4)), byte_cnt
);
306 aes
->ID3R
= INPUT_ENDIAN_SWAP(*((u32
*) temparea
+ 0));
307 aes
->ID2R
= INPUT_ENDIAN_SWAP(*((u32
*) temparea
+ 1));
308 aes
->ID1R
= INPUT_ENDIAN_SWAP(*((u32
*) temparea
+ 2));
309 aes
->ID0R
= INPUT_ENDIAN_SWAP(*((u32
*) temparea
+ 3)); /* start crypto */
311 while (aes
->controlr
.BUS
) {
314 *((volatile u32
*) temparea
+ 0) = aes
->OD3R
;
315 *((volatile u32
*) temparea
+ 1) = aes
->OD2R
;
316 *((volatile u32
*) temparea
+ 2) = aes
->OD1R
;
317 *((volatile u32
*) temparea
+ 3) = aes
->OD0R
;
319 memcpy(((u32
*) out_arg
+ (i
* 4)), temparea
, byte_cnt
);
322 //tc.chen : copy iv_arg back
324 *((u32
*) iv_arg
) = DEU_ENDIAN_SWAP(aes
->IV3R
);
325 *((u32
*) iv_arg
+ 1) = DEU_ENDIAN_SWAP(aes
->IV2R
);
326 *((u32
*) iv_arg
+ 2) = DEU_ENDIAN_SWAP(aes
->IV1R
);
327 *((u32
*) iv_arg
+ 3) = DEU_ENDIAN_SWAP(aes
->IV0R
);
334 * \fn int ctr_rfc3686_aes_set_key (struct crypto_tfm *tfm, const uint8_t *in_key, unsigned int key_len)
335 * \ingroup IFX_AES_FUNCTIONS
336 * \brief sets RFC3686 key
337 * \param tfm linux crypto algo transform
338 * \param in_key input key
339 * \param key_len key lengths of 20, 28 and 36 bytes supported; last 4 bytes is nonce
340 * \return 0 - SUCCESS
341 * -EINVAL - bad key length
343 int ctr_rfc3686_aes_set_key (struct crypto_tfm
*tfm
, const uint8_t *in_key
, unsigned int key_len
)
345 struct aes_ctx
*ctx
= crypto_tfm_ctx(tfm
);
347 //printk("ctr_rfc3686_aes_set_key in %s\n", __FILE__);
349 memcpy(ctx
->nonce
, in_key
+ (key_len
- CTR_RFC3686_NONCE_SIZE
),
350 CTR_RFC3686_NONCE_SIZE
);
352 key_len
-= CTR_RFC3686_NONCE_SIZE
; // remove 4 bytes of nonce
354 if (key_len
!= 16 && key_len
!= 24 && key_len
!= 32) {
358 ctx
->key_length
= key_len
;
361 memcpy ((u8
*) (ctx
->buf
), in_key
, key_len
);
367 * \fn int ctr_rfc3686_aes_set_key_skcipher (struct crypto_skcipher *tfm, const uint8_t *in_key, unsigned int key_len)
368 * \ingroup IFX_AES_FUNCTIONS
369 * \brief sets RFC3686 key for skcipher
370 * \param tfm linux crypto skcipher
371 * \param in_key input key
372 * \param key_len key lengths of 20, 28 and 36 bytes supported; last 4 bytes is nonce
373 * \return 0 - SUCCESS
374 * -EINVAL - bad key length
376 int ctr_rfc3686_aes_set_key_skcipher (struct crypto_skcipher
*tfm
, const uint8_t *in_key
, unsigned int key_len
)
378 return ctr_rfc3686_aes_set_key(crypto_skcipher_tfm(tfm
), in_key
, key_len
);
381 /*! \fn void ifx_deu_aes (void *ctx_arg, u8 *out_arg, const u8 *in_arg, u8 *iv_arg, u32 nbytes, int encdec, int mode)
382 * \ingroup IFX_AES_FUNCTIONS
383 * \brief main interface with deu hardware in DMA mode
384 * \param ctx_arg crypto algo context
385 * \param out_arg output bytestream
386 * \param in_arg input bytestream
387 * \param iv_arg initialization vector
388 * \param nbytes length of bytestream
389 * \param encdec 1 for encrypt; 0 for decrypt
390 * \param mode operation mode such as ebc, cbc, ctr
394 //definitions from linux/include/crypto.h:
395 //#define CRYPTO_TFM_MODE_ECB 0x00000001
396 //#define CRYPTO_TFM_MODE_CBC 0x00000002
397 //#define CRYPTO_TFM_MODE_CFB 0x00000004
398 //#define CRYPTO_TFM_MODE_CTR 0x00000008
399 //#define CRYPTO_TFM_MODE_OFB 0x00000010 // not even defined
400 //but hardware definition: 0 ECB 1 CBC 2 OFB 3 CFB 4 CTR
402 /*! \fn void ifx_deu_aes_ecb (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
403 * \ingroup IFX_AES_FUNCTIONS
404 * \brief sets AES hardware to ECB mode
405 * \param ctx crypto algo context
406 * \param dst output bytestream
407 * \param src input bytestream
408 * \param iv initialization vector
409 * \param nbytes length of bytestream
410 * \param encdec 1 for encrypt; 0 for decrypt
411 * \param inplace not used
413 void ifx_deu_aes_ecb (void *ctx
, uint8_t *dst
, const uint8_t *src
,
414 uint8_t *iv
, size_t nbytes
, int encdec
, int inplace
)
416 ifx_deu_aes (ctx
, dst
, src
, NULL
, nbytes
, encdec
, 0);
419 /*! \fn void ifx_deu_aes_cbc (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
420 * \ingroup IFX_AES_FUNCTIONS
421 * \brief sets AES hardware to CBC mode
422 * \param ctx crypto algo context
423 * \param dst output bytestream
424 * \param src input bytestream
425 * \param iv initialization vector
426 * \param nbytes length of bytestream
427 * \param encdec 1 for encrypt; 0 for decrypt
428 * \param inplace not used
430 void ifx_deu_aes_cbc (void *ctx
, uint8_t *dst
, const uint8_t *src
,
431 uint8_t *iv
, size_t nbytes
, int encdec
, int inplace
)
433 ifx_deu_aes (ctx
, dst
, src
, iv
, nbytes
, encdec
, 1);
436 /*! \fn void ifx_deu_aes_ofb (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
437 * \ingroup IFX_AES_FUNCTIONS
438 * \brief sets AES hardware to OFB mode
439 * \param ctx crypto algo context
440 * \param dst output bytestream
441 * \param src input bytestream
442 * \param iv initialization vector
443 * \param nbytes length of bytestream
444 * \param encdec 1 for encrypt; 0 for decrypt
445 * \param inplace not used
447 void ifx_deu_aes_ofb (void *ctx
, uint8_t *dst
, const uint8_t *src
,
448 uint8_t *iv
, size_t nbytes
, int encdec
, int inplace
)
450 ifx_deu_aes (ctx
, dst
, src
, iv
, nbytes
, encdec
, 2);
453 /*! \fn void ifx_deu_aes_cfb (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
454 * \ingroup IFX_AES_FUNCTIONS
455 * \brief sets AES hardware to CFB mode
456 * \param ctx crypto algo context
457 * \param dst output bytestream
458 * \param src input bytestream
459 * \param iv initialization vector
460 * \param nbytes length of bytestream
461 * \param encdec 1 for encrypt; 0 for decrypt
462 * \param inplace not used
464 void ifx_deu_aes_cfb (void *ctx
, uint8_t *dst
, const uint8_t *src
,
465 uint8_t *iv
, size_t nbytes
, int encdec
, int inplace
)
467 ifx_deu_aes (ctx
, dst
, src
, iv
, nbytes
, encdec
, 3);
470 /*! \fn void ifx_deu_aes_ctr (void *ctx, uint8_t *dst, const uint8_t *src, uint8_t *iv, size_t nbytes, int encdec, int inplace)
471 * \ingroup IFX_AES_FUNCTIONS
472 * \brief sets AES hardware to CTR mode
473 * \param ctx crypto algo context
474 * \param dst output bytestream
475 * \param src input bytestream
476 * \param iv initialization vector
477 * \param nbytes length of bytestream
478 * \param encdec 1 for encrypt; 0 for decrypt
479 * \param inplace not used
481 void ifx_deu_aes_ctr (void *ctx
, uint8_t *dst
, const uint8_t *src
,
482 uint8_t *iv
, size_t nbytes
, int encdec
, int inplace
)
484 ifx_deu_aes (ctx
, dst
, src
, iv
, nbytes
, encdec
, 4);
487 /*! \fn void aes_encrypt (struct crypto_tfm *tfm, uint8_t *out, const uint8_t *in)
488 * \ingroup IFX_AES_FUNCTIONS
489 * \brief encrypt AES_BLOCK_SIZE of data
490 * \param tfm linux crypto algo transform
491 * \param out output bytestream
492 * \param in input bytestream
494 void aes_encrypt (struct crypto_tfm
*tfm
, uint8_t *out
, const uint8_t *in
)
496 struct aes_ctx
*ctx
= crypto_tfm_ctx(tfm
);
497 ifx_deu_aes (ctx
, out
, in
, NULL
, AES_BLOCK_SIZE
,
498 CRYPTO_DIR_ENCRYPT
, 0);
501 /*! \fn void aes_decrypt (struct crypto_tfm *tfm, uint8_t *out, const uint8_t *in)
502 * \ingroup IFX_AES_FUNCTIONS
503 * \brief decrypt AES_BLOCK_SIZE of data
504 * \param tfm linux crypto algo transform
505 * \param out output bytestream
506 * \param in input bytestream
508 void aes_decrypt (struct crypto_tfm
*tfm
, uint8_t *out
, const uint8_t *in
)
510 struct aes_ctx
*ctx
= crypto_tfm_ctx(tfm
);
511 ifx_deu_aes (ctx
, out
, in
, NULL
, AES_BLOCK_SIZE
,
512 CRYPTO_DIR_DECRYPT
, 0);
516 * \brief AES function mappings
518 struct crypto_alg ifxdeu_aes_alg
= {
520 .cra_driver_name
= "ifxdeu-aes",
522 .cra_flags
= CRYPTO_ALG_TYPE_CIPHER
| CRYPTO_ALG_KERN_DRIVER_ONLY
,
523 .cra_blocksize
= AES_BLOCK_SIZE
,
524 .cra_ctxsize
= sizeof(struct aes_ctx
),
525 .cra_module
= THIS_MODULE
,
526 .cra_list
= LIST_HEAD_INIT(ifxdeu_aes_alg
.cra_list
),
529 .cia_min_keysize
= AES_MIN_KEY_SIZE
,
530 .cia_max_keysize
= AES_MAX_KEY_SIZE
,
531 .cia_setkey
= aes_set_key
,
532 .cia_encrypt
= aes_encrypt
,
533 .cia_decrypt
= aes_decrypt
,
538 /*! \fn int ecb_aes_encrypt(struct skcipher_req *req)
539 * \ingroup IFX_AES_FUNCTIONS
540 * \brief ECB AES encrypt using linux crypto skcipher
541 * \param req skcipher request
544 int ecb_aes_encrypt(struct skcipher_request
*req
)
546 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
547 struct skcipher_walk walk
;
549 unsigned int enc_bytes
, nbytes
;
551 err
= skcipher_walk_virt(&walk
, req
, false);
553 while ((nbytes
= enc_bytes
= walk
.nbytes
)) {
554 enc_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
555 ifx_deu_aes_ecb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
556 NULL
, enc_bytes
, CRYPTO_DIR_ENCRYPT
, 0);
557 nbytes
&= AES_BLOCK_SIZE
- 1;
558 err
= skcipher_walk_done(&walk
, nbytes
);
564 /*! \fn int ecb_aes_decrypt(struct skcipher_req *req)
565 * \ingroup IFX_AES_FUNCTIONS
566 * \brief ECB AES decrypt using linux crypto skcipher
567 * \param req skcipher request
570 int ecb_aes_decrypt(struct skcipher_request
*req
)
572 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
573 struct skcipher_walk walk
;
575 unsigned int dec_bytes
, nbytes
;
577 err
= skcipher_walk_virt(&walk
, req
, false);
579 while ((nbytes
= dec_bytes
= walk
.nbytes
)) {
580 dec_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
581 ifx_deu_aes_ecb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
582 NULL
, dec_bytes
, CRYPTO_DIR_DECRYPT
, 0);
583 nbytes
&= AES_BLOCK_SIZE
- 1;
584 err
= skcipher_walk_done(&walk
, nbytes
);
591 * \brief AES function mappings
593 struct skcipher_alg ifxdeu_ecb_aes_alg
= {
594 .base
.cra_name
= "ecb(aes)",
595 .base
.cra_driver_name
= "ifxdeu-ecb(aes)",
596 .base
.cra_priority
= 400,
597 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
| CRYPTO_ALG_KERN_DRIVER_ONLY
,
598 .base
.cra_blocksize
= AES_BLOCK_SIZE
,
599 .base
.cra_ctxsize
= sizeof(struct aes_ctx
),
600 .base
.cra_module
= THIS_MODULE
,
601 .base
.cra_list
= LIST_HEAD_INIT(ifxdeu_ecb_aes_alg
.base
.cra_list
),
602 .min_keysize
= AES_MIN_KEY_SIZE
,
603 .max_keysize
= AES_MAX_KEY_SIZE
,
604 .setkey
= aes_set_key_skcipher
,
605 .encrypt
= ecb_aes_encrypt
,
606 .decrypt
= ecb_aes_decrypt
,
609 /*! \fn int ecb_aes_encrypt(struct skcipher_req *req)
610 * \ingroup IFX_AES_FUNCTIONS
611 * \brief CBC AES encrypt using linux crypto skcipher
612 * \param req skcipher request
615 int cbc_aes_encrypt(struct skcipher_request
*req
)
617 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
618 struct skcipher_walk walk
;
620 unsigned int enc_bytes
, nbytes
;
622 err
= skcipher_walk_virt(&walk
, req
, false);
624 while ((nbytes
= enc_bytes
= walk
.nbytes
)) {
626 enc_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
627 ifx_deu_aes_cbc(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
628 iv
, enc_bytes
, CRYPTO_DIR_ENCRYPT
, 0);
629 nbytes
&= AES_BLOCK_SIZE
- 1;
630 err
= skcipher_walk_done(&walk
, nbytes
);
636 /*! \fn int cbc_aes_decrypt(struct skcipher_req *req)
637 * \ingroup IFX_AES_FUNCTIONS
638 * \brief CBC AES decrypt using linux crypto skcipher
639 * \param req skcipher request
642 int cbc_aes_decrypt(struct skcipher_request
*req
)
644 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
645 struct skcipher_walk walk
;
647 unsigned int dec_bytes
, nbytes
;
649 err
= skcipher_walk_virt(&walk
, req
, false);
651 while ((nbytes
= dec_bytes
= walk
.nbytes
)) {
653 dec_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
654 ifx_deu_aes_cbc(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
655 iv
, dec_bytes
, CRYPTO_DIR_DECRYPT
, 0);
656 nbytes
&= AES_BLOCK_SIZE
- 1;
657 err
= skcipher_walk_done(&walk
, nbytes
);
664 * \brief AES function mappings
666 struct skcipher_alg ifxdeu_cbc_aes_alg
= {
667 .base
.cra_name
= "cbc(aes)",
668 .base
.cra_driver_name
= "ifxdeu-cbc(aes)",
669 .base
.cra_priority
= 400,
670 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
| CRYPTO_ALG_KERN_DRIVER_ONLY
,
671 .base
.cra_blocksize
= AES_BLOCK_SIZE
,
672 .base
.cra_ctxsize
= sizeof(struct aes_ctx
),
673 .base
.cra_module
= THIS_MODULE
,
674 .base
.cra_list
= LIST_HEAD_INIT(ifxdeu_cbc_aes_alg
.base
.cra_list
),
675 .min_keysize
= AES_MIN_KEY_SIZE
,
676 .max_keysize
= AES_MAX_KEY_SIZE
,
677 .ivsize
= AES_BLOCK_SIZE
,
678 .setkey
= aes_set_key_skcipher
,
679 .encrypt
= cbc_aes_encrypt
,
680 .decrypt
= cbc_aes_decrypt
,
683 /*! \fn void ifx_deu_aes_xts (void *ctx_arg, u8 *out_arg, const u8 *in_arg, u8 *iv_arg, size_t nbytes, int encdec)
684 * \ingroup IFX_AES_FUNCTIONS
685 * \brief main interface to AES hardware for XTS impl
686 * \param ctx_arg crypto algo context
687 * \param out_arg output bytestream
688 * \param in_arg input bytestream
689 * \param iv_arg initialization vector
690 * \param nbytes length of bytestream
691 * \param encdec 1 for encrypt; 0 for decrypt
694 void ifx_deu_aes_xts (void *ctx_arg
, u8
*out_arg
, const u8
*in_arg
,
695 u8
*iv_arg
, size_t nbytes
, int encdec
)
697 /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
698 volatile struct aes_t
*aes
= (volatile struct aes_t
*) AES_START
;
699 struct aes_ctx
*ctx
= (struct aes_ctx
*)ctx_arg
;
701 /*~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ */
704 int byte_cnt
= nbytes
;
708 aes_set_key_hw (ctx_arg
);
710 aes
->controlr
.E_D
= !encdec
; //encryption
711 aes
->controlr
.O
= 1; //0 ECB 1 CBC 2 OFB 3 CFB 4 CTR - CBC mode for xts
714 while (byte_cnt
>= 16) {
717 if (((byte_cnt
% 16) > 0) && (byte_cnt
< (2*XTS_BLOCK_SIZE
))) {
718 memcpy(oldiv
, iv_arg
, 16);
719 gf128mul_x_ble((le128
*)iv_arg
, (le128
*)iv_arg
);
721 u128_xor((u128
*)((u32
*) in_arg
+ (i
* 4) + 0), (u128
*)((u32
*) in_arg
+ (i
* 4) + 0), (u128
*)iv_arg
);
724 aes
->IV3R
= DEU_ENDIAN_SWAP(*(u32
*) iv_arg
);
725 aes
->IV2R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 1));
726 aes
->IV1R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 2));
727 aes
->IV0R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 3));
729 aes
->ID3R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 0));
730 aes
->ID2R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 1));
731 aes
->ID1R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 2));
732 aes
->ID0R
= INPUT_ENDIAN_SWAP(*((u32
*) in_arg
+ (i
* 4) + 3)); /* start crypto */
734 while (aes
->controlr
.BUS
) {
735 // this will not take long
738 *((volatile u32
*) out_arg
+ (i
* 4) + 0) = aes
->OD3R
;
739 *((volatile u32
*) out_arg
+ (i
* 4) + 1) = aes
->OD2R
;
740 *((volatile u32
*) out_arg
+ (i
* 4) + 2) = aes
->OD1R
;
741 *((volatile u32
*) out_arg
+ (i
* 4) + 3) = aes
->OD0R
;
744 u128_xor((u128
*)((volatile u32
*) out_arg
+ (i
* 4) + 0), (u128
*)((volatile u32
*) out_arg
+ (i
* 4) + 0), (u128
*)iv_arg
);
746 gf128mul_x_ble((le128
*)iv_arg
, (le128
*)iv_arg
);
752 u8 state
[XTS_BLOCK_SIZE
] = {0,};
754 if (!encdec
) memcpy(iv_arg
, oldiv
, 16);
756 aes
->IV3R
= DEU_ENDIAN_SWAP(*(u32
*) iv_arg
);
757 aes
->IV2R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 1));
758 aes
->IV1R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 2));
759 aes
->IV0R
= DEU_ENDIAN_SWAP(*((u32
*) iv_arg
+ 3));
761 memcpy(state
, ((u32
*) in_arg
+ (i
* 4) + 0), byte_cnt
);
762 memcpy((state
+ byte_cnt
), (out_arg
+ ((i
- 1) * 16) + byte_cnt
), (XTS_BLOCK_SIZE
- byte_cnt
));
764 u128_xor((u128
*)state
, (u128
*)state
, (u128
*)iv_arg
);
767 aes
->ID3R
= INPUT_ENDIAN_SWAP(*((u32
*) state
+ 0));
768 aes
->ID2R
= INPUT_ENDIAN_SWAP(*((u32
*) state
+ 1));
769 aes
->ID1R
= INPUT_ENDIAN_SWAP(*((u32
*) state
+ 2));
770 aes
->ID0R
= INPUT_ENDIAN_SWAP(*((u32
*) state
+ 3)); /* start crypto */
772 memcpy(((u32
*) out_arg
+ (i
* 4) + 0), ((u32
*) out_arg
+ ((i
- 1) * 4) + 0), byte_cnt
);
774 while (aes
->controlr
.BUS
) {
775 // this will not take long
778 *((volatile u32
*) out_arg
+ ((i
-1) * 4) + 0) = aes
->OD3R
;
779 *((volatile u32
*) out_arg
+ ((i
-1) * 4) + 1) = aes
->OD2R
;
780 *((volatile u32
*) out_arg
+ ((i
-1) * 4) + 2) = aes
->OD1R
;
781 *((volatile u32
*) out_arg
+ ((i
-1) * 4) + 3) = aes
->OD0R
;
784 u128_xor((u128
*)((volatile u32
*) out_arg
+ ((i
-1) * 4) + 0), (u128
*)((volatile u32
*) out_arg
+ ((i
-1) * 4) + 0), (u128
*)iv_arg
);
791 /*! \fn int xts_aes_encrypt(struct skcipher_req *req)
792 * \ingroup IFX_AES_FUNCTIONS
793 * \brief XTS AES encrypt using linux crypto skcipher
794 * \param req skcipher request
797 int xts_aes_encrypt(struct skcipher_request
*req
)
799 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
800 struct skcipher_walk walk
;
802 unsigned int enc_bytes
, nbytes
, processed
;
804 err
= skcipher_walk_virt(&walk
, req
, false);
806 if (req
->cryptlen
< XTS_BLOCK_SIZE
)
810 aes_encrypt(req
->base
.tfm
, walk
.iv
, walk
.iv
);
814 while ((nbytes
= walk
.nbytes
) && (walk
.nbytes
>= (XTS_BLOCK_SIZE
* 2)) ) {
816 if (nbytes
== walk
.total
) {
819 enc_bytes
= nbytes
& ~(XTS_BLOCK_SIZE
- 1);
820 if ((req
->cryptlen
- processed
- enc_bytes
) < (XTS_BLOCK_SIZE
)) {
821 if (enc_bytes
> (2 * XTS_BLOCK_SIZE
)) {
822 enc_bytes
-= XTS_BLOCK_SIZE
;
828 ifx_deu_aes_xts(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
829 iv
, enc_bytes
, CRYPTO_DIR_ENCRYPT
);
830 err
= skcipher_walk_done(&walk
, nbytes
- enc_bytes
);
831 processed
+= enc_bytes
;
836 nbytes
= req
->cryptlen
- processed
;
837 scatterwalk_map_and_copy(ctx
->lastbuffer
, req
->src
, (req
->cryptlen
- nbytes
), nbytes
, 0);
838 ifx_deu_aes_xts(ctx
, ctx
->lastbuffer
, ctx
->lastbuffer
,
839 iv
, nbytes
, CRYPTO_DIR_ENCRYPT
);
840 scatterwalk_map_and_copy(ctx
->lastbuffer
, req
->dst
, (req
->cryptlen
- nbytes
), nbytes
, 1);
841 skcipher_request_complete(req
, 0);
847 /*! \fn int xts_aes_decrypt(struct skcipher_req *req)
848 * \ingroup IFX_AES_FUNCTIONS
849 * \brief XTS AES decrypt using linux crypto skcipher
850 * \param req skcipher request
853 int xts_aes_decrypt(struct skcipher_request
*req
)
855 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
856 struct skcipher_walk walk
;
858 unsigned int dec_bytes
, nbytes
, processed
;
860 err
= skcipher_walk_virt(&walk
, req
, false);
862 if (req
->cryptlen
< XTS_BLOCK_SIZE
)
866 aes_encrypt(req
->base
.tfm
, walk
.iv
, walk
.iv
);
870 while ((nbytes
= walk
.nbytes
) && (walk
.nbytes
>= (XTS_BLOCK_SIZE
* 2))) {
872 if (nbytes
== walk
.total
) {
875 dec_bytes
= nbytes
& ~(XTS_BLOCK_SIZE
- 1);
876 if ((req
->cryptlen
- processed
- dec_bytes
) < (XTS_BLOCK_SIZE
)) {
877 if (dec_bytes
> (2 * XTS_BLOCK_SIZE
)) {
878 dec_bytes
-= XTS_BLOCK_SIZE
;
884 ifx_deu_aes_xts(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
885 iv
, dec_bytes
, CRYPTO_DIR_DECRYPT
);
886 err
= skcipher_walk_done(&walk
, nbytes
- dec_bytes
);
887 processed
+= dec_bytes
;
892 nbytes
= req
->cryptlen
- processed
;
893 scatterwalk_map_and_copy(ctx
->lastbuffer
, req
->src
, (req
->cryptlen
- nbytes
), nbytes
, 0);
894 ifx_deu_aes_xts(ctx
, ctx
->lastbuffer
, ctx
->lastbuffer
,
895 iv
, nbytes
, CRYPTO_DIR_DECRYPT
);
896 scatterwalk_map_and_copy(ctx
->lastbuffer
, req
->dst
, (req
->cryptlen
- nbytes
), nbytes
, 1);
897 skcipher_request_complete(req
, 0);
903 /*! \fn int xts_aes_set_key_skcipher (struct crypto_tfm *tfm, const uint8_t *in_key, unsigned int key_len)
904 * \ingroup IFX_AES_FUNCTIONS
905 * \brief sets the AES keys for XTS
906 * \param tfm linux crypto algo transform
907 * \param in_key input key
908 * \param key_len key lengths of 16, 24 and 32 bytes supported
909 * \return -EINVAL - bad key length, 0 - SUCCESS
911 int xts_aes_set_key_skcipher (struct crypto_skcipher
*tfm
, const u8
*in_key
, unsigned int key_len
)
913 struct aes_ctx
*ctx
= crypto_tfm_ctx(crypto_skcipher_tfm(tfm
));
914 unsigned int keylen
= (key_len
/ 2);
916 if (key_len
% 2) return -EINVAL
;
918 if (keylen
!= 16 && keylen
!= 24 && keylen
!= 32) {
922 ctx
->key_length
= keylen
;
924 DPRINTF(0, "ctx @%p, key_len %d, ctx->key_length %d\n", ctx
, key_len
, ctx
->key_length
);
925 memcpy ((u8
*) (ctx
->buf
), in_key
, keylen
);
926 memcpy ((u8
*) (ctx
->tweakkey
), in_key
+ keylen
, keylen
);
932 * \brief AES function mappings
934 struct skcipher_alg ifxdeu_xts_aes_alg
= {
935 .base
.cra_name
= "xts(aes)",
936 .base
.cra_driver_name
= "ifxdeu-xts(aes)",
937 .base
.cra_priority
= 400,
938 .base
.cra_flags
= CRYPTO_ALG_TYPE_SKCIPHER
| CRYPTO_ALG_KERN_DRIVER_ONLY
,
939 .base
.cra_blocksize
= XTS_BLOCK_SIZE
,
940 .base
.cra_ctxsize
= sizeof(struct aes_ctx
),
941 .base
.cra_module
= THIS_MODULE
,
942 .base
.cra_list
= LIST_HEAD_INIT(ifxdeu_xts_aes_alg
.base
.cra_list
),
943 .min_keysize
= AES_MIN_KEY_SIZE
* 2,
944 .max_keysize
= AES_MAX_KEY_SIZE
* 2,
945 .ivsize
= XTS_BLOCK_SIZE
,
946 .walksize
= 2 * XTS_BLOCK_SIZE
,
947 .setkey
= xts_aes_set_key_skcipher
,
948 .encrypt
= xts_aes_encrypt
,
949 .decrypt
= xts_aes_decrypt
,
952 /*! \fn int ofb_aes_encrypt(struct skcipher_req *req)
953 * \ingroup IFX_AES_FUNCTIONS
954 * \brief OFB AES encrypt using linux crypto skcipher
955 * \param req skcipher request
958 int ofb_aes_encrypt(struct skcipher_request
*req
)
960 struct aes_ctx
*ctx
= crypto_tfm_ctx(req
->base
.tfm
);
961 struct skcipher_walk walk
;
963 unsigned int enc_bytes
, nbytes
;
965 err
= skcipher_walk_virt(&walk
, req
, false);
967 while ((nbytes
= enc_bytes
= walk
.nbytes
) && (walk
.nbytes
>= AES_BLOCK_SIZE
)) {
968 enc_bytes
-= (nbytes
% AES_BLOCK_SIZE
);
969 ifx_deu_aes_ofb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
970 walk
.iv
, enc_bytes
, CRYPTO_DIR_ENCRYPT
, 0);
971 nbytes
&= AES_BLOCK_SIZE
- 1;
972 err
= skcipher_walk_done(&walk
, nbytes
);
975 /* to handle remaining bytes < AES_BLOCK_SIZE */
977 ifx_deu_aes_ofb(ctx
, walk
.dst
.virt
.addr
, walk
.src
.virt
.addr
,
978 walk
.iv
, walk
.nbytes
, CRYPTO_DIR_ENCRYPT
, 0);
979 err
= skcipher_walk_done(&walk
, 0);
/*! \fn int ofb_aes_decrypt(struct skcipher_req *req)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief OFB AES decrypt using linux crypto skcipher
 *  \param req skcipher request
 *  \return 0 on success, negative error code from the skcipher walk otherwise
*/
int ofb_aes_decrypt(struct skcipher_request *req)
{
    struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
    struct skcipher_walk walk;
    int err;
    unsigned int dec_bytes, nbytes;

    err = skcipher_walk_virt(&walk, req, false);

    /* Process whole AES blocks per segment; the sub-block tail is
     * handed back to the walk and processed after the loop. */
    while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
        dec_bytes -= (nbytes % AES_BLOCK_SIZE); /* round down to whole blocks */
        ifx_deu_aes_ofb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                walk.iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
        nbytes &= AES_BLOCK_SIZE - 1;           /* leftover bytes (< one block) */
        err = skcipher_walk_done(&walk, nbytes);
    }

    /* to handle remaining bytes < AES_BLOCK_SIZE */
    if (walk.nbytes) {
        ifx_deu_aes_ofb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                walk.iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
        err = skcipher_walk_done(&walk, 0);
    }

    return err;
}
/*
 * \brief AES function mappings — OFB mode skcipher registration data
 */
struct skcipher_alg ifxdeu_ofb_aes_alg = {
    .base.cra_name          = "ofb(aes)",
    .base.cra_driver_name   = "ifxdeu-ofb(aes)",
    .base.cra_priority      = 400, /* prefer DEU over generic software impl */
    .base.cra_flags         = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize     = 1, /* OFB behaves as a stream cipher */
    .base.cra_ctxsize       = sizeof(struct aes_ctx),
    .base.cra_module        = THIS_MODULE,
    .base.cra_list          = LIST_HEAD_INIT(ifxdeu_ofb_aes_alg.base.cra_list),
    .min_keysize            = AES_MIN_KEY_SIZE,
    .max_keysize            = AES_MAX_KEY_SIZE,
    .ivsize                 = AES_BLOCK_SIZE,
    .chunksize              = AES_BLOCK_SIZE,
    .walksize               = AES_BLOCK_SIZE,
    .setkey                 = aes_set_key_skcipher,
    .encrypt                = ofb_aes_encrypt,
    .decrypt                = ofb_aes_decrypt,
};
/*! \fn int cfb_aes_encrypt(struct skcipher_req *req)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief CFB AES encrypt using linux crypto skcipher
 *  \param req skcipher request
 *  \return 0 on success, negative error code from the skcipher walk otherwise
*/
int cfb_aes_encrypt(struct skcipher_request *req)
{
    struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
    struct skcipher_walk walk;
    int err;
    unsigned int enc_bytes, nbytes;

    err = skcipher_walk_virt(&walk, req, false);

    /* Whole blocks go to the DEU here; the sub-block remainder is
     * returned to the walk and handled after the loop. */
    while ((nbytes = enc_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
        enc_bytes -= (nbytes % AES_BLOCK_SIZE); /* round down to whole blocks */
        ifx_deu_aes_cfb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                walk.iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
        nbytes &= AES_BLOCK_SIZE - 1;           /* leftover bytes (< one block) */
        err = skcipher_walk_done(&walk, nbytes);
    }

    /* to handle remaining bytes < AES_BLOCK_SIZE */
    if (walk.nbytes) {
        ifx_deu_aes_cfb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                walk.iv, walk.nbytes, CRYPTO_DIR_ENCRYPT, 0);
        err = skcipher_walk_done(&walk, 0);
    }

    return err;
}
/*! \fn int cfb_aes_decrypt(struct skcipher_req *req)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief CFB AES decrypt using linux crypto skcipher
 *  \param req skcipher request
 *  \return 0 on success, negative error code from the skcipher walk otherwise
*/
int cfb_aes_decrypt(struct skcipher_request *req)
{
    struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
    struct skcipher_walk walk;
    int err;
    unsigned int dec_bytes, nbytes;

    err = skcipher_walk_virt(&walk, req, false);

    /* Whole blocks per segment; the sub-block tail is processed
     * after the loop as the final partial segment. */
    while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
        dec_bytes -= (nbytes % AES_BLOCK_SIZE); /* round down to whole blocks */
        ifx_deu_aes_cfb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                walk.iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
        nbytes &= AES_BLOCK_SIZE - 1;           /* leftover bytes (< one block) */
        err = skcipher_walk_done(&walk, nbytes);
    }

    /* to handle remaining bytes < AES_BLOCK_SIZE */
    if (walk.nbytes) {
        ifx_deu_aes_cfb(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                walk.iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
        err = skcipher_walk_done(&walk, 0);
    }

    return err;
}
/*
 * \brief AES function mappings — CFB mode skcipher registration data
 */
struct skcipher_alg ifxdeu_cfb_aes_alg = {
    .base.cra_name          = "cfb(aes)",
    .base.cra_driver_name   = "ifxdeu-cfb(aes)",
    .base.cra_priority      = 400, /* prefer DEU over generic software impl */
    .base.cra_flags         = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize     = 1, /* CFB behaves as a stream cipher */
    .base.cra_ctxsize       = sizeof(struct aes_ctx),
    .base.cra_module        = THIS_MODULE,
    .base.cra_list          = LIST_HEAD_INIT(ifxdeu_cfb_aes_alg.base.cra_list),
    .min_keysize            = AES_MIN_KEY_SIZE,
    .max_keysize            = AES_MAX_KEY_SIZE,
    .ivsize                 = AES_BLOCK_SIZE,
    .chunksize              = AES_BLOCK_SIZE,
    .walksize               = AES_BLOCK_SIZE,
    .setkey                 = aes_set_key_skcipher,
    .encrypt                = cfb_aes_encrypt,
    .decrypt                = cfb_aes_decrypt,
};
/*! \fn int ctr_basic_aes_encrypt(struct skcipher_req *req)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief Counter mode AES encrypt using linux crypto skcipher
 *  \param req skcipher request
 *  \return 0 on success, negative error code from the skcipher walk otherwise
*/
int ctr_basic_aes_encrypt(struct skcipher_request *req)
{
    struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
    struct skcipher_walk walk;
    int err;
    unsigned int enc_bytes, nbytes;

    err = skcipher_walk_virt(&walk, req, false);

    /* Whole blocks per walk segment; sub-block tail comes back as
     * the final partial segment and is handled after the loop. */
    while ((nbytes = enc_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
        enc_bytes -= (nbytes % AES_BLOCK_SIZE); /* round down to whole blocks */
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                walk.iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
        nbytes &= AES_BLOCK_SIZE - 1;           /* leftover bytes (< one block) */
        err = skcipher_walk_done(&walk, nbytes);
    }

    /* to handle remaining bytes < AES_BLOCK_SIZE */
    if (walk.nbytes) {
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                walk.iv, walk.nbytes, CRYPTO_DIR_ENCRYPT, 0);
        err = skcipher_walk_done(&walk, 0);
    }

    return err;
}
/*! \fn int ctr_basic_aes_decrypt(struct skcipher_req *req)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief Counter mode AES decrypt using linux crypto skcipher
 *  \param req skcipher request
 *  \return 0 on success, negative error code from the skcipher walk otherwise
*/
int ctr_basic_aes_decrypt(struct skcipher_request *req)
{
    struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
    struct skcipher_walk walk;
    int err;
    unsigned int dec_bytes, nbytes;

    err = skcipher_walk_virt(&walk, req, false);

    /* CTR decrypt is the same keystream XOR as encrypt; only the
     * direction flag passed to the DEU differs. */
    while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
        dec_bytes -= (nbytes % AES_BLOCK_SIZE); /* round down to whole blocks */
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                walk.iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
        nbytes &= AES_BLOCK_SIZE - 1;           /* leftover bytes (< one block) */
        err = skcipher_walk_done(&walk, nbytes);
    }

    /* to handle remaining bytes < AES_BLOCK_SIZE */
    if (walk.nbytes) {
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                walk.iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
        err = skcipher_walk_done(&walk, 0);
    }

    return err;
}
/*
 * \brief AES function mappings — basic CTR mode skcipher registration data
 */
struct skcipher_alg ifxdeu_ctr_basic_aes_alg = {
    .base.cra_name          = "ctr(aes)",
    .base.cra_driver_name   = "ifxdeu-ctr(aes)",
    .base.cra_priority      = 400, /* prefer DEU over generic software impl */
    .base.cra_flags         = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize     = 1, /* CTR behaves as a stream cipher */
    .base.cra_ctxsize       = sizeof(struct aes_ctx),
    .base.cra_module        = THIS_MODULE,
    .base.cra_list          = LIST_HEAD_INIT(ifxdeu_ctr_basic_aes_alg.base.cra_list),
    .min_keysize            = AES_MIN_KEY_SIZE,
    .max_keysize            = AES_MAX_KEY_SIZE,
    .ivsize                 = AES_BLOCK_SIZE,
    .walksize               = AES_BLOCK_SIZE,
    .setkey                 = aes_set_key_skcipher,
    .encrypt                = ctr_basic_aes_encrypt,
    .decrypt                = ctr_basic_aes_decrypt,
};
/*! \fn int ctr_rfc3686_aes_encrypt(struct skcipher_req *req)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief Counter mode AES (rfc3686) encrypt using linux crypto skcipher
 *  \param req skcipher request
 *  \return 0 on success, negative error code from the skcipher walk otherwise
*/
int ctr_rfc3686_aes_encrypt(struct skcipher_request *req)
{
    struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
    struct skcipher_walk walk;
    unsigned int nbytes, enc_bytes;
    int err;
    u8 rfc3686_iv[16]; /* nonce (4) || IV (8) || counter (4) */

    err = skcipher_walk_virt(&walk, req, false);
    nbytes = walk.nbytes;

    /* set up counter block */
    memcpy(rfc3686_iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
    memcpy(rfc3686_iv + CTR_RFC3686_NONCE_SIZE, walk.iv, CTR_RFC3686_IV_SIZE);

    /* initialize counter portion of counter block (RFC 3686 starts at 1) */
    *(__be32 *)(rfc3686_iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
        cpu_to_be32(1);

    /* Whole blocks per segment; sub-block tail handled after the loop. */
    while ((nbytes = enc_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
        enc_bytes -= (nbytes % AES_BLOCK_SIZE); /* round down to whole blocks */
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                rfc3686_iv, enc_bytes, CRYPTO_DIR_ENCRYPT, 0);
        nbytes &= AES_BLOCK_SIZE - 1;           /* leftover bytes (< one block) */
        err = skcipher_walk_done(&walk, nbytes);
    }

    /* to handle remaining bytes < AES_BLOCK_SIZE */
    if (walk.nbytes) {
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                rfc3686_iv, walk.nbytes, CRYPTO_DIR_ENCRYPT, 0);
        err = skcipher_walk_done(&walk, 0);
    }

    return err;
}
/*! \fn int ctr_rfc3686_aes_decrypt(struct skcipher_req *req)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief Counter mode AES (rfc3686) decrypt using linux crypto skcipher
 *  \param req skcipher request
 *  \return 0 on success, negative error code from the skcipher walk otherwise
*/
int ctr_rfc3686_aes_decrypt(struct skcipher_request *req)
{
    struct aes_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
    struct skcipher_walk walk;
    unsigned int nbytes, dec_bytes;
    int err;
    u8 rfc3686_iv[16]; /* nonce (4) || IV (8) || counter (4) */

    err = skcipher_walk_virt(&walk, req, false);
    nbytes = walk.nbytes;

    /* set up counter block */
    memcpy(rfc3686_iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
    memcpy(rfc3686_iv + CTR_RFC3686_NONCE_SIZE, walk.iv, CTR_RFC3686_IV_SIZE);

    /* initialize counter portion of counter block (RFC 3686 starts at 1) */
    *(__be32 *)(rfc3686_iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
        cpu_to_be32(1);

    /* Whole blocks per segment; sub-block tail handled after the loop. */
    while ((nbytes = dec_bytes = walk.nbytes) && (walk.nbytes >= AES_BLOCK_SIZE)) {
        dec_bytes -= (nbytes % AES_BLOCK_SIZE); /* round down to whole blocks */
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                rfc3686_iv, dec_bytes, CRYPTO_DIR_DECRYPT, 0);
        nbytes &= AES_BLOCK_SIZE - 1;           /* leftover bytes (< one block) */
        err = skcipher_walk_done(&walk, nbytes);
    }

    /* to handle remaining bytes < AES_BLOCK_SIZE */
    if (walk.nbytes) {
        ifx_deu_aes_ctr(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                rfc3686_iv, walk.nbytes, CRYPTO_DIR_DECRYPT, 0);
        err = skcipher_walk_done(&walk, 0);
    }

    return err;
}
/*
 * \brief AES function mappings — RFC 3686 CTR mode skcipher registration data
 */
struct skcipher_alg ifxdeu_ctr_rfc3686_aes_alg = {
    .base.cra_name          = "rfc3686(ctr(aes))",
    .base.cra_driver_name   = "ifxdeu-ctr-rfc3686(aes)",
    .base.cra_priority      = 400, /* prefer DEU over generic software impl */
    .base.cra_flags         = CRYPTO_ALG_TYPE_SKCIPHER | CRYPTO_ALG_KERN_DRIVER_ONLY,
    .base.cra_blocksize     = 1, /* CTR behaves as a stream cipher */
    .base.cra_ctxsize       = sizeof(struct aes_ctx),
    .base.cra_module        = THIS_MODULE,
    .base.cra_list          = LIST_HEAD_INIT(ifxdeu_ctr_rfc3686_aes_alg.base.cra_list),
    /* rfc3686 key material = AES key + 4-byte nonce, hence distinct sizes */
    .min_keysize            = CTR_RFC3686_MIN_KEY_SIZE,
    .max_keysize            = CTR_RFC3686_MAX_KEY_SIZE,
    .ivsize                 = CTR_RFC3686_IV_SIZE,
    .walksize               = AES_BLOCK_SIZE,
    .setkey                 = ctr_rfc3686_aes_set_key_skcipher,
    .encrypt                = ctr_rfc3686_aes_encrypt,
    .decrypt                = ctr_rfc3686_aes_decrypt,
};
static int aes_cbcmac_final_impl(struct shash_desc *desc, u8 *out, bool hash_final);

/*! \fn static void aes_cbcmac_transform(struct shash_desc *desc, u8 const *in)
 *  \ingroup IFX_aes_cbcmac_FUNCTIONS
 *  \brief save input block to context
 *  \param desc linux crypto shash descriptor
 *  \param in 16-byte block of input
*/
static void aes_cbcmac_transform(struct shash_desc *desc, u8 const *in)
{
    struct aes_ctx *mctx = crypto_shash_ctx(desc->tfm);

    /* Blocks are only queued here; the hardware pass happens in
     * aes_cbcmac_final_impl().  If the queue would overflow, run an
     * intermediate (non-final) pass first to drain it. */
    if ( ((mctx->dbn)+1) > AES_CBCMAC_DBN_TEMP_SIZE )
    {
        //printk("aes_cbcmac_DBN_TEMP_SIZE exceeded\n");
        aes_cbcmac_final_impl(desc, (u8 *)mctx->hash, false);
    }

    memcpy(&mctx->temp[mctx->dbn], in, 16); //dbn workaround
    mctx->dbn += 1;
}
1344 /*! \fn int aes_cbcmac_setkey(struct crypto_shash *tfm, const u8 *key, unsigned int keylen)
1345 * \ingroup IFX_aes_cbcmac_FUNCTIONS
1346 * \brief sets cbcmac aes key
1347 * \param tfm linux crypto shash transform
1348 * \param key input key
1351 static int aes_cbcmac_setkey(struct crypto_shash
*tfm
, const u8
*key
, unsigned int keylen
)
1353 return aes_set_key(crypto_shash_tfm(tfm
), key
, keylen
);
/*! \fn void aes_cbcmac_init(struct shash_desc *desc)
 *  \ingroup IFX_aes_cbcmac_FUNCTIONS
 *  \brief initialize cbcmac aes context (zero hash state and counters)
 *  \param desc linux crypto shash descriptor
 *  \return 0 always
*/
static int aes_cbcmac_init(struct shash_desc *desc)
{
    struct aes_ctx *mctx = crypto_shash_ctx(desc->tfm);

    mctx->dbn = 0; //dbn workaround: no blocks queued yet
    mctx->started = 0;      /* first final_impl pass starts from a zero IV */
    mctx->byte_count = 0;
    memset(mctx->hash, 0, AES_BLOCK_SIZE);

    return 0;
}
/*! \fn void aes_cbcmac_update(struct shash_desc *desc, const u8 *data, unsigned int len)
 *  \ingroup IFX_aes_cbcmac_FUNCTIONS
 *  \brief on-the-fly cbcmac aes computation
 *  \param desc linux crypto shash descriptor
 *  \param data input data
 *  \param len size of input data
 *  \return 0 always
*/
static int aes_cbcmac_update(struct shash_desc *desc, const u8 *data, unsigned int len)
{
    struct aes_ctx *mctx = crypto_shash_ctx(desc->tfm);
    /* free space left in the 16-byte staging block */
    const u32 avail = sizeof(mctx->block) - (mctx->byte_count & 0x0f);

    mctx->byte_count += len;

    /* Input fits in the staging block — just buffer it. */
    if (avail > len) {
        memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
               data, len);
        return 0;
    }

    /* Fill the staging block and queue it. */
    memcpy((char *)mctx->block + (sizeof(mctx->block) - avail),
           data, avail);

    aes_cbcmac_transform(desc, mctx->block);
    data += avail;
    len -= avail;

    /* Queue all remaining full blocks. */
    while (len >= sizeof(mctx->block)) {
        memcpy(mctx->block, data, sizeof(mctx->block));
        aes_cbcmac_transform(desc, mctx->block);
        data += sizeof(mctx->block);
        len -= sizeof(mctx->block);
    }

    /* Stash the sub-block tail for the next update/final call. */
    memcpy(mctx->block, data, len);
    return 0;
}
/*! \fn static int aes_cbcmac_final_impl(struct shash_desc *desc, u8 *out, bool hash_final)
 *  \ingroup IFX_aes_cbcmac_FUNCTIONS
 *  \brief compute final or intermediate cbcmac aes value over the queued blocks
 *  \param desc linux crypto shash descriptor
 *  \param out final cbcmac aes output value
 *  \param hash_final true = finalize (pad tail, emit digest, reset context);
 *                    false = intermediate drain of the queued-block buffer
 *  \return 0 always
*/
static int aes_cbcmac_final_impl(struct shash_desc *desc, u8 *out, bool hash_final)
{
    struct aes_ctx *mctx = crypto_shash_ctx(desc->tfm);
    const unsigned int offset = mctx->byte_count & 0x0f; /* bytes in partial tail block */
    char *p = (char *)mctx->block + offset;
    volatile struct aes_t *aes = (volatile struct aes_t *) AES_START;
    unsigned long flag;
    int i = 0;
    int dbn;
    u32 *in = mctx->temp[0]; /* queued full blocks (dbn workaround buffer) */

    CRTCL_SECT_START;

    aes_set_key_hw (mctx);

    aes->controlr.E_D = !CRYPTO_DIR_ENCRYPT;    //encryption
    aes->controlr.O = 1; //0 ECB 1 CBC 2 OFB 3 CFB 4 CTR

    //aes->controlr.F = 128; //default; only for CFB and OFB modes; change only for customer-specific apps

    //printk("\ndbn = %d\n", mctx->dbn);

    /* Resume CBC chaining from the saved intermediate hash, or start
     * from a zero IV on the very first pass. */
    if (mctx->started) {
        aes->IV3R = DEU_ENDIAN_SWAP(*(u32 *) mctx->hash);
        aes->IV2R = DEU_ENDIAN_SWAP(*((u32 *) mctx->hash + 1));
        aes->IV1R = DEU_ENDIAN_SWAP(*((u32 *) mctx->hash + 2));
        aes->IV0R = DEU_ENDIAN_SWAP(*((u32 *) mctx->hash + 3));
    } else {
        mctx->started = 1;
        aes->IV3R = 0;
        aes->IV2R = 0;
        aes->IV1R = 0;
        aes->IV0R = 0;
    }

    /* Run every queued 16-byte block through the DEU in CBC mode;
     * writing ID0R starts the crypto operation. */
    for (dbn = 0; dbn < mctx->dbn; dbn++)
    {
        aes->ID3R = INPUT_ENDIAN_SWAP(*((u32 *) in + (i * 4) + 0));
        aes->ID2R = INPUT_ENDIAN_SWAP(*((u32 *) in + (i * 4) + 1));
        aes->ID1R = INPUT_ENDIAN_SWAP(*((u32 *) in + (i * 4) + 2));
        aes->ID0R = INPUT_ENDIAN_SWAP(*((u32 *) in + (i * 4) + 3)); /* start crypto */

        while (aes->controlr.BUS) {
            // this will not take long
        }

        in += 4; /* advance to next queued block */
    }

    /* CBC-MAC state = last chaining value, read back from the IV registers. */
    *((u32 *) mctx->hash) = DEU_ENDIAN_SWAP(aes->IV3R);
    *((u32 *) mctx->hash + 1) = DEU_ENDIAN_SWAP(aes->IV2R);
    *((u32 *) mctx->hash + 2) = DEU_ENDIAN_SWAP(aes->IV1R);
    *((u32 *) mctx->hash + 3) = DEU_ENDIAN_SWAP(aes->IV0R);

    /* Finalization with a partial tail block: XOR the tail into the
     * state and run one ECB pass over the combined block. */
    if (hash_final && offset) {
        aes->controlr.O = 0; //0 ECB 1 CBC 2 OFB 3 CFB 4 CTR
        crypto_xor(mctx->block, mctx->hash, offset);
        /* pad the remainder of the block with the current state */
        memcpy(p, mctx->hash + offset, (AES_BLOCK_SIZE - offset));

        aes->ID3R = INPUT_ENDIAN_SWAP(*((u32 *) mctx->block + 0));
        aes->ID2R = INPUT_ENDIAN_SWAP(*((u32 *) mctx->block + 1));
        aes->ID1R = INPUT_ENDIAN_SWAP(*((u32 *) mctx->block + 2));
        aes->ID0R = INPUT_ENDIAN_SWAP(*((u32 *) mctx->block + 3)); /* start crypto */

        while (aes->controlr.BUS) {
            // this will not take long
        }

        /* ECB output registers hold the final MAC value */
        *((u32 *) mctx->hash) = DEU_ENDIAN_SWAP(aes->OD3R);
        *((u32 *) mctx->hash + 1) = DEU_ENDIAN_SWAP(aes->OD2R);
        *((u32 *) mctx->hash + 2) = DEU_ENDIAN_SWAP(aes->OD1R);
        *((u32 *) mctx->hash + 3) = DEU_ENDIAN_SWAP(aes->OD0R);
    }

    CRTCL_SECT_END;

    if (hash_final) {
        memcpy(out, mctx->hash, AES_BLOCK_SIZE);
        /* reset the context after we finish with the hash */
        aes_cbcmac_init(desc);
    } else {
        /* intermediate drain: queued blocks consumed, keep chaining state */
        mctx->dbn = 0;
    }
    return 0;
}
1509 /*! \fn static int aes_cbcmac_final(struct crypto_tfm *tfm, u8 *out)
1510 * \ingroup IFX_aes_cbcmac_FUNCTIONS
1511 * \brief call aes_cbcmac_final_impl with hash_final true
1512 * \param tfm linux crypto algo transform
1513 * \param out final md5 hmac output value
1515 static int aes_cbcmac_final(struct shash_desc
*desc
, u8
*out
)
1517 return aes_cbcmac_final_impl(desc
, out
, true);
1520 /*! \fn void aes_cbcmac_init_tfm(struct crypto_tfm *tfm)
1521 * \ingroup IFX_aes_cbcmac_FUNCTIONS
1522 * \brief initialize pointers in aes_ctx
1523 * \param tfm linux crypto shash transform
1525 static int aes_cbcmac_init_tfm(struct crypto_tfm
*tfm
)
1527 struct aes_ctx
*mctx
= crypto_tfm_ctx(tfm
);
1528 mctx
->temp
= kzalloc(AES_BLOCK_SIZE
* AES_CBCMAC_DBN_TEMP_SIZE
, GFP_KERNEL
);
1529 if (IS_ERR(mctx
->temp
)) return PTR_ERR(mctx
->temp
);
/*! \fn void aes_cbcmac_exit_tfm(struct crypto_tfm *tfm)
 *  \ingroup IFX_aes_cbcmac_FUNCTIONS
 *  \brief free the queued-block scratch buffer allocated in aes_cbcmac_init_tfm
 *  \param tfm linux crypto shash transform
*/
static void aes_cbcmac_exit_tfm(struct crypto_tfm *tfm)
{
    struct aes_ctx *mctx = crypto_tfm_ctx(tfm);

    kfree(mctx->temp);
}
/*
 * \brief aes_cbcmac function mappings — shash registration data
 */
static struct shash_alg ifxdeu_cbcmac_aes_alg = {
    .digestsize         = AES_BLOCK_SIZE,
    .init               = aes_cbcmac_init,
    .update             = aes_cbcmac_update,
    .final              = aes_cbcmac_final,
    .setkey             = aes_cbcmac_setkey,
    .descsize           = sizeof(struct aes_ctx),
    .base               = {
        .cra_name           = "cbcmac(aes)",
        .cra_driver_name    = "ifxdeu-cbcmac(aes)",
        .cra_priority       = 400, /* prefer DEU over generic software impl */
        .cra_ctxsize        = sizeof(struct aes_ctx),
        .cra_flags          = CRYPTO_ALG_TYPE_HASH | CRYPTO_ALG_KERN_DRIVER_ONLY,
        .cra_blocksize      = AES_BLOCK_SIZE,
        .cra_module         = THIS_MODULE,
        .cra_init           = aes_cbcmac_init_tfm,
        .cra_exit           = aes_cbcmac_exit_tfm,
    }
};
/*! \fn int ifxdeu_init_aes (void)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief function to initialize AES driver — registers all DEU AES
 *         algorithm variants with the kernel crypto API
 *  \return 0 on success, negative error code from the failing registration
*/
int ifxdeu_init_aes (void)
{
    int ret = -ENOSYS;

    aes_chip_init();

    if ((ret = crypto_register_alg(&ifxdeu_aes_alg)))
        goto aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_ecb_aes_alg)))
        goto ecb_aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_cbc_aes_alg)))
        goto cbc_aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_xts_aes_alg)))
        goto xts_aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_ofb_aes_alg)))
        goto ofb_aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_cfb_aes_alg)))
        goto cfb_aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_ctr_basic_aes_alg)))
        goto ctr_basic_aes_err;

    if ((ret = crypto_register_skcipher(&ifxdeu_ctr_rfc3686_aes_alg)))
        goto ctr_rfc3686_aes_err;

    if ((ret = crypto_register_shash(&ifxdeu_cbcmac_aes_alg)))
        goto cbcmac_aes_err;

    CRTCL_SECT_INIT;

    printk (KERN_NOTICE "IFX DEU AES initialized%s%s.\n", disable_multiblock ? "" : " (multiblock)", disable_deudma ? "" : " (DMA)");
    return ret;

    /* NOTE(review): each error label unregisters the algorithm whose
     * registration just FAILED and returns immediately, rather than
     * unwinding the previously registered algorithms — confirm this is
     * intentional; a failed registration normally leaves nothing to
     * unregister, while earlier successful registrations stay in place. */
cbcmac_aes_err:
    crypto_unregister_shash(&ifxdeu_cbcmac_aes_alg);
    printk (KERN_ERR "IFX cbcmac_aes initialization failed!\n");
    return ret;
ctr_rfc3686_aes_err:
    crypto_unregister_skcipher(&ifxdeu_ctr_rfc3686_aes_alg);
    printk (KERN_ERR "IFX ctr_rfc3686_aes initialization failed!\n");
    return ret;
ctr_basic_aes_err:
    crypto_unregister_skcipher(&ifxdeu_ctr_basic_aes_alg);
    printk (KERN_ERR "IFX ctr_basic_aes initialization failed!\n");
    return ret;
cfb_aes_err:
    crypto_unregister_skcipher(&ifxdeu_cfb_aes_alg);
    printk (KERN_ERR "IFX cfb_aes initialization failed!\n");
    return ret;
ofb_aes_err:
    crypto_unregister_skcipher(&ifxdeu_ofb_aes_alg);
    printk (KERN_ERR "IFX ofb_aes initialization failed!\n");
    return ret;
xts_aes_err:
    crypto_unregister_skcipher(&ifxdeu_xts_aes_alg);
    printk (KERN_ERR "IFX xts_aes initialization failed!\n");
    return ret;
cbc_aes_err:
    crypto_unregister_skcipher(&ifxdeu_cbc_aes_alg);
    printk (KERN_ERR "IFX cbc_aes initialization failed!\n");
    return ret;
ecb_aes_err:
    crypto_unregister_skcipher(&ifxdeu_ecb_aes_alg);
    printk (KERN_ERR "IFX aes initialization failed!\n");
    return ret;
aes_err:
    printk(KERN_ERR "IFX DEU AES initialization failed!\n");
    return ret;
}
/*! \fn void ifxdeu_fini_aes (void)
 *  \ingroup IFX_AES_FUNCTIONS
 *  \brief unregister aes driver — removes every algorithm variant
 *         registered by ifxdeu_init_aes()
*/
void ifxdeu_fini_aes (void)
{
    crypto_unregister_alg (&ifxdeu_aes_alg);
    crypto_unregister_skcipher (&ifxdeu_ecb_aes_alg);
    crypto_unregister_skcipher (&ifxdeu_cbc_aes_alg);
    crypto_unregister_skcipher (&ifxdeu_xts_aes_alg);
    crypto_unregister_skcipher (&ifxdeu_ofb_aes_alg);
    crypto_unregister_skcipher (&ifxdeu_cfb_aes_alg);
    crypto_unregister_skcipher (&ifxdeu_ctr_basic_aes_alg);
    crypto_unregister_skcipher (&ifxdeu_ctr_rfc3686_aes_alg);
    crypto_unregister_shash (&ifxdeu_cbcmac_aes_alg);
}