2 * caam - Freescale FSL CAAM support for crypto API
4 * Copyright 2008-2011 Freescale Semiconductor, Inc.
7 * Based on talitos crypto API driver.
9 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
11 * --------------- ---------------
12 * | JobDesc #1 |-------------------->| ShareDesc |
13 * | *(packet 1) | | (PDB) |
14 * --------------- |------------->| (hashKey) |
16 * . | |-------->| (operation) |
17 * --------------- | | ---------------
18 * | JobDesc #2 |------| |
24 * | JobDesc #3 |------------
28 * The SharedDesc never changes for a connection unless rekeyed, but
29 * each packet will likely be in a different place. So all we need
30 * to know to process the packet is where the input is, where the
31 * output goes, and what context we want to process with. Context is
32 * in the SharedDesc, packet references in the JobDesc.
34 * So, a job desc looks like:
36 * ---------------------
38 * | ShareDesc Pointer |
45 * ---------------------
52 #include "desc_constr.h"
55 #include "sg_sw_sec4.h"
57 #include "caamalg_desc.h"
62 #define CAAM_CRA_PRIORITY 3000
63 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */
64 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
65 CTR_RFC3686_NONCE_SIZE + \
66 SHA512_DIGEST_SIZE * 2)
68 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
69 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
71 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
74 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
75 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
78 /* for print_hex_dumps with line references */
79 #define debug(format, arg...) printk(format, arg)
81 #define debug(format, arg...)
85 #include <linux/highmem.h>
87 static void dbg_dump_sg(const char *level, const char *prefix_str,
88 int prefix_type, int rowsize, int groupsize,
89 struct scatterlist *sg, size_t tlen, bool ascii)
91 struct scatterlist *it;
96 for (it = sg; it != NULL && tlen > 0 ; it = sg_next(sg)) {
98 * make sure the scatterlist's page
99 * has a valid virtual memory mapping
101 it_page = kmap_atomic(sg_page(it));
102 if (unlikely(!it_page)) {
103 printk(KERN_ERR "dbg_dump_sg: kmap failed\n");
107 buf = it_page + it->offset;
108 len = min_t(size_t, tlen, it->length);
109 print_hex_dump(level, prefix_str, prefix_type, rowsize,
110 groupsize, buf, len, ascii);
113 kunmap_atomic(it_page);
118 static struct list_head alg_list;
120 struct caam_alg_entry {
127 struct caam_aead_alg {
128 struct aead_alg aead;
129 struct caam_alg_entry caam;
134 * per-session context
137 u32 sh_desc_enc[DESC_MAX_USED_LEN];
138 u32 sh_desc_dec[DESC_MAX_USED_LEN];
139 u32 sh_desc_givenc[DESC_MAX_USED_LEN];
140 u8 key[CAAM_MAX_KEY_SIZE];
141 dma_addr_t sh_desc_enc_dma;
142 dma_addr_t sh_desc_dec_dma;
143 dma_addr_t sh_desc_givenc_dma;
145 struct device *jrdev;
146 struct alginfo adata;
147 struct alginfo cdata;
148 unsigned int authsize;
/*
 * aead_null_set_sh_desc - build encrypt/decrypt shared descriptors for
 * authentication-only (null-cipher) AEAD and sync them to the device.
 * Inlines the split auth key when it fits in the 64-word descriptor
 * buffer, otherwise references it by DMA address.
 * NOTE(review): source lines appear elided here; code kept byte-identical.
 */
151 static int aead_null_set_sh_desc(struct crypto_aead *aead)
153 struct caam_ctx *ctx = crypto_aead_ctx(aead);
154 struct device *jrdev = ctx->jrdev;
156 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
157 ctx->adata.keylen_pad;
160 * Job Descriptor and Shared Descriptors
161 * must all fit into the 64-word Descriptor h/w Buffer
163 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
164 ctx->adata.key_inline = true;
165 ctx->adata.key_virt = ctx->key;
167 ctx->adata.key_inline = false;
168 ctx->adata.key_dma = ctx->key_dma;
171 /* aead_encrypt shared descriptor */
172 desc = ctx->sh_desc_enc;
173 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize);
174 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
175 desc_bytes(desc), DMA_TO_DEVICE);
178 * Job Descriptor and Shared Descriptors
179 * must all fit into the 64-word Descriptor h/w Buffer
181 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
182 ctx->adata.key_inline = true;
183 ctx->adata.key_virt = ctx->key;
185 ctx->adata.key_inline = false;
186 ctx->adata.key_dma = ctx->key_dma;
189 /* aead_decrypt shared descriptor */
190 desc = ctx->sh_desc_dec;
191 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize);
192 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
193 desc_bytes(desc), DMA_TO_DEVICE);
/*
 * aead_set_sh_desc - (re)build the encrypt, decrypt and, for geniv
 * algorithms, givencrypt shared descriptors for an authenc-style AEAD.
 * Falls back to aead_null_set_sh_desc() when there is no cipher key.
 * For each descriptor, desc_inline_query() decides per-key whether the
 * auth (bit 0) and cipher (bit 1) keys can be inlined into the 64-word
 * descriptor buffer; otherwise they are referenced by DMA address.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
198 static int aead_set_sh_desc(struct crypto_aead *aead)
200 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
201 struct caam_aead_alg, aead);
202 unsigned int ivsize = crypto_aead_ivsize(aead);
203 struct caam_ctx *ctx = crypto_aead_ctx(aead);
204 struct device *jrdev = ctx->jrdev;
206 u32 *desc, *nonce = NULL;
208 unsigned int data_len[2];
209 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
210 OP_ALG_AAI_CTR_MOD128);
211 const bool is_rfc3686 = alg->caam.rfc3686;
216 /* NULL encryption / decryption */
217 if (!ctx->cdata.keylen)
218 return aead_null_set_sh_desc(aead);
221 * AES-CTR needs to load IV in CONTEXT1 reg
222 * at an offset of 128bits (16bytes)
223 * CONTEXT1[255:128] = IV
230 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
/* rfc3686 nonce sits immediately after the split auth key + AES key */
233 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
234 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
235 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
238 data_len[0] = ctx->adata.keylen_pad;
239 data_len[1] = ctx->cdata.keylen;
245 * Job Descriptor and Shared Descriptors
246 * must all fit into the 64-word Descriptor h/w Buffer
248 if (desc_inline_query(DESC_AEAD_ENC_LEN +
249 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
250 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
251 ARRAY_SIZE(data_len)) < 0)
255 ctx->adata.key_virt = ctx->key;
257 ctx->adata.key_dma = ctx->key_dma;
260 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
262 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
264 ctx->adata.key_inline = !!(inl_mask & 1);
265 ctx->cdata.key_inline = !!(inl_mask & 2);
267 /* aead_encrypt shared descriptor */
268 desc = ctx->sh_desc_enc;
269 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
270 ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
272 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
273 desc_bytes(desc), DMA_TO_DEVICE);
277 * Job Descriptor and Shared Descriptors
278 * must all fit into the 64-word Descriptor h/w Buffer
280 if (desc_inline_query(DESC_AEAD_DEC_LEN +
281 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
282 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
283 ARRAY_SIZE(data_len)) < 0)
287 ctx->adata.key_virt = ctx->key;
289 ctx->adata.key_dma = ctx->key_dma;
292 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
294 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
296 ctx->adata.key_inline = !!(inl_mask & 1);
297 ctx->cdata.key_inline = !!(inl_mask & 2);
299 /* aead_decrypt shared descriptor */
300 desc = ctx->sh_desc_dec;
301 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
302 ctx->authsize, alg->caam.geniv, is_rfc3686,
303 nonce, ctx1_iv_off, false);
304 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
305 desc_bytes(desc), DMA_TO_DEVICE);
/* non-geniv algorithms are done; geniv also needs givencrypt below */
307 if (!alg->caam.geniv)
311 * Job Descriptor and Shared Descriptors
312 * must all fit into the 64-word Descriptor h/w Buffer
314 if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
315 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
316 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
317 ARRAY_SIZE(data_len)) < 0)
321 ctx->adata.key_virt = ctx->key;
323 ctx->adata.key_dma = ctx->key_dma;
326 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
328 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
330 ctx->adata.key_inline = !!(inl_mask & 1);
331 ctx->cdata.key_inline = !!(inl_mask & 2);
333 /* aead_givencrypt shared descriptor */
334 desc = ctx->sh_desc_enc;
335 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
336 ctx->authsize, is_rfc3686, nonce,
338 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
339 desc_bytes(desc), DMA_TO_DEVICE);
/*
 * aead_setauthsize - crypto API .setauthsize hook: record the new ICV
 * length and rebuild the shared descriptors that embed it.
 */
345 static int aead_setauthsize(struct crypto_aead *authenc,
346 unsigned int authsize)
348 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
350 ctx->authsize = authsize;
351 aead_set_sh_desc(authenc);
/*
 * gcm_set_sh_desc - build AES-GCM encrypt/decrypt shared descriptors and
 * sync them to the device. No-op until both key and authsize are set.
 * The key is inlined when the descriptor fits in the 64-word buffer.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
356 static int gcm_set_sh_desc(struct crypto_aead *aead)
358 struct caam_ctx *ctx = crypto_aead_ctx(aead);
359 struct device *jrdev = ctx->jrdev;
361 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
364 if (!ctx->cdata.keylen || !ctx->authsize)
368 * AES GCM encrypt shared descriptor
369 * Job Descriptor and Shared Descriptor
370 * must fit into the 64-word Descriptor h/w Buffer
372 if (rem_bytes >= DESC_GCM_ENC_LEN) {
373 ctx->cdata.key_inline = true;
374 ctx->cdata.key_virt = ctx->key;
376 ctx->cdata.key_inline = false;
377 ctx->cdata.key_dma = ctx->key_dma;
380 desc = ctx->sh_desc_enc;
381 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
382 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
383 desc_bytes(desc), DMA_TO_DEVICE);
386 * Job Descriptor and Shared Descriptors
387 * must all fit into the 64-word Descriptor h/w Buffer
389 if (rem_bytes >= DESC_GCM_DEC_LEN) {
390 ctx->cdata.key_inline = true;
391 ctx->cdata.key_virt = ctx->key;
393 ctx->cdata.key_inline = false;
394 ctx->cdata.key_dma = ctx->key_dma;
397 desc = ctx->sh_desc_dec;
398 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
399 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
400 desc_bytes(desc), DMA_TO_DEVICE);
/*
 * gcm_setauthsize - record new GCM tag length and rebuild descriptors.
 */
405 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
407 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
409 ctx->authsize = authsize;
410 gcm_set_sh_desc(authenc);
/*
 * rfc4106_set_sh_desc - build RFC4106 (GCM for IPsec ESP) encrypt/decrypt
 * shared descriptors and sync them to the device. No-op until both key
 * and authsize are set; key is inlined when it fits.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
415 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
417 struct caam_ctx *ctx = crypto_aead_ctx(aead);
418 struct device *jrdev = ctx->jrdev;
420 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
423 if (!ctx->cdata.keylen || !ctx->authsize)
427 * RFC4106 encrypt shared descriptor
428 * Job Descriptor and Shared Descriptor
429 * must fit into the 64-word Descriptor h/w Buffer
431 if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
432 ctx->cdata.key_inline = true;
433 ctx->cdata.key_virt = ctx->key;
435 ctx->cdata.key_inline = false;
436 ctx->cdata.key_dma = ctx->key_dma;
439 desc = ctx->sh_desc_enc;
440 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
441 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
442 desc_bytes(desc), DMA_TO_DEVICE);
445 * Job Descriptor and Shared Descriptors
446 * must all fit into the 64-word Descriptor h/w Buffer
448 if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
449 ctx->cdata.key_inline = true;
450 ctx->cdata.key_virt = ctx->key;
452 ctx->cdata.key_inline = false;
453 ctx->cdata.key_dma = ctx->key_dma;
456 desc = ctx->sh_desc_dec;
457 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
458 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
459 desc_bytes(desc), DMA_TO_DEVICE);
/*
 * rfc4106_setauthsize - record new ICV length and rebuild descriptors.
 */
464 static int rfc4106_setauthsize(struct crypto_aead *authenc,
465 unsigned int authsize)
467 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
469 ctx->authsize = authsize;
470 rfc4106_set_sh_desc(authenc);
/*
 * rfc4543_set_sh_desc - build RFC4543 (GMAC) encrypt/decrypt shared
 * descriptors and sync them to the device. No-op until both key and
 * authsize are set; key is inlined when it fits.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
475 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
477 struct caam_ctx *ctx = crypto_aead_ctx(aead);
478 struct device *jrdev = ctx->jrdev;
480 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
483 if (!ctx->cdata.keylen || !ctx->authsize)
487 * RFC4543 encrypt shared descriptor
488 * Job Descriptor and Shared Descriptor
489 * must fit into the 64-word Descriptor h/w Buffer
491 if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
492 ctx->cdata.key_inline = true;
493 ctx->cdata.key_virt = ctx->key;
495 ctx->cdata.key_inline = false;
496 ctx->cdata.key_dma = ctx->key_dma;
499 desc = ctx->sh_desc_enc;
500 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
501 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
502 desc_bytes(desc), DMA_TO_DEVICE);
505 * Job Descriptor and Shared Descriptors
506 * must all fit into the 64-word Descriptor h/w Buffer
508 if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
509 ctx->cdata.key_inline = true;
510 ctx->cdata.key_virt = ctx->key;
512 ctx->cdata.key_inline = false;
513 ctx->cdata.key_dma = ctx->key_dma;
516 desc = ctx->sh_desc_dec;
517 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
518 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
519 desc_bytes(desc), DMA_TO_DEVICE);
/*
 * rfc4543_setauthsize - record new ICV length and rebuild descriptors.
 */
524 static int rfc4543_setauthsize(struct crypto_aead *authenc,
525 unsigned int authsize)
527 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
529 ctx->authsize = authsize;
530 rfc4543_set_sh_desc(authenc);
/*
 * aead_setkey - crypto API .setkey for authenc AEADs. Splits the blob
 * into auth + cipher keys, derives the split MDHA key into ctx->key,
 * appends the raw cipher key, DMA-syncs the result and rebuilds the
 * shared descriptors. On a malformed key sets BAD_KEY_LEN.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
535 static int aead_setkey(struct crypto_aead *aead,
536 const u8 *key, unsigned int keylen)
538 struct caam_ctx *ctx = crypto_aead_ctx(aead);
539 struct device *jrdev = ctx->jrdev;
540 struct crypto_authenc_keys keys;
543 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
547 printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
548 keys.authkeylen + keys.enckeylen, keys.enckeylen,
550 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
551 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
554 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
555 keys.authkeylen, CAAM_MAX_KEY_SIZE -
561 /* postpend encryption key to auth split key */
562 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
563 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
564 keys.enckeylen, DMA_TO_DEVICE);
566 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
567 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
568 ctx->adata.keylen_pad + keys.enckeylen, 1);
570 ctx->cdata.keylen = keys.enckeylen;
571 return aead_set_sh_desc(aead);
573 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
/*
 * gcm_setkey - copy the AES key into ctx->key, DMA-sync it, and rebuild
 * the GCM shared descriptors.
 */
577 static int gcm_setkey(struct crypto_aead *aead,
578 const u8 *key, unsigned int keylen)
580 struct caam_ctx *ctx = crypto_aead_ctx(aead);
581 struct device *jrdev = ctx->jrdev;
584 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
585 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
588 memcpy(ctx->key, key, keylen);
589 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
590 ctx->cdata.keylen = keylen;
592 return gcm_set_sh_desc(aead);
/*
 * rfc4106_setkey - copy the key material (AES key + 4-byte nonce salt)
 * into ctx->key, DMA-sync the AES portion, and rebuild descriptors.
 */
595 static int rfc4106_setkey(struct crypto_aead *aead,
596 const u8 *key, unsigned int keylen)
598 struct caam_ctx *ctx = crypto_aead_ctx(aead);
599 struct device *jrdev = ctx->jrdev;
605 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
606 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
609 memcpy(ctx->key, key, keylen);
612 * The last four bytes of the key material are used as the salt value
613 * in the nonce. Update the AES key length.
615 ctx->cdata.keylen = keylen - 4;
616 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
618 return rfc4106_set_sh_desc(aead);
/*
 * rfc4543_setkey - same layout as rfc4106_setkey: AES key followed by a
 * 4-byte nonce salt; sync the AES portion and rebuild descriptors.
 */
621 static int rfc4543_setkey(struct crypto_aead *aead,
622 const u8 *key, unsigned int keylen)
624 struct caam_ctx *ctx = crypto_aead_ctx(aead);
625 struct device *jrdev = ctx->jrdev;
631 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
632 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
635 memcpy(ctx->key, key, keylen);
638 * The last four bytes of the key material are used as the salt value
639 * in the nonce. Update the AES key length.
641 ctx->cdata.keylen = keylen - 4;
642 dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
644 return rfc4543_set_sh_desc(aead);
/*
 * ablkcipher_setkey - store the cipher key and build the encrypt,
 * decrypt and givencrypt shared descriptors (key always inlined).
 * For rfc3686(ctr(aes)) the trailing nonce is split off the key and the
 * IV is placed at CONTEXT1 offset 16.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
647 static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
648 const u8 *key, unsigned int keylen)
650 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
651 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
652 const char *alg_name = crypto_tfm_alg_name(tfm);
653 struct device *jrdev = ctx->jrdev;
654 unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
657 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
658 OP_ALG_AAI_CTR_MOD128);
659 const bool is_rfc3686 = (ctr_mode &&
660 (strstr(alg_name, "rfc3686") != NULL));
662 memcpy(ctx->key, key, keylen);
664 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
665 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
668 * AES-CTR needs to load IV in CONTEXT1 reg
669 * at an offset of 128bits (16bytes)
670 * CONTEXT1[255:128] = IV
677 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
678 * | *key = {KEY, NONCE}
681 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
682 keylen -= CTR_RFC3686_NONCE_SIZE;
685 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
686 ctx->cdata.keylen = keylen;
687 ctx->cdata.key_virt = ctx->key;
688 ctx->cdata.key_inline = true;
690 /* ablkcipher_encrypt shared descriptor */
691 desc = ctx->sh_desc_enc;
692 cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
694 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
695 desc_bytes(desc), DMA_TO_DEVICE);
697 /* ablkcipher_decrypt shared descriptor */
698 desc = ctx->sh_desc_dec;
699 cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
701 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
702 desc_bytes(desc), DMA_TO_DEVICE);
704 /* ablkcipher_givencrypt shared descriptor */
705 desc = ctx->sh_desc_givenc;
706 cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
708 dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
709 desc_bytes(desc), DMA_TO_DEVICE);
/*
 * xts_ablkcipher_setkey - validate the XTS double-length key (2x AES min
 * or 2x AES max), store and sync it, and build the encrypt/decrypt
 * shared descriptors with the key inlined.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
714 static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
715 const u8 *key, unsigned int keylen)
717 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
718 struct device *jrdev = ctx->jrdev;
721 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
722 crypto_ablkcipher_set_flags(ablkcipher,
723 CRYPTO_TFM_RES_BAD_KEY_LEN);
724 dev_err(jrdev, "key size mismatch\n");
728 memcpy(ctx->key, key, keylen);
729 dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, DMA_TO_DEVICE);
730 ctx->cdata.keylen = keylen;
731 ctx->cdata.key_virt = ctx->key;
732 ctx->cdata.key_inline = true;
734 /* xts_ablkcipher_encrypt shared descriptor */
735 desc = ctx->sh_desc_enc;
736 cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
737 dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
738 desc_bytes(desc), DMA_TO_DEVICE);
740 /* xts_ablkcipher_decrypt shared descriptor */
741 desc = ctx->sh_desc_dec;
742 cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
743 dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
744 desc_bytes(desc), DMA_TO_DEVICE);
750 * aead_edesc - s/w-extended aead descriptor
751 * @src_nents: number of segments in input s/w scatterlist
752 * @dst_nents: number of segments in output s/w scatterlist
753 * @sec4_sg_bytes: length of dma mapped sec4_sg space
754 * @sec4_sg_dma: bus physical mapped address of h/w link table
755 * @sec4_sg: pointer to h/w link table
756 * @hw_desc: the h/w job descriptor followed by any referenced link tables
762 dma_addr_t sec4_sg_dma;
763 struct sec4_sg_entry *sec4_sg;
768 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
769 * @src_nents: number of segments in input s/w scatterlist
770 * @dst_nents: number of segments in output s/w scatterlist
771 * @iv_dma: dma address of iv for checking continuity and link table
772 * @sec4_sg_bytes: length of dma mapped sec4_sg space
773 * @sec4_sg_dma: bus physical mapped address of h/w link table
774 * @sec4_sg: pointer to h/w link table
775 * @hw_desc: the h/w job descriptor followed by any referenced link tables
777 struct ablkcipher_edesc {
782 dma_addr_t sec4_sg_dma;
783 struct sec4_sg_entry *sec4_sg;
/*
 * caam_unmap - common DMA teardown: unmap src/dst s/g lists (separate
 * TO_DEVICE/FROM_DEVICE mappings when src != dst, one BIDIRECTIONAL
 * mapping otherwise), plus the optional IV buffer and sec4 link table.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
787 static void caam_unmap(struct device *dev, struct scatterlist *src,
788 struct scatterlist *dst, int src_nents,
790 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
795 dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
796 dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
798 dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
802 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
804 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
/*
 * aead_unmap - release all DMA resources of an aead_edesc (no IV mapping
 * for AEAD, hence the 0/0 iv_dma/ivsize arguments).
 */
808 static void aead_unmap(struct device *dev,
809 struct aead_edesc *edesc,
810 struct aead_request *req)
812 caam_unmap(dev, req->src, req->dst,
813 edesc->src_nents, edesc->dst_nents, 0, 0,
814 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
/*
 * ablkcipher_unmap - release all DMA resources of an ablkcipher_edesc,
 * including the separately mapped IV.
 */
817 static void ablkcipher_unmap(struct device *dev,
818 struct ablkcipher_edesc *edesc,
819 struct ablkcipher_request *req)
821 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
822 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
824 caam_unmap(dev, req->src, req->dst,
825 edesc->src_nents, edesc->dst_nents,
826 edesc->iv_dma, ivsize,
827 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
/*
 * aead_encrypt_done - job ring completion callback for AEAD encryption:
 * translate the h/w status, unmap DMA, and complete the request.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
830 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
833 struct aead_request *req = context;
834 struct aead_edesc *edesc;
837 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
840 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
843 caam_jr_strstatus(jrdev, err);
845 aead_unmap(jrdev, edesc, req);
849 aead_request_complete(req, err);
/*
 * aead_decrypt_done - job ring completion callback for AEAD decryption.
 * Same as the encrypt path, but a hardware ICV-check failure is mapped
 * to -EBADMSG for the crypto API.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
852 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
855 struct aead_request *req = context;
856 struct aead_edesc *edesc;
859 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
862 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
865 caam_jr_strstatus(jrdev, err);
867 aead_unmap(jrdev, edesc, req);
870 * verify hw auth check passed else return -EBADMSG
872 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
877 aead_request_complete(req, err);
/*
 * ablkcipher_encrypt_done - completion callback for ablkcipher encrypt:
 * report status, unmap DMA, copy the last ciphertext block back into
 * req->info (chaining IV expected by the crypto API, e.g. for CTS),
 * then complete the request.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
880 static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
883 struct ablkcipher_request *req = context;
884 struct ablkcipher_edesc *edesc;
885 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
886 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
889 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
892 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
895 caam_jr_strstatus(jrdev, err);
898 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
899 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
900 edesc->src_nents > 1 ? 100 : ivsize, 1);
901 dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
902 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
903 edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
906 ablkcipher_unmap(jrdev, edesc, req);
909 * The crypto API expects us to set the IV (req->info) to the last
910 * ciphertext block. This is used e.g. by the CTS mode.
912 scatterwalk_map_and_copy(req->info, req->dst, req->nbytes - ivsize,
917 ablkcipher_request_complete(req, err);
/*
 * ablkcipher_decrypt_done - completion callback for ablkcipher decrypt.
 * Mirrors the encrypt path, but the chaining IV comes from the last
 * block of the *source* (ciphertext) buffer.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
920 static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
923 struct ablkcipher_request *req = context;
924 struct ablkcipher_edesc *edesc;
925 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
926 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
929 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
932 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
934 caam_jr_strstatus(jrdev, err);
937 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
938 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
940 dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
941 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
942 edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
945 ablkcipher_unmap(jrdev, edesc, req);
948 * The crypto API expects us to set the IV (req->info) to the last
951 scatterwalk_map_and_copy(req->info, req->src, req->nbytes - ivsize,
956 ablkcipher_request_complete(req, err);
960 * Fill in aead job descriptor
/*
 * init_aead_job - build the job descriptor referencing the shared
 * encrypt/decrypt descriptor, then append SEQ IN/OUT pointers (direct
 * DMA address when contiguous, sec4 link table with LDST_SGF otherwise).
 * Output length is cryptlen + authsize on encrypt, - authsize on
 * decrypt; assoclen is loaded into REG3 for the shared descriptor.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
962 static void init_aead_job(struct aead_request *req,
963 struct aead_edesc *edesc,
964 bool all_contig, bool encrypt)
966 struct crypto_aead *aead = crypto_aead_reqtfm(req);
967 struct caam_ctx *ctx = crypto_aead_ctx(aead);
968 int authsize = ctx->authsize;
969 u32 *desc = edesc->hw_desc;
970 u32 out_options, in_options;
971 dma_addr_t dst_dma, src_dma;
972 int len, sec4_sg_index = 0;
976 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
977 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
979 len = desc_len(sh_desc);
980 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
983 src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
986 src_dma = edesc->sec4_sg_dma;
987 sec4_sg_index += edesc->src_nents;
988 in_options = LDST_SGF;
991 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
995 out_options = in_options;
997 if (unlikely(req->src != req->dst)) {
998 if (edesc->dst_nents == 1) {
999 dst_dma = sg_dma_address(req->dst);
1001 dst_dma = edesc->sec4_sg_dma +
1003 sizeof(struct sec4_sg_entry);
1004 out_options = LDST_SGF;
1009 append_seq_out_ptr(desc, dst_dma,
1010 req->assoclen + req->cryptlen + authsize,
1013 append_seq_out_ptr(desc, dst_dma,
1014 req->assoclen + req->cryptlen - authsize,
1017 /* REG3 = assoclen */
1018 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
/*
 * init_gcm_job - extend the common AEAD job descriptor for GCM: append
 * the 12-byte IV as an immediate FIFO LOAD (flagged LAST1 when there is
 * no data at all) and the salt for non-generic (rfc4106-style) GCM.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
1021 static void init_gcm_job(struct aead_request *req,
1022 struct aead_edesc *edesc,
1023 bool all_contig, bool encrypt)
1025 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1026 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1027 unsigned int ivsize = crypto_aead_ivsize(aead);
1028 u32 *desc = edesc->hw_desc;
1029 bool generic_gcm = (ivsize == 12);
1032 init_aead_job(req, edesc, all_contig, encrypt);
1034 /* BUG This should not be specific to generic GCM. */
1036 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1037 last = FIFOLD_TYPE_LAST1;
1040 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
1041 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
/* salt (last 4 bytes of key material) precedes the IV for rfc4106 */
1044 append_data(desc, ctx->key + ctx->cdata.keylen, 4);
1046 append_data(desc, req->iv, ivsize);
1047 /* End of blank commands */
/*
 * init_authenc_job - extend the common AEAD job descriptor for authenc
 * algorithms: when the driver must supply the IV (rfc3686 encrypt, or
 * any non-geniv algorithm) it is loaded as an immediate into CONTEXT1
 * at the mode-dependent offset (16 + nonce size for rfc3686).
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
1050 static void init_authenc_job(struct aead_request *req,
1051 struct aead_edesc *edesc,
1052 bool all_contig, bool encrypt)
1054 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1055 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1056 struct caam_aead_alg, aead);
1057 unsigned int ivsize = crypto_aead_ivsize(aead);
1058 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1059 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
1060 OP_ALG_AAI_CTR_MOD128);
1061 const bool is_rfc3686 = alg->caam.rfc3686;
1062 u32 *desc = edesc->hw_desc;
1066 * AES-CTR needs to load IV in CONTEXT1 reg
1067 * at an offset of 128bits (16bytes)
1068 * CONTEXT1[255:128] = IV
1075 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1078 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
1080 init_aead_job(req, edesc, all_contig, encrypt);
1082 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
1083 append_load_as_imm(desc, req->iv, ivsize,
1085 LDST_SRCDST_BYTE_CONTEXT |
1086 (ivoffset << LDST_OFFSET_SHIFT));
1090 * Fill in ablkcipher job descriptor
/*
 * init_ablkcipher_job - build the ablkcipher job descriptor. Input is
 * IV + payload (IV first, via iv_dma or the sec4 link table); output is
 * either in-place (src == dst) or the separate destination, again via
 * direct address or link table with LDST_SGF.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
1092 static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
1093 struct ablkcipher_edesc *edesc,
1094 struct ablkcipher_request *req,
1097 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1098 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1099 u32 *desc = edesc->hw_desc;
1100 u32 out_options = 0, in_options;
1101 dma_addr_t dst_dma, src_dma;
1102 int len, sec4_sg_index = 0;
1105 print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
1106 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
1108 pr_err("asked=%d, nbytes%d\n",
1109 (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes);
1110 dbg_dump_sg(KERN_ERR, "src @"__stringify(__LINE__)": ",
1111 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1112 edesc->src_nents > 1 ? 100 : req->nbytes, 1);
1115 len = desc_len(sh_desc);
1116 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1119 src_dma = edesc->iv_dma;
1122 src_dma = edesc->sec4_sg_dma;
/* +1 accounts for the IV entry preceding the src entries */
1123 sec4_sg_index += edesc->src_nents + 1;
1124 in_options = LDST_SGF;
1126 append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);
1128 if (likely(req->src == req->dst)) {
1129 if (edesc->src_nents == 1 && iv_contig) {
1130 dst_dma = sg_dma_address(req->src);
1132 dst_dma = edesc->sec4_sg_dma +
1133 sizeof(struct sec4_sg_entry);
1134 out_options = LDST_SGF;
1137 if (edesc->dst_nents == 1) {
1138 dst_dma = sg_dma_address(req->dst);
1140 dst_dma = edesc->sec4_sg_dma +
1141 sec4_sg_index * sizeof(struct sec4_sg_entry);
1142 out_options = LDST_SGF;
1145 append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
1149 * Fill in ablkcipher givencrypt job descriptor
/*
 * init_ablkcipher_giv_job - like init_ablkcipher_job but for
 * givencrypt: input is the payload only; the generated IV is written
 * first in the output sequence (iv_dma, or via the link table).
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
1151 static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
1152 struct ablkcipher_edesc *edesc,
1153 struct ablkcipher_request *req,
1156 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1157 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1158 u32 *desc = edesc->hw_desc;
1159 u32 out_options, in_options;
1160 dma_addr_t dst_dma, src_dma;
1161 int len, sec4_sg_index = 0;
1164 print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
1165 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
1167 dbg_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ",
1168 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1169 edesc->src_nents > 1 ? 100 : req->nbytes, 1);
1172 len = desc_len(sh_desc);
1173 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1175 if (edesc->src_nents == 1) {
1176 src_dma = sg_dma_address(req->src);
1179 src_dma = edesc->sec4_sg_dma;
1180 sec4_sg_index += edesc->src_nents;
1181 in_options = LDST_SGF;
1183 append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);
1186 dst_dma = edesc->iv_dma;
1189 dst_dma = edesc->sec4_sg_dma +
1190 sec4_sg_index * sizeof(struct sec4_sg_entry);
1191 out_options = LDST_SGF;
1193 append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
1197 * allocate and map the aead extended descriptor
/*
 * aead_edesc_alloc - count and DMA-map the src/dst s/g lists (sized by
 * assoclen + cryptlen, +/- authsize depending on direction and whether
 * src == dst), allocate the edesc with trailing hw desc + sec4 link
 * table space, populate and map the link table. Returns the edesc or
 * ERR_PTR; *all_contig_ptr reports whether the source is one segment.
 * NOTE(review): source lines appear elided; code kept byte-identical.
 */
1199 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1200 int desc_bytes, bool *all_contig_ptr,
1203 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1204 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1205 struct device *jrdev = ctx->jrdev;
1206 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1207 GFP_KERNEL : GFP_ATOMIC;
1208 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1209 struct aead_edesc *edesc;
1210 int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
1211 unsigned int authsize = ctx->authsize;
1213 if (unlikely(req->dst != req->src)) {
1214 src_nents = sg_nents_for_len(req->src, req->assoclen +
1216 if (unlikely(src_nents < 0)) {
1217 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1218 req->assoclen + req->cryptlen);
1219 return ERR_PTR(src_nents);
1222 dst_nents = sg_nents_for_len(req->dst, req->assoclen +
1224 (encrypt ? authsize :
1226 if (unlikely(dst_nents < 0)) {
1227 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1228 req->assoclen + req->cryptlen +
1229 (encrypt ? authsize : (-authsize)));
1230 return ERR_PTR(dst_nents);
1233 src_nents = sg_nents_for_len(req->src, req->assoclen +
1235 (encrypt ? authsize : 0));
1236 if (unlikely(src_nents < 0)) {
1237 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1238 req->assoclen + req->cryptlen +
1239 (encrypt ? authsize : 0));
1240 return ERR_PTR(src_nents);
1244 if (likely(req->src == req->dst)) {
1245 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1247 if (unlikely(!mapped_src_nents)) {
1248 dev_err(jrdev, "unable to map source\n");
1249 return ERR_PTR(-ENOMEM);
1252 /* Cover also the case of null (zero length) input data */
1254 mapped_src_nents = dma_map_sg(jrdev, req->src,
1255 src_nents, DMA_TO_DEVICE);
1256 if (unlikely(!mapped_src_nents)) {
1257 dev_err(jrdev, "unable to map source\n");
1258 return ERR_PTR(-ENOMEM);
1261 mapped_src_nents = 0;
1264 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1266 if (unlikely(!mapped_dst_nents)) {
1267 dev_err(jrdev, "unable to map destination\n");
1268 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1269 return ERR_PTR(-ENOMEM);
/* link table entries only needed for multi-segment lists */
1273 sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
1274 sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1275 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1277 /* allocate space for base edesc and hw desc commands, link tables */
1278 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1281 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1283 return ERR_PTR(-ENOMEM);
1286 edesc->src_nents = src_nents;
1287 edesc->dst_nents = dst_nents;
1288 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1290 *all_contig_ptr = !(mapped_src_nents > 1);
1293 if (mapped_src_nents > 1) {
1294 sg_to_sec4_sg_last(req->src, mapped_src_nents,
1295 edesc->sec4_sg + sec4_sg_index, 0);
1296 sec4_sg_index += mapped_src_nents;
1298 if (mapped_dst_nents > 1) {
1299 sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1300 edesc->sec4_sg + sec4_sg_index, 0);
1306 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1307 sec4_sg_bytes, DMA_TO_DEVICE);
1308 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1309 dev_err(jrdev, "unable to map S/G table\n");
1310 aead_unmap(jrdev, edesc, req);
1312 return ERR_PTR(-ENOMEM);
1315 edesc->sec4_sg_bytes = sec4_sg_bytes;
/*
 * gcm_encrypt - process one AES-GCM AEAD encryption request
 * @req: aead_request to encrypt
 *
 * Allocates a GCM-sized extended descriptor, builds the job descriptor
 * via init_gcm_job() and enqueues it on the CAAM job ring with
 * aead_encrypt_done() as completion callback.
 * NOTE(review): several original lines (error branches, closing braces)
 * are elided from this view.
 */
1320 static int gcm_encrypt(struct aead_request *req)
1322 struct aead_edesc *edesc;
1323 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1324 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1325 struct device *jrdev = ctx->jrdev;
1330 /* allocate extended descriptor */
1331 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
1333 return PTR_ERR(edesc);
1335 /* Create and submit job descriptor */
1336 init_gcm_job(req, edesc, all_contig, true);
/* debug dump of the constructed hardware job descriptor */
1338 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1339 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1340 desc_bytes(edesc->hw_desc), 1);
1343 desc = edesc->hw_desc;
/* hand off to the job ring; aead_encrypt_done() runs on completion */
1344 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
/* tear down DMA mappings (taken on the failure branch; branch elided here) */
1348 aead_unmap(jrdev, edesc, req);
/*
 * ipsec_gcm_encrypt - RFC4106/RFC4543 encrypt entry point
 * @req: aead_request to encrypt
 *
 * Rejects requests with less than 8 bytes of associated data
 * (presumably the IPsec SPI + sequence number — TODO confirm against
 * the RFC4106 spec), then delegates to gcm_encrypt().
 */
1355 static int ipsec_gcm_encrypt(struct aead_request *req)
1357 if (req->assoclen < 8)
1360 return gcm_encrypt(req);
/*
 * aead_encrypt - process one authenc (cipher + HMAC) AEAD encryption request
 * @req: aead_request to encrypt
 *
 * Same flow as gcm_encrypt() but sized for an authenc job and built via
 * init_authenc_job().  NOTE(review): error branches and closing braces
 * are elided from this view.
 */
1363 static int aead_encrypt(struct aead_request *req)
1365 struct aead_edesc *edesc;
1366 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1367 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1368 struct device *jrdev = ctx->jrdev;
1373 /* allocate extended descriptor */
1374 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1377 return PTR_ERR(edesc);
1379 /* Create and submit job descriptor */
1380 init_authenc_job(req, edesc, all_contig, true);
/* debug dump of the constructed hardware job descriptor */
1382 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1383 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1384 desc_bytes(edesc->hw_desc), 1);
1387 desc = edesc->hw_desc;
/* submit; aead_encrypt_done() is the completion callback */
1388 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
/* tear down DMA mappings (taken on the failure branch; branch elided here) */
1392 aead_unmap(jrdev, edesc, req);
/*
 * gcm_decrypt - process one AES-GCM AEAD decryption request
 * @req: aead_request to decrypt
 *
 * Mirror of gcm_encrypt(): allocates the extended descriptor with
 * encrypt == false, builds the job via init_gcm_job() and enqueues it
 * with aead_decrypt_done() as completion callback.
 * NOTE(review): error branches and closing braces are elided here.
 */
1399 static int gcm_decrypt(struct aead_request *req)
1401 struct aead_edesc *edesc;
1402 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1403 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1404 struct device *jrdev = ctx->jrdev;
1409 /* allocate extended descriptor */
1410 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
1412 return PTR_ERR(edesc);
1414 /* Create and submit job descriptor*/
1415 init_gcm_job(req, edesc, all_contig, false);
/* debug dump of the constructed hardware job descriptor */
1417 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1418 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1419 desc_bytes(edesc->hw_desc), 1);
1422 desc = edesc->hw_desc;
/* submit; aead_decrypt_done() is the completion callback */
1423 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
/* tear down DMA mappings (taken on the failure branch; branch elided here) */
1427 aead_unmap(jrdev, edesc, req);
/*
 * ipsec_gcm_decrypt - RFC4106/RFC4543 decrypt entry point
 * @req: aead_request to decrypt
 *
 * Rejects requests with less than 8 bytes of associated data, then
 * delegates to gcm_decrypt().  (Rejection return value elided here.)
 */
1434 static int ipsec_gcm_decrypt(struct aead_request *req)
1436 if (req->assoclen < 8)
1439 return gcm_decrypt(req);
/*
 * aead_decrypt - process one authenc (cipher + HMAC) AEAD decryption request
 * @req: aead_request to decrypt
 *
 * Debug-dumps the source scatterlist, allocates the extended descriptor
 * with encrypt == false, builds the job via init_authenc_job() and
 * enqueues it with aead_decrypt_done() as completion callback.
 * NOTE(review): error branches and closing braces are elided here.
 */
1442 static int aead_decrypt(struct aead_request *req)
1444 struct aead_edesc *edesc;
1445 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1446 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1447 struct device *jrdev = ctx->jrdev;
/* debug dump of assoc data + ciphertext in the source scatterlist */
1453 dbg_dump_sg(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
1454 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1455 req->assoclen + req->cryptlen, 1);
1458 /* allocate extended descriptor */
1459 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1460 &all_contig, false);
1462 return PTR_ERR(edesc);
1464 /* Create and submit job descriptor*/
1465 init_authenc_job(req, edesc, all_contig, false);
/* debug dump of the constructed hardware job descriptor */
1467 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1468 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1469 desc_bytes(edesc->hw_desc), 1);
1472 desc = edesc->hw_desc;
/* submit; aead_decrypt_done() is the completion callback */
1473 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
/* tear down DMA mappings (taken on the failure branch; branch elided here) */
1477 aead_unmap(jrdev, edesc, req);
1485 * allocate and map the ablkcipher extended descriptor for ablkcipher
/*
 * Counts and DMA-maps the source/destination scatterlists and the IV
 * (req->info), decides whether the IV is contiguous with the source
 * (in_contig), builds the sec4 S/G link table right after the hw
 * descriptor in the edesc allocation, and DMA-maps that table.  On any
 * failure every mapping taken so far is undone before returning
 * ERR_PTR().  NOTE(review): many lines (closing braces, some dma
 * direction arguments, the return statement) are elided from this view.
 */
1487 static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
1488 *req, int desc_bytes,
1489 bool *iv_contig_out)
1491 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1492 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1493 struct device *jrdev = ctx->jrdev;
/* GFP_KERNEL only when the caller allows sleeping */
1494 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1495 GFP_KERNEL : GFP_ATOMIC;
1496 int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1497 struct ablkcipher_edesc *edesc;
1498 dma_addr_t iv_dma = 0;
1500 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1501 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
/* count S/G entries needed to cover req->nbytes of payload */
1503 src_nents = sg_nents_for_len(req->src, req->nbytes);
1504 if (unlikely(src_nents < 0)) {
1505 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1507 return ERR_PTR(src_nents);
1510 if (req->dst != req->src) {
1511 dst_nents = sg_nents_for_len(req->dst, req->nbytes);
1512 if (unlikely(dst_nents < 0)) {
1513 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1515 return ERR_PTR(dst_nents);
/* in-place: one bidirectional mapping covers both src and dst */
1519 if (likely(req->src == req->dst)) {
1520 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1522 if (unlikely(!mapped_src_nents)) {
1523 dev_err(jrdev, "unable to map source\n");
1524 return ERR_PTR(-ENOMEM);
/* out-of-place: map src and dst separately */
1527 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1529 if (unlikely(!mapped_src_nents)) {
1530 dev_err(jrdev, "unable to map source\n");
1531 return ERR_PTR(-ENOMEM);
1534 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1536 if (unlikely(!mapped_dst_nents)) {
1537 dev_err(jrdev, "unable to map destination\n");
/* undo the source mapping before bailing out */
1538 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1539 return ERR_PTR(-ENOMEM);
/* map the IV buffer for the hardware to read */
1543 iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
1544 if (dma_mapping_error(jrdev, iv_dma)) {
1545 dev_err(jrdev, "unable to map IV\n");
1546 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1548 return ERR_PTR(-ENOMEM);
/* IV is contiguous with src only if it sits immediately before it */
1551 if (mapped_src_nents == 1 &&
1552 iv_dma + ivsize == sg_dma_address(req->src)) {
/* non-contiguous: one table entry for the IV plus one per src segment */
1557 sec4_sg_ents = 1 + mapped_src_nents;
1559 dst_sg_idx = sec4_sg_ents;
1560 sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1561 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1563 /* allocate space for base edesc and hw desc commands, link tables */
1564 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1567 dev_err(jrdev, "could not allocate extended descriptor\n");
1568 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1569 iv_dma, ivsize, 0, 0);
1570 return ERR_PTR(-ENOMEM);
1573 edesc->src_nents = src_nents;
1574 edesc->dst_nents = dst_nents;
1575 edesc->sec4_sg_bytes = sec4_sg_bytes;
/* the S/G table lives in the same allocation, after the hw descriptor */
1576 edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
/* table entry 0 is the IV, followed by the source segments */
1580 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1581 sg_to_sec4_sg_last(req->src, mapped_src_nents,
1582 edesc->sec4_sg + 1, 0);
1585 if (mapped_dst_nents > 1) {
1586 sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1587 edesc->sec4_sg + dst_sg_idx, 0);
/* make the S/G table visible to the hardware */
1590 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1591 sec4_sg_bytes, DMA_TO_DEVICE);
1592 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1593 dev_err(jrdev, "unable to map S/G table\n");
1594 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1595 iv_dma, ivsize, 0, 0);
1597 return ERR_PTR(-ENOMEM);
1600 edesc->iv_dma = iv_dma;
/* debug dump of the assembled S/G link table */
1603 print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
1604 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
/* report IV-contiguity back to the caller */
1608 *iv_contig_out = in_contig;
/*
 * ablkcipher_encrypt - process one symmetric-cipher encryption request
 * @req: ablkcipher_request to encrypt
 *
 * Allocates the extended descriptor, builds the job descriptor around
 * the encryption shared descriptor (ctx->sh_desc_enc) and enqueues it
 * with ablkcipher_encrypt_done() as completion callback.
 * NOTE(review): error branches and closing braces are elided here.
 */
1612 static int ablkcipher_encrypt(struct ablkcipher_request *req)
1614 struct ablkcipher_edesc *edesc;
1615 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1616 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1617 struct device *jrdev = ctx->jrdev;
1622 /* allocate extended descriptor */
1623 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
1624 CAAM_CMD_SZ, &iv_contig);
1626 return PTR_ERR(edesc);
1628 /* Create and submit job descriptor*/
1629 init_ablkcipher_job(ctx->sh_desc_enc,
1630 ctx->sh_desc_enc_dma, edesc, req, iv_contig);
/* debug dump of the constructed hardware job descriptor */
1632 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
1633 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1634 desc_bytes(edesc->hw_desc), 1);
1636 desc = edesc->hw_desc;
/* submit; ablkcipher_encrypt_done() is the completion callback */
1637 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
/* tear down DMA mappings (taken on the failure branch; branch elided here) */
1642 ablkcipher_unmap(jrdev, edesc, req);
/*
 * ablkcipher_decrypt - process one symmetric-cipher decryption request
 * @req: ablkcipher_request to decrypt
 *
 * Mirror of ablkcipher_encrypt() but built around the decryption shared
 * descriptor (ctx->sh_desc_dec) and completed by
 * ablkcipher_decrypt_done().  NOTE(review): error branches and closing
 * braces are elided here.
 */
1649 static int ablkcipher_decrypt(struct ablkcipher_request *req)
1651 struct ablkcipher_edesc *edesc;
1652 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1653 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1654 struct device *jrdev = ctx->jrdev;
1659 /* allocate extended descriptor */
1660 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
1661 CAAM_CMD_SZ, &iv_contig);
1663 return PTR_ERR(edesc);
1665 /* Create and submit job descriptor*/
1666 init_ablkcipher_job(ctx->sh_desc_dec,
1667 ctx->sh_desc_dec_dma, edesc, req, iv_contig);
1668 desc = edesc->hw_desc;
/* debug dump of the constructed hardware job descriptor */
1670 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
1671 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1672 desc_bytes(edesc->hw_desc), 1);
/* submit; ablkcipher_decrypt_done() is the completion callback */
1675 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
/* tear down DMA mappings (taken on the failure branch; branch elided here) */
1679 ablkcipher_unmap(jrdev, edesc, req);
1687 * allocate and map the ablkcipher extended descriptor
1688 * for ablkcipher givencrypt
/*
 * Like ablkcipher_edesc_alloc() but for givencrypt: the IV comes from
 * greq->giv (generated IV buffer) and contiguity is checked against the
 * DESTINATION rather than the source, since the hardware writes the IV
 * out alongside the ciphertext.  On failure every mapping taken so far
 * is undone before returning ERR_PTR().  NOTE(review): many lines
 * (closing braces, dma directions, return statement) are elided here.
 */
1690 static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
1691 struct skcipher_givcrypt_request *greq,
1693 bool *iv_contig_out)
1695 struct ablkcipher_request *req = &greq->creq;
1696 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1697 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1698 struct device *jrdev = ctx->jrdev;
/* GFP_KERNEL only when the caller allows sleeping */
1699 gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1700 GFP_KERNEL : GFP_ATOMIC;
1701 int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
1702 struct ablkcipher_edesc *edesc;
1703 dma_addr_t iv_dma = 0;
1705 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1706 int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
/* count S/G entries needed to cover req->nbytes of payload */
1708 src_nents = sg_nents_for_len(req->src, req->nbytes);
1709 if (unlikely(src_nents < 0)) {
1710 dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1712 return ERR_PTR(src_nents);
/* in-place: one bidirectional mapping, dst counts mirror src */
1715 if (likely(req->src == req->dst)) {
1716 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1718 if (unlikely(!mapped_src_nents)) {
1719 dev_err(jrdev, "unable to map source\n");
1720 return ERR_PTR(-ENOMEM);
1723 dst_nents = src_nents;
1724 mapped_dst_nents = src_nents;
/* out-of-place: map src and dst separately */
1726 mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1728 if (unlikely(!mapped_src_nents)) {
1729 dev_err(jrdev, "unable to map source\n");
1730 return ERR_PTR(-ENOMEM);
1733 dst_nents = sg_nents_for_len(req->dst, req->nbytes);
1734 if (unlikely(dst_nents < 0)) {
1735 dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1737 return ERR_PTR(dst_nents);
1740 mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1742 if (unlikely(!mapped_dst_nents)) {
1743 dev_err(jrdev, "unable to map destination\n");
/* undo the source mapping before bailing out */
1744 dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1745 return ERR_PTR(-ENOMEM);
1750 * Check if iv can be contiguous with source and destination.
1751 * If so, include it. If not, create scatterlist.
/* map the generated-IV buffer supplied by the givcrypt request */
1753 iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
1754 if (dma_mapping_error(jrdev, iv_dma)) {
1755 dev_err(jrdev, "unable to map IV\n");
1756 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1758 return ERR_PTR(-ENOMEM);
1761 sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
1762 dst_sg_idx = sec4_sg_ents;
/* IV is contiguous with dst only if it sits immediately before it */
1763 if (mapped_dst_nents == 1 &&
1764 iv_dma + ivsize == sg_dma_address(req->dst)) {
/* non-contiguous: one entry for the IV plus one per dst segment */
1768 sec4_sg_ents += 1 + mapped_dst_nents;
1771 /* allocate space for base edesc and hw desc commands, link tables */
1772 sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1773 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1776 dev_err(jrdev, "could not allocate extended descriptor\n");
1777 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1778 iv_dma, ivsize, 0, 0);
1779 return ERR_PTR(-ENOMEM);
1782 edesc->src_nents = src_nents;
1783 edesc->dst_nents = dst_nents;
1784 edesc->sec4_sg_bytes = sec4_sg_bytes;
/* the S/G table lives in the same allocation, after the hw descriptor */
1785 edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
1788 if (mapped_src_nents > 1)
1789 sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg,
/* IV entry precedes the destination segments in the table */
1793 dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx,
1795 sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1796 edesc->sec4_sg + dst_sg_idx + 1, 0);
/* make the S/G table visible to the hardware */
1799 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1800 sec4_sg_bytes, DMA_TO_DEVICE);
1801 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1802 dev_err(jrdev, "unable to map S/G table\n");
1803 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1804 iv_dma, ivsize, 0, 0);
1806 return ERR_PTR(-ENOMEM);
1808 edesc->iv_dma = iv_dma;
/* debug dump of the assembled S/G link table */
1811 print_hex_dump(KERN_ERR,
1812 "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
1813 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
/* report IV/destination contiguity back to the caller */
1817 *iv_contig_out = out_contig;
/*
 * ablkcipher_givencrypt - encrypt and emit a generated IV
 * @creq: skcipher_givcrypt_request wrapping the cipher request
 *
 * Allocates the givencrypt extended descriptor, builds the job around
 * the givencrypt shared descriptor (ctx->sh_desc_givenc) and enqueues
 * it; completion is reported through ablkcipher_encrypt_done().
 * NOTE(review): error branches and closing braces are elided here.
 */
1821 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
1823 struct ablkcipher_request *req = &creq->creq;
1824 struct ablkcipher_edesc *edesc;
1825 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1826 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1827 struct device *jrdev = ctx->jrdev;
1828 bool iv_contig = false;
1832 /* allocate extended descriptor */
1833 edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
1834 CAAM_CMD_SZ, &iv_contig);
1836 return PTR_ERR(edesc);
1838 /* Create and submit job descriptor*/
1839 init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
1840 edesc, req, iv_contig);
/* debug dump of the constructed hardware job descriptor */
1842 print_hex_dump(KERN_ERR,
1843 "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
1844 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1845 desc_bytes(edesc->hw_desc), 1);
1847 desc = edesc->hw_desc;
/* submit; reuses ablkcipher_encrypt_done() as completion callback */
1848 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
/* tear down DMA mappings (taken on the failure branch; branch elided here) */
1853 ablkcipher_unmap(jrdev, edesc, req);
/* convenience accessors for the anonymous template union below */
1860 #define template_aead template_u.aead
1861 #define template_ablkcipher template_u.ablkcipher
/*
 * struct caam_alg_template - static description of one algorithm to
 * register with the crypto API, plus the CAAM OP_ALG_* selectors needed
 * to build its shared descriptors.
 */
1862 struct caam_alg_template {
1863 char name[CRYPTO_MAX_ALG_NAME]
1864 char driver_name[CRYPTO_MAX_ALG_NAME];
1865 unsigned int blocksize;
1868 struct ablkcipher_alg ablkcipher;
/* CAAM class 1 (cipher) and class 2 (auth) algorithm selectors */
1870 u32 class1_alg_type;
1871 u32 class2_alg_type;
/*
 * Table of (abl)kcipher algorithms this driver registers.  Entries with
 * type CRYPTO_ALG_TYPE_GIVCIPHER additionally provide givencrypt with a
 * built-in IV generator.  NOTE(review): some entry fields (e.g. .name
 * of the first entry) are elided from this view.
 */
1874 static struct caam_alg_template driver_algs[] = {
1875 /* ablkcipher descriptor */
/* AES-CBC (IV-generating) */
1878 .driver_name = "cbc-aes-caam",
1879 .blocksize = AES_BLOCK_SIZE,
1880 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
1881 .template_ablkcipher = {
1882 .setkey = ablkcipher_setkey,
1883 .encrypt = ablkcipher_encrypt,
1884 .decrypt = ablkcipher_decrypt,
1885 .givencrypt = ablkcipher_givencrypt,
1886 .geniv = "<built-in>",
1887 .min_keysize = AES_MIN_KEY_SIZE,
1888 .max_keysize = AES_MAX_KEY_SIZE,
1889 .ivsize = AES_BLOCK_SIZE,
1891 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
/* 3DES-CBC (IV-generating) */
1894 .name = "cbc(des3_ede)",
1895 .driver_name = "cbc-3des-caam",
1896 .blocksize = DES3_EDE_BLOCK_SIZE,
1897 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
1898 .template_ablkcipher = {
1899 .setkey = ablkcipher_setkey,
1900 .encrypt = ablkcipher_encrypt,
1901 .decrypt = ablkcipher_decrypt,
1902 .givencrypt = ablkcipher_givencrypt,
1903 .geniv = "<built-in>",
1904 .min_keysize = DES3_EDE_KEY_SIZE,
1905 .max_keysize = DES3_EDE_KEY_SIZE,
1906 .ivsize = DES3_EDE_BLOCK_SIZE,
1908 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
/* DES-CBC (IV-generating) */
1912 .driver_name = "cbc-des-caam",
1913 .blocksize = DES_BLOCK_SIZE,
1914 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
1915 .template_ablkcipher = {
1916 .setkey = ablkcipher_setkey,
1917 .encrypt = ablkcipher_encrypt,
1918 .decrypt = ablkcipher_decrypt,
1919 .givencrypt = ablkcipher_givencrypt,
1920 .geniv = "<built-in>",
1921 .min_keysize = DES_KEY_SIZE,
1922 .max_keysize = DES_KEY_SIZE,
1923 .ivsize = DES_BLOCK_SIZE,
1925 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
/* AES-CTR (plain ablkcipher, no IV generation) */
1929 .driver_name = "ctr-aes-caam",
1931 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1932 .template_ablkcipher = {
1933 .setkey = ablkcipher_setkey,
1934 .encrypt = ablkcipher_encrypt,
1935 .decrypt = ablkcipher_decrypt,
1937 .min_keysize = AES_MIN_KEY_SIZE,
1938 .max_keysize = AES_MAX_KEY_SIZE,
1939 .ivsize = AES_BLOCK_SIZE,
1941 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
/* RFC3686 AES-CTR: key carries an extra nonce (IV-generating) */
1944 .name = "rfc3686(ctr(aes))",
1945 .driver_name = "rfc3686-ctr-aes-caam",
1947 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
1948 .template_ablkcipher = {
1949 .setkey = ablkcipher_setkey,
1950 .encrypt = ablkcipher_encrypt,
1951 .decrypt = ablkcipher_decrypt,
1952 .givencrypt = ablkcipher_givencrypt,
1953 .geniv = "<built-in>",
1954 .min_keysize = AES_MIN_KEY_SIZE +
1955 CTR_RFC3686_NONCE_SIZE,
1956 .max_keysize = AES_MAX_KEY_SIZE +
1957 CTR_RFC3686_NONCE_SIZE,
1958 .ivsize = CTR_RFC3686_IV_SIZE,
1960 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
/* AES-XTS: two concatenated keys, dedicated setkey */
1964 .driver_name = "xts-aes-caam",
1965 .blocksize = AES_BLOCK_SIZE,
1966 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1967 .template_ablkcipher = {
1968 .setkey = xts_ablkcipher_setkey,
1969 .encrypt = ablkcipher_encrypt,
1970 .decrypt = ablkcipher_decrypt,
1972 .min_keysize = 2 * AES_MIN_KEY_SIZE,
1973 .max_keysize = 2 * AES_MAX_KEY_SIZE,
1974 .ivsize = AES_BLOCK_SIZE,
1976 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
1980 static struct caam_aead_alg driver_aeads[] = {
1984 .cra_name = "rfc4106(gcm(aes))",
1985 .cra_driver_name = "rfc4106-gcm-aes-caam",
1988 .setkey = rfc4106_setkey,
1989 .setauthsize = rfc4106_setauthsize,
1990 .encrypt = ipsec_gcm_encrypt,
1991 .decrypt = ipsec_gcm_decrypt,
1993 .maxauthsize = AES_BLOCK_SIZE,
1996 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2002 .cra_name = "rfc4543(gcm(aes))",
2003 .cra_driver_name = "rfc4543-gcm-aes-caam",
2006 .setkey = rfc4543_setkey,
2007 .setauthsize = rfc4543_setauthsize,
2008 .encrypt = ipsec_gcm_encrypt,
2009 .decrypt = ipsec_gcm_decrypt,
2011 .maxauthsize = AES_BLOCK_SIZE,
2014 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2017 /* Galois Counter Mode */
2021 .cra_name = "gcm(aes)",
2022 .cra_driver_name = "gcm-aes-caam",
2025 .setkey = gcm_setkey,
2026 .setauthsize = gcm_setauthsize,
2027 .encrypt = gcm_encrypt,
2028 .decrypt = gcm_decrypt,
2030 .maxauthsize = AES_BLOCK_SIZE,
2033 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2036 /* single-pass ipsec_esp descriptor */
2040 .cra_name = "authenc(hmac(md5),"
2041 "ecb(cipher_null))",
2042 .cra_driver_name = "authenc-hmac-md5-"
2043 "ecb-cipher_null-caam",
2044 .cra_blocksize = NULL_BLOCK_SIZE,
2046 .setkey = aead_setkey,
2047 .setauthsize = aead_setauthsize,
2048 .encrypt = aead_encrypt,
2049 .decrypt = aead_decrypt,
2050 .ivsize = NULL_IV_SIZE,
2051 .maxauthsize = MD5_DIGEST_SIZE,
2054 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2055 OP_ALG_AAI_HMAC_PRECOMP,
2061 .cra_name = "authenc(hmac(sha1),"
2062 "ecb(cipher_null))",
2063 .cra_driver_name = "authenc-hmac-sha1-"
2064 "ecb-cipher_null-caam",
2065 .cra_blocksize = NULL_BLOCK_SIZE,
2067 .setkey = aead_setkey,
2068 .setauthsize = aead_setauthsize,
2069 .encrypt = aead_encrypt,
2070 .decrypt = aead_decrypt,
2071 .ivsize = NULL_IV_SIZE,
2072 .maxauthsize = SHA1_DIGEST_SIZE,
2075 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2076 OP_ALG_AAI_HMAC_PRECOMP,
2082 .cra_name = "authenc(hmac(sha224),"
2083 "ecb(cipher_null))",
2084 .cra_driver_name = "authenc-hmac-sha224-"
2085 "ecb-cipher_null-caam",
2086 .cra_blocksize = NULL_BLOCK_SIZE,
2088 .setkey = aead_setkey,
2089 .setauthsize = aead_setauthsize,
2090 .encrypt = aead_encrypt,
2091 .decrypt = aead_decrypt,
2092 .ivsize = NULL_IV_SIZE,
2093 .maxauthsize = SHA224_DIGEST_SIZE,
2096 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2097 OP_ALG_AAI_HMAC_PRECOMP,
2103 .cra_name = "authenc(hmac(sha256),"
2104 "ecb(cipher_null))",
2105 .cra_driver_name = "authenc-hmac-sha256-"
2106 "ecb-cipher_null-caam",
2107 .cra_blocksize = NULL_BLOCK_SIZE,
2109 .setkey = aead_setkey,
2110 .setauthsize = aead_setauthsize,
2111 .encrypt = aead_encrypt,
2112 .decrypt = aead_decrypt,
2113 .ivsize = NULL_IV_SIZE,
2114 .maxauthsize = SHA256_DIGEST_SIZE,
2117 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2118 OP_ALG_AAI_HMAC_PRECOMP,
2124 .cra_name = "authenc(hmac(sha384),"
2125 "ecb(cipher_null))",
2126 .cra_driver_name = "authenc-hmac-sha384-"
2127 "ecb-cipher_null-caam",
2128 .cra_blocksize = NULL_BLOCK_SIZE,
2130 .setkey = aead_setkey,
2131 .setauthsize = aead_setauthsize,
2132 .encrypt = aead_encrypt,
2133 .decrypt = aead_decrypt,
2134 .ivsize = NULL_IV_SIZE,
2135 .maxauthsize = SHA384_DIGEST_SIZE,
2138 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2139 OP_ALG_AAI_HMAC_PRECOMP,
2145 .cra_name = "authenc(hmac(sha512),"
2146 "ecb(cipher_null))",
2147 .cra_driver_name = "authenc-hmac-sha512-"
2148 "ecb-cipher_null-caam",
2149 .cra_blocksize = NULL_BLOCK_SIZE,
2151 .setkey = aead_setkey,
2152 .setauthsize = aead_setauthsize,
2153 .encrypt = aead_encrypt,
2154 .decrypt = aead_decrypt,
2155 .ivsize = NULL_IV_SIZE,
2156 .maxauthsize = SHA512_DIGEST_SIZE,
2159 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2160 OP_ALG_AAI_HMAC_PRECOMP,
2166 .cra_name = "authenc(hmac(md5),cbc(aes))",
2167 .cra_driver_name = "authenc-hmac-md5-"
2169 .cra_blocksize = AES_BLOCK_SIZE,
2171 .setkey = aead_setkey,
2172 .setauthsize = aead_setauthsize,
2173 .encrypt = aead_encrypt,
2174 .decrypt = aead_decrypt,
2175 .ivsize = AES_BLOCK_SIZE,
2176 .maxauthsize = MD5_DIGEST_SIZE,
2179 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2180 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2181 OP_ALG_AAI_HMAC_PRECOMP,
2187 .cra_name = "echainiv(authenc(hmac(md5),"
2189 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2191 .cra_blocksize = AES_BLOCK_SIZE,
2193 .setkey = aead_setkey,
2194 .setauthsize = aead_setauthsize,
2195 .encrypt = aead_encrypt,
2196 .decrypt = aead_decrypt,
2197 .ivsize = AES_BLOCK_SIZE,
2198 .maxauthsize = MD5_DIGEST_SIZE,
2201 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2202 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2203 OP_ALG_AAI_HMAC_PRECOMP,
2210 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2211 .cra_driver_name = "authenc-hmac-sha1-"
2213 .cra_blocksize = AES_BLOCK_SIZE,
2215 .setkey = aead_setkey,
2216 .setauthsize = aead_setauthsize,
2217 .encrypt = aead_encrypt,
2218 .decrypt = aead_decrypt,
2219 .ivsize = AES_BLOCK_SIZE,
2220 .maxauthsize = SHA1_DIGEST_SIZE,
2223 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2224 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2225 OP_ALG_AAI_HMAC_PRECOMP,
2231 .cra_name = "echainiv(authenc(hmac(sha1),"
2233 .cra_driver_name = "echainiv-authenc-"
2234 "hmac-sha1-cbc-aes-caam",
2235 .cra_blocksize = AES_BLOCK_SIZE,
2237 .setkey = aead_setkey,
2238 .setauthsize = aead_setauthsize,
2239 .encrypt = aead_encrypt,
2240 .decrypt = aead_decrypt,
2241 .ivsize = AES_BLOCK_SIZE,
2242 .maxauthsize = SHA1_DIGEST_SIZE,
2245 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2246 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2247 OP_ALG_AAI_HMAC_PRECOMP,
2254 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2255 .cra_driver_name = "authenc-hmac-sha224-"
2257 .cra_blocksize = AES_BLOCK_SIZE,
2259 .setkey = aead_setkey,
2260 .setauthsize = aead_setauthsize,
2261 .encrypt = aead_encrypt,
2262 .decrypt = aead_decrypt,
2263 .ivsize = AES_BLOCK_SIZE,
2264 .maxauthsize = SHA224_DIGEST_SIZE,
2267 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2268 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2269 OP_ALG_AAI_HMAC_PRECOMP,
2275 .cra_name = "echainiv(authenc(hmac(sha224),"
2277 .cra_driver_name = "echainiv-authenc-"
2278 "hmac-sha224-cbc-aes-caam",
2279 .cra_blocksize = AES_BLOCK_SIZE,
2281 .setkey = aead_setkey,
2282 .setauthsize = aead_setauthsize,
2283 .encrypt = aead_encrypt,
2284 .decrypt = aead_decrypt,
2285 .ivsize = AES_BLOCK_SIZE,
2286 .maxauthsize = SHA224_DIGEST_SIZE,
2289 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2290 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2291 OP_ALG_AAI_HMAC_PRECOMP,
2298 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2299 .cra_driver_name = "authenc-hmac-sha256-"
2301 .cra_blocksize = AES_BLOCK_SIZE,
2303 .setkey = aead_setkey,
2304 .setauthsize = aead_setauthsize,
2305 .encrypt = aead_encrypt,
2306 .decrypt = aead_decrypt,
2307 .ivsize = AES_BLOCK_SIZE,
2308 .maxauthsize = SHA256_DIGEST_SIZE,
2311 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2312 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2313 OP_ALG_AAI_HMAC_PRECOMP,
2319 .cra_name = "echainiv(authenc(hmac(sha256),"
2321 .cra_driver_name = "echainiv-authenc-"
2322 "hmac-sha256-cbc-aes-caam",
2323 .cra_blocksize = AES_BLOCK_SIZE,
2325 .setkey = aead_setkey,
2326 .setauthsize = aead_setauthsize,
2327 .encrypt = aead_encrypt,
2328 .decrypt = aead_decrypt,
2329 .ivsize = AES_BLOCK_SIZE,
2330 .maxauthsize = SHA256_DIGEST_SIZE,
2333 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2334 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2335 OP_ALG_AAI_HMAC_PRECOMP,
2342 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2343 .cra_driver_name = "authenc-hmac-sha384-"
2345 .cra_blocksize = AES_BLOCK_SIZE,
2347 .setkey = aead_setkey,
2348 .setauthsize = aead_setauthsize,
2349 .encrypt = aead_encrypt,
2350 .decrypt = aead_decrypt,
2351 .ivsize = AES_BLOCK_SIZE,
2352 .maxauthsize = SHA384_DIGEST_SIZE,
2355 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2356 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2357 OP_ALG_AAI_HMAC_PRECOMP,
2363 .cra_name = "echainiv(authenc(hmac(sha384),"
2365 .cra_driver_name = "echainiv-authenc-"
2366 "hmac-sha384-cbc-aes-caam",
2367 .cra_blocksize = AES_BLOCK_SIZE,
2369 .setkey = aead_setkey,
2370 .setauthsize = aead_setauthsize,
2371 .encrypt = aead_encrypt,
2372 .decrypt = aead_decrypt,
2373 .ivsize = AES_BLOCK_SIZE,
2374 .maxauthsize = SHA384_DIGEST_SIZE,
2377 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2378 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2379 OP_ALG_AAI_HMAC_PRECOMP,
2386 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2387 .cra_driver_name = "authenc-hmac-sha512-"
2389 .cra_blocksize = AES_BLOCK_SIZE,
2391 .setkey = aead_setkey,
2392 .setauthsize = aead_setauthsize,
2393 .encrypt = aead_encrypt,
2394 .decrypt = aead_decrypt,
2395 .ivsize = AES_BLOCK_SIZE,
2396 .maxauthsize = SHA512_DIGEST_SIZE,
2399 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2400 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2401 OP_ALG_AAI_HMAC_PRECOMP,
2407 .cra_name = "echainiv(authenc(hmac(sha512),"
2409 .cra_driver_name = "echainiv-authenc-"
2410 "hmac-sha512-cbc-aes-caam",
2411 .cra_blocksize = AES_BLOCK_SIZE,
2413 .setkey = aead_setkey,
2414 .setauthsize = aead_setauthsize,
2415 .encrypt = aead_encrypt,
2416 .decrypt = aead_decrypt,
2417 .ivsize = AES_BLOCK_SIZE,
2418 .maxauthsize = SHA512_DIGEST_SIZE,
2421 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2422 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2423 OP_ALG_AAI_HMAC_PRECOMP,
2430 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2431 .cra_driver_name = "authenc-hmac-md5-"
2432 "cbc-des3_ede-caam",
2433 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2435 .setkey = aead_setkey,
2436 .setauthsize = aead_setauthsize,
2437 .encrypt = aead_encrypt,
2438 .decrypt = aead_decrypt,
2439 .ivsize = DES3_EDE_BLOCK_SIZE,
2440 .maxauthsize = MD5_DIGEST_SIZE,
2443 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2444 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2445 OP_ALG_AAI_HMAC_PRECOMP,
2451 .cra_name = "echainiv(authenc(hmac(md5),"
2453 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2454 "cbc-des3_ede-caam",
2455 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2457 .setkey = aead_setkey,
2458 .setauthsize = aead_setauthsize,
2459 .encrypt = aead_encrypt,
2460 .decrypt = aead_decrypt,
2461 .ivsize = DES3_EDE_BLOCK_SIZE,
2462 .maxauthsize = MD5_DIGEST_SIZE,
2465 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2466 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2467 OP_ALG_AAI_HMAC_PRECOMP,
2474 .cra_name = "authenc(hmac(sha1),"
2476 .cra_driver_name = "authenc-hmac-sha1-"
2477 "cbc-des3_ede-caam",
2478 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2480 .setkey = aead_setkey,
2481 .setauthsize = aead_setauthsize,
2482 .encrypt = aead_encrypt,
2483 .decrypt = aead_decrypt,
2484 .ivsize = DES3_EDE_BLOCK_SIZE,
2485 .maxauthsize = SHA1_DIGEST_SIZE,
2488 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2489 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2490 OP_ALG_AAI_HMAC_PRECOMP,
2496 .cra_name = "echainiv(authenc(hmac(sha1),"
2498 .cra_driver_name = "echainiv-authenc-"
2500 "cbc-des3_ede-caam",
2501 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2503 .setkey = aead_setkey,
2504 .setauthsize = aead_setauthsize,
2505 .encrypt = aead_encrypt,
2506 .decrypt = aead_decrypt,
2507 .ivsize = DES3_EDE_BLOCK_SIZE,
2508 .maxauthsize = SHA1_DIGEST_SIZE,
2511 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2512 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2513 OP_ALG_AAI_HMAC_PRECOMP,
2520 .cra_name = "authenc(hmac(sha224),"
2522 .cra_driver_name = "authenc-hmac-sha224-"
2523 "cbc-des3_ede-caam",
2524 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2526 .setkey = aead_setkey,
2527 .setauthsize = aead_setauthsize,
2528 .encrypt = aead_encrypt,
2529 .decrypt = aead_decrypt,
2530 .ivsize = DES3_EDE_BLOCK_SIZE,
2531 .maxauthsize = SHA224_DIGEST_SIZE,
2534 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2535 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2536 OP_ALG_AAI_HMAC_PRECOMP,
2542 .cra_name = "echainiv(authenc(hmac(sha224),"
2544 .cra_driver_name = "echainiv-authenc-"
2546 "cbc-des3_ede-caam",
2547 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2549 .setkey = aead_setkey,
2550 .setauthsize = aead_setauthsize,
2551 .encrypt = aead_encrypt,
2552 .decrypt = aead_decrypt,
2553 .ivsize = DES3_EDE_BLOCK_SIZE,
2554 .maxauthsize = SHA224_DIGEST_SIZE,
2557 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2558 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2559 OP_ALG_AAI_HMAC_PRECOMP,
2566 .cra_name = "authenc(hmac(sha256),"
2568 .cra_driver_name = "authenc-hmac-sha256-"
2569 "cbc-des3_ede-caam",
2570 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2572 .setkey = aead_setkey,
2573 .setauthsize = aead_setauthsize,
2574 .encrypt = aead_encrypt,
2575 .decrypt = aead_decrypt,
2576 .ivsize = DES3_EDE_BLOCK_SIZE,
2577 .maxauthsize = SHA256_DIGEST_SIZE,
2580 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2581 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2582 OP_ALG_AAI_HMAC_PRECOMP,
2588 .cra_name = "echainiv(authenc(hmac(sha256),"
2590 .cra_driver_name = "echainiv-authenc-"
2592 "cbc-des3_ede-caam",
2593 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2595 .setkey = aead_setkey,
2596 .setauthsize = aead_setauthsize,
2597 .encrypt = aead_encrypt,
2598 .decrypt = aead_decrypt,
2599 .ivsize = DES3_EDE_BLOCK_SIZE,
2600 .maxauthsize = SHA256_DIGEST_SIZE,
2603 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2604 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2605 OP_ALG_AAI_HMAC_PRECOMP,
2612 .cra_name = "authenc(hmac(sha384),"
2614 .cra_driver_name = "authenc-hmac-sha384-"
2615 "cbc-des3_ede-caam",
2616 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2618 .setkey = aead_setkey,
2619 .setauthsize = aead_setauthsize,
2620 .encrypt = aead_encrypt,
2621 .decrypt = aead_decrypt,
2622 .ivsize = DES3_EDE_BLOCK_SIZE,
2623 .maxauthsize = SHA384_DIGEST_SIZE,
2626 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2627 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2628 OP_ALG_AAI_HMAC_PRECOMP,
2634 .cra_name = "echainiv(authenc(hmac(sha384),"
2636 .cra_driver_name = "echainiv-authenc-"
2638 "cbc-des3_ede-caam",
2639 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2641 .setkey = aead_setkey,
2642 .setauthsize = aead_setauthsize,
2643 .encrypt = aead_encrypt,
2644 .decrypt = aead_decrypt,
2645 .ivsize = DES3_EDE_BLOCK_SIZE,
2646 .maxauthsize = SHA384_DIGEST_SIZE,
2649 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2650 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2651 OP_ALG_AAI_HMAC_PRECOMP,
2658 .cra_name = "authenc(hmac(sha512),"
2660 .cra_driver_name = "authenc-hmac-sha512-"
2661 "cbc-des3_ede-caam",
2662 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2664 .setkey = aead_setkey,
2665 .setauthsize = aead_setauthsize,
2666 .encrypt = aead_encrypt,
2667 .decrypt = aead_decrypt,
2668 .ivsize = DES3_EDE_BLOCK_SIZE,
2669 .maxauthsize = SHA512_DIGEST_SIZE,
2672 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2673 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2674 OP_ALG_AAI_HMAC_PRECOMP,
2680 .cra_name = "echainiv(authenc(hmac(sha512),"
2682 .cra_driver_name = "echainiv-authenc-"
2684 "cbc-des3_ede-caam",
2685 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2687 .setkey = aead_setkey,
2688 .setauthsize = aead_setauthsize,
2689 .encrypt = aead_encrypt,
2690 .decrypt = aead_decrypt,
2691 .ivsize = DES3_EDE_BLOCK_SIZE,
2692 .maxauthsize = SHA512_DIGEST_SIZE,
2695 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2696 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2697 OP_ALG_AAI_HMAC_PRECOMP,
2704 .cra_name = "authenc(hmac(md5),cbc(des))",
2705 .cra_driver_name = "authenc-hmac-md5-"
2707 .cra_blocksize = DES_BLOCK_SIZE,
2709 .setkey = aead_setkey,
2710 .setauthsize = aead_setauthsize,
2711 .encrypt = aead_encrypt,
2712 .decrypt = aead_decrypt,
2713 .ivsize = DES_BLOCK_SIZE,
2714 .maxauthsize = MD5_DIGEST_SIZE,
2717 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2718 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2719 OP_ALG_AAI_HMAC_PRECOMP,
2725 .cra_name = "echainiv(authenc(hmac(md5),"
2727 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2729 .cra_blocksize = DES_BLOCK_SIZE,
2731 .setkey = aead_setkey,
2732 .setauthsize = aead_setauthsize,
2733 .encrypt = aead_encrypt,
2734 .decrypt = aead_decrypt,
2735 .ivsize = DES_BLOCK_SIZE,
2736 .maxauthsize = MD5_DIGEST_SIZE,
2739 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2740 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2741 OP_ALG_AAI_HMAC_PRECOMP,
2748 .cra_name = "authenc(hmac(sha1),cbc(des))",
2749 .cra_driver_name = "authenc-hmac-sha1-"
2751 .cra_blocksize = DES_BLOCK_SIZE,
2753 .setkey = aead_setkey,
2754 .setauthsize = aead_setauthsize,
2755 .encrypt = aead_encrypt,
2756 .decrypt = aead_decrypt,
2757 .ivsize = DES_BLOCK_SIZE,
2758 .maxauthsize = SHA1_DIGEST_SIZE,
2761 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2762 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2763 OP_ALG_AAI_HMAC_PRECOMP,
2769 .cra_name = "echainiv(authenc(hmac(sha1),"
2771 .cra_driver_name = "echainiv-authenc-"
2772 "hmac-sha1-cbc-des-caam",
2773 .cra_blocksize = DES_BLOCK_SIZE,
2775 .setkey = aead_setkey,
2776 .setauthsize = aead_setauthsize,
2777 .encrypt = aead_encrypt,
2778 .decrypt = aead_decrypt,
2779 .ivsize = DES_BLOCK_SIZE,
2780 .maxauthsize = SHA1_DIGEST_SIZE,
2783 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2784 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2785 OP_ALG_AAI_HMAC_PRECOMP,
2792 .cra_name = "authenc(hmac(sha224),cbc(des))",
2793 .cra_driver_name = "authenc-hmac-sha224-"
2795 .cra_blocksize = DES_BLOCK_SIZE,
2797 .setkey = aead_setkey,
2798 .setauthsize = aead_setauthsize,
2799 .encrypt = aead_encrypt,
2800 .decrypt = aead_decrypt,
2801 .ivsize = DES_BLOCK_SIZE,
2802 .maxauthsize = SHA224_DIGEST_SIZE,
2805 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2806 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2807 OP_ALG_AAI_HMAC_PRECOMP,
2813 .cra_name = "echainiv(authenc(hmac(sha224),"
2815 .cra_driver_name = "echainiv-authenc-"
2816 "hmac-sha224-cbc-des-caam",
2817 .cra_blocksize = DES_BLOCK_SIZE,
2819 .setkey = aead_setkey,
2820 .setauthsize = aead_setauthsize,
2821 .encrypt = aead_encrypt,
2822 .decrypt = aead_decrypt,
2823 .ivsize = DES_BLOCK_SIZE,
2824 .maxauthsize = SHA224_DIGEST_SIZE,
2827 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2828 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2829 OP_ALG_AAI_HMAC_PRECOMP,
2836 .cra_name = "authenc(hmac(sha256),cbc(des))",
2837 .cra_driver_name = "authenc-hmac-sha256-"
2839 .cra_blocksize = DES_BLOCK_SIZE,
2841 .setkey = aead_setkey,
2842 .setauthsize = aead_setauthsize,
2843 .encrypt = aead_encrypt,
2844 .decrypt = aead_decrypt,
2845 .ivsize = DES_BLOCK_SIZE,
2846 .maxauthsize = SHA256_DIGEST_SIZE,
2849 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2850 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2851 OP_ALG_AAI_HMAC_PRECOMP,
2857 .cra_name = "echainiv(authenc(hmac(sha256),"
2859 .cra_driver_name = "echainiv-authenc-"
2860 "hmac-sha256-cbc-des-caam",
2861 .cra_blocksize = DES_BLOCK_SIZE,
2863 .setkey = aead_setkey,
2864 .setauthsize = aead_setauthsize,
2865 .encrypt = aead_encrypt,
2866 .decrypt = aead_decrypt,
2867 .ivsize = DES_BLOCK_SIZE,
2868 .maxauthsize = SHA256_DIGEST_SIZE,
2871 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2872 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2873 OP_ALG_AAI_HMAC_PRECOMP,
2880 .cra_name = "authenc(hmac(sha384),cbc(des))",
2881 .cra_driver_name = "authenc-hmac-sha384-"
2883 .cra_blocksize = DES_BLOCK_SIZE,
2885 .setkey = aead_setkey,
2886 .setauthsize = aead_setauthsize,
2887 .encrypt = aead_encrypt,
2888 .decrypt = aead_decrypt,
2889 .ivsize = DES_BLOCK_SIZE,
2890 .maxauthsize = SHA384_DIGEST_SIZE,
2893 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2894 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2895 OP_ALG_AAI_HMAC_PRECOMP,
2901 .cra_name = "echainiv(authenc(hmac(sha384),"
2903 .cra_driver_name = "echainiv-authenc-"
2904 "hmac-sha384-cbc-des-caam",
2905 .cra_blocksize = DES_BLOCK_SIZE,
2907 .setkey = aead_setkey,
2908 .setauthsize = aead_setauthsize,
2909 .encrypt = aead_encrypt,
2910 .decrypt = aead_decrypt,
2911 .ivsize = DES_BLOCK_SIZE,
2912 .maxauthsize = SHA384_DIGEST_SIZE,
2915 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2916 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2917 OP_ALG_AAI_HMAC_PRECOMP,
2924 .cra_name = "authenc(hmac(sha512),cbc(des))",
2925 .cra_driver_name = "authenc-hmac-sha512-"
2927 .cra_blocksize = DES_BLOCK_SIZE,
2929 .setkey = aead_setkey,
2930 .setauthsize = aead_setauthsize,
2931 .encrypt = aead_encrypt,
2932 .decrypt = aead_decrypt,
2933 .ivsize = DES_BLOCK_SIZE,
2934 .maxauthsize = SHA512_DIGEST_SIZE,
2937 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2938 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2939 OP_ALG_AAI_HMAC_PRECOMP,
2945 .cra_name = "echainiv(authenc(hmac(sha512),"
2947 .cra_driver_name = "echainiv-authenc-"
2948 "hmac-sha512-cbc-des-caam",
2949 .cra_blocksize = DES_BLOCK_SIZE,
2951 .setkey = aead_setkey,
2952 .setauthsize = aead_setauthsize,
2953 .encrypt = aead_encrypt,
2954 .decrypt = aead_decrypt,
2955 .ivsize = DES_BLOCK_SIZE,
2956 .maxauthsize = SHA512_DIGEST_SIZE,
2959 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2960 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2961 OP_ALG_AAI_HMAC_PRECOMP,
2968 .cra_name = "authenc(hmac(md5),"
2969 "rfc3686(ctr(aes)))",
2970 .cra_driver_name = "authenc-hmac-md5-"
2971 "rfc3686-ctr-aes-caam",
2974 .setkey = aead_setkey,
2975 .setauthsize = aead_setauthsize,
2976 .encrypt = aead_encrypt,
2977 .decrypt = aead_decrypt,
2978 .ivsize = CTR_RFC3686_IV_SIZE,
2979 .maxauthsize = MD5_DIGEST_SIZE,
2982 .class1_alg_type = OP_ALG_ALGSEL_AES |
2983 OP_ALG_AAI_CTR_MOD128,
2984 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2985 OP_ALG_AAI_HMAC_PRECOMP,
2992 .cra_name = "seqiv(authenc("
2993 "hmac(md5),rfc3686(ctr(aes))))",
2994 .cra_driver_name = "seqiv-authenc-hmac-md5-"
2995 "rfc3686-ctr-aes-caam",
2998 .setkey = aead_setkey,
2999 .setauthsize = aead_setauthsize,
3000 .encrypt = aead_encrypt,
3001 .decrypt = aead_decrypt,
3002 .ivsize = CTR_RFC3686_IV_SIZE,
3003 .maxauthsize = MD5_DIGEST_SIZE,
3006 .class1_alg_type = OP_ALG_ALGSEL_AES |
3007 OP_ALG_AAI_CTR_MOD128,
3008 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3009 OP_ALG_AAI_HMAC_PRECOMP,
3017 .cra_name = "authenc(hmac(sha1),"
3018 "rfc3686(ctr(aes)))",
3019 .cra_driver_name = "authenc-hmac-sha1-"
3020 "rfc3686-ctr-aes-caam",
3023 .setkey = aead_setkey,
3024 .setauthsize = aead_setauthsize,
3025 .encrypt = aead_encrypt,
3026 .decrypt = aead_decrypt,
3027 .ivsize = CTR_RFC3686_IV_SIZE,
3028 .maxauthsize = SHA1_DIGEST_SIZE,
3031 .class1_alg_type = OP_ALG_ALGSEL_AES |
3032 OP_ALG_AAI_CTR_MOD128,
3033 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3034 OP_ALG_AAI_HMAC_PRECOMP,
3041 .cra_name = "seqiv(authenc("
3042 "hmac(sha1),rfc3686(ctr(aes))))",
3043 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
3044 "rfc3686-ctr-aes-caam",
3047 .setkey = aead_setkey,
3048 .setauthsize = aead_setauthsize,
3049 .encrypt = aead_encrypt,
3050 .decrypt = aead_decrypt,
3051 .ivsize = CTR_RFC3686_IV_SIZE,
3052 .maxauthsize = SHA1_DIGEST_SIZE,
3055 .class1_alg_type = OP_ALG_ALGSEL_AES |
3056 OP_ALG_AAI_CTR_MOD128,
3057 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3058 OP_ALG_AAI_HMAC_PRECOMP,
3066 .cra_name = "authenc(hmac(sha224),"
3067 "rfc3686(ctr(aes)))",
3068 .cra_driver_name = "authenc-hmac-sha224-"
3069 "rfc3686-ctr-aes-caam",
3072 .setkey = aead_setkey,
3073 .setauthsize = aead_setauthsize,
3074 .encrypt = aead_encrypt,
3075 .decrypt = aead_decrypt,
3076 .ivsize = CTR_RFC3686_IV_SIZE,
3077 .maxauthsize = SHA224_DIGEST_SIZE,
3080 .class1_alg_type = OP_ALG_ALGSEL_AES |
3081 OP_ALG_AAI_CTR_MOD128,
3082 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3083 OP_ALG_AAI_HMAC_PRECOMP,
3090 .cra_name = "seqiv(authenc("
3091 "hmac(sha224),rfc3686(ctr(aes))))",
3092 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3093 "rfc3686-ctr-aes-caam",
3096 .setkey = aead_setkey,
3097 .setauthsize = aead_setauthsize,
3098 .encrypt = aead_encrypt,
3099 .decrypt = aead_decrypt,
3100 .ivsize = CTR_RFC3686_IV_SIZE,
3101 .maxauthsize = SHA224_DIGEST_SIZE,
3104 .class1_alg_type = OP_ALG_ALGSEL_AES |
3105 OP_ALG_AAI_CTR_MOD128,
3106 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3107 OP_ALG_AAI_HMAC_PRECOMP,
3115 .cra_name = "authenc(hmac(sha256),"
3116 "rfc3686(ctr(aes)))",
3117 .cra_driver_name = "authenc-hmac-sha256-"
3118 "rfc3686-ctr-aes-caam",
3121 .setkey = aead_setkey,
3122 .setauthsize = aead_setauthsize,
3123 .encrypt = aead_encrypt,
3124 .decrypt = aead_decrypt,
3125 .ivsize = CTR_RFC3686_IV_SIZE,
3126 .maxauthsize = SHA256_DIGEST_SIZE,
3129 .class1_alg_type = OP_ALG_ALGSEL_AES |
3130 OP_ALG_AAI_CTR_MOD128,
3131 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3132 OP_ALG_AAI_HMAC_PRECOMP,
3139 .cra_name = "seqiv(authenc(hmac(sha256),"
3140 "rfc3686(ctr(aes))))",
3141 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3142 "rfc3686-ctr-aes-caam",
3145 .setkey = aead_setkey,
3146 .setauthsize = aead_setauthsize,
3147 .encrypt = aead_encrypt,
3148 .decrypt = aead_decrypt,
3149 .ivsize = CTR_RFC3686_IV_SIZE,
3150 .maxauthsize = SHA256_DIGEST_SIZE,
3153 .class1_alg_type = OP_ALG_ALGSEL_AES |
3154 OP_ALG_AAI_CTR_MOD128,
3155 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3156 OP_ALG_AAI_HMAC_PRECOMP,
3164 .cra_name = "authenc(hmac(sha384),"
3165 "rfc3686(ctr(aes)))",
3166 .cra_driver_name = "authenc-hmac-sha384-"
3167 "rfc3686-ctr-aes-caam",
3170 .setkey = aead_setkey,
3171 .setauthsize = aead_setauthsize,
3172 .encrypt = aead_encrypt,
3173 .decrypt = aead_decrypt,
3174 .ivsize = CTR_RFC3686_IV_SIZE,
3175 .maxauthsize = SHA384_DIGEST_SIZE,
3178 .class1_alg_type = OP_ALG_ALGSEL_AES |
3179 OP_ALG_AAI_CTR_MOD128,
3180 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3181 OP_ALG_AAI_HMAC_PRECOMP,
3188 .cra_name = "seqiv(authenc(hmac(sha384),"
3189 "rfc3686(ctr(aes))))",
3190 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3191 "rfc3686-ctr-aes-caam",
3194 .setkey = aead_setkey,
3195 .setauthsize = aead_setauthsize,
3196 .encrypt = aead_encrypt,
3197 .decrypt = aead_decrypt,
3198 .ivsize = CTR_RFC3686_IV_SIZE,
3199 .maxauthsize = SHA384_DIGEST_SIZE,
3202 .class1_alg_type = OP_ALG_ALGSEL_AES |
3203 OP_ALG_AAI_CTR_MOD128,
3204 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3205 OP_ALG_AAI_HMAC_PRECOMP,
3213 .cra_name = "authenc(hmac(sha512),"
3214 "rfc3686(ctr(aes)))",
3215 .cra_driver_name = "authenc-hmac-sha512-"
3216 "rfc3686-ctr-aes-caam",
3219 .setkey = aead_setkey,
3220 .setauthsize = aead_setauthsize,
3221 .encrypt = aead_encrypt,
3222 .decrypt = aead_decrypt,
3223 .ivsize = CTR_RFC3686_IV_SIZE,
3224 .maxauthsize = SHA512_DIGEST_SIZE,
3227 .class1_alg_type = OP_ALG_ALGSEL_AES |
3228 OP_ALG_AAI_CTR_MOD128,
3229 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3230 OP_ALG_AAI_HMAC_PRECOMP,
3237 .cra_name = "seqiv(authenc(hmac(sha512),"
3238 "rfc3686(ctr(aes))))",
3239 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3240 "rfc3686-ctr-aes-caam",
3243 .setkey = aead_setkey,
3244 .setauthsize = aead_setauthsize,
3245 .encrypt = aead_encrypt,
3246 .decrypt = aead_decrypt,
3247 .ivsize = CTR_RFC3686_IV_SIZE,
3248 .maxauthsize = SHA512_DIGEST_SIZE,
3251 .class1_alg_type = OP_ALG_ALGSEL_AES |
3252 OP_ALG_AAI_CTR_MOD128,
3253 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3254 OP_ALG_AAI_HMAC_PRECOMP,
/*
 * caam_crypto_alg - driver wrapper pairing a generic crypto_alg with the
 * CAAM-specific algorithm selector info used to build descriptors.
 * NOTE(review): closing brace is elided from this listing.
 */
3261 struct caam_crypto_alg {
3262 struct crypto_alg crypto_alg; /* generic crypto API algorithm descriptor */
3263 struct list_head entry; /* node in the module-wide alg_list of registered algs */
3264 struct caam_alg_entry caam; /* class1/class2 OP_ALG type words for this alg */
/*
 * caam_init_common - common transform-init path: acquire a job ring for
 * this tfm and DMA-map the context's shared descriptors and key area as
 * one contiguous region, then record the per-field bus addresses.
 * Returns 0 on success or a negative errno (elided lines include the
 * final "return 0" and error return; gaps in the numbering below).
 */
3267 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
3269 dma_addr_t dma_addr;
/* Reserve a CAAM job ring device for submitting this tfm's jobs */
3271 ctx->jrdev = caam_jr_alloc();
3272 if (IS_ERR(ctx->jrdev)) {
3273 pr_err("Job Ring Device allocation for transform failed\n");
3274 return PTR_ERR(ctx->jrdev);
/*
 * Single mapping starting at sh_desc_enc; the offsetof() size argument is
 * split across elided lines. DMA_ATTR_SKIP_CPU_SYNC defers cache
 * maintenance — presumably explicit dma_sync_* calls happen when the
 * descriptors/key are (re)written (not visible here; confirm in setkey).
 */
3277 dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3278 offsetof(struct caam_ctx,
3280 DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
3281 if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3282 dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3283 caam_jr_free(ctx->jrdev); /* undo the job ring allocation on failure */
/* Derive each descriptor's bus address from the one base mapping */
3287 ctx->sh_desc_enc_dma = dma_addr;
3288 ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3290 ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx,
3292 ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
3294 /* copy descriptor header template value */
3295 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3296 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
/*
 * caam_cra_init - cra_init hook for (ablk)cipher tfms: recover the
 * driver wrapper from the generic alg and run the common context init.
 */
3301 static int caam_cra_init(struct crypto_tfm *tfm)
3303 struct crypto_alg *alg = tfm->__crt_alg;
3304 struct caam_crypto_alg *caam_alg =
3305 container_of(alg, struct caam_crypto_alg, crypto_alg);
3306 struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
3308 return caam_init_common(ctx, &caam_alg->caam);
/*
 * caam_aead_init - init hook for AEAD tfms: same pattern as
 * caam_cra_init but recovers the caam_aead_alg wrapper instead.
 */
3311 static int caam_aead_init(struct crypto_aead *tfm)
3313 struct aead_alg *alg = crypto_aead_alg(tfm);
3314 struct caam_aead_alg *caam_alg =
3315 container_of(alg, struct caam_aead_alg, aead);
3316 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3318 return caam_init_common(ctx, &caam_alg->caam);
/*
 * caam_exit_common - tear down what caam_init_common set up: unmap the
 * shared-descriptor/key region and release the job ring.
 * The unmap size offsetof(struct caam_ctx, sh_desc_enc_dma) covers the
 * fields preceding the DMA handles — assumed to match the size used at
 * map time (that line is elided above; confirm against caam_init_common).
 */
3321 static void caam_exit_common(struct caam_ctx *ctx)
3323 dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3324 offsetof(struct caam_ctx, sh_desc_enc_dma),
3325 DMA_TO_DEVICE, DMA_ATTR_SKIP_CPU_SYNC);
3326 caam_jr_free(ctx->jrdev);
/* caam_cra_exit - cra_exit hook: delegate to the common teardown */
3329 static void caam_cra_exit(struct crypto_tfm *tfm)
3331 caam_exit_common(crypto_tfm_ctx(tfm));
/* caam_aead_exit - AEAD exit hook: delegate to the common teardown */
3334 static void caam_aead_exit(struct crypto_aead *tfm)
3336 caam_exit_common(crypto_aead_ctx(tfm));
/*
 * caam_algapi_exit - module unload: unregister every AEAD that was
 * successfully registered (tracked via t_alg->registered), then drain
 * alg_list, unregistering and unlinking each (ablk)cipher alg.
 * NOTE(review): the loop-index declaration and kfree of list entries are
 * elided from this listing.
 */
3339 static void __exit caam_algapi_exit(void)
3342 struct caam_crypto_alg *t_alg, *n;
/* AEADs live in the static driver_aeads[] table, not on alg_list */
3345 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3346 struct caam_aead_alg *t_alg = driver_aeads + i;
3348 if (t_alg->registered)
3349 crypto_unregister_aead(&t_alg->aead);
/* _safe variant: entries are unlinked (and presumably freed) while iterating */
3355 list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
3356 crypto_unregister_alg(&t_alg->crypto_alg);
3357 list_del(&t_alg->entry);
/*
 * caam_alg_alloc - build a caam_crypto_alg from a driver_algs[] template:
 * allocate the wrapper, fill in the generic crypto_alg fields, and copy
 * the CAAM class1/class2 type words. Returns the wrapper or
 * ERR_PTR(-ENOMEM). NOTE(review): the NULL check after kzalloc, the
 * switch's break/closing lines, and the final return are elided here.
 */
3362 static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
3365 struct caam_crypto_alg *t_alg;
3366 struct crypto_alg *alg;
3368 t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
3370 pr_err("failed to allocate t_alg\n");
3371 return ERR_PTR(-ENOMEM);
3374 alg = &t_alg->crypto_alg;
/* Copy names with truncation-safe bounded formatting */
3376 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
3377 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
3378 template->driver_name);
3379 alg->cra_module = THIS_MODULE;
3380 alg->cra_init = caam_cra_init;
3381 alg->cra_exit = caam_cra_exit;
3382 alg->cra_priority = CAAM_CRA_PRIORITY;
3383 alg->cra_blocksize = template->blocksize;
3384 alg->cra_alignmask = 0; /* hardware imposes no alignment requirement */
3385 alg->cra_ctxsize = sizeof(struct caam_ctx);
3386 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
/* Select the crypto API type glue per template kind */
3388 switch (template->type) {
3389 case CRYPTO_ALG_TYPE_GIVCIPHER:
3390 alg->cra_type = &crypto_givcipher_type;
3391 alg->cra_ablkcipher = template->template_ablkcipher;
3393 case CRYPTO_ALG_TYPE_ABLKCIPHER:
3394 alg->cra_type = &crypto_ablkcipher_type;
3395 alg->cra_ablkcipher = template->template_ablkcipher;
/* Stash CAAM descriptor type words for caam_init_common() */
3399 t_alg->caam.class1_alg_type = template->class1_alg_type;
3400 t_alg->caam.class2_alg_type = template->class2_alg_type;
/*
 * caam_aead_alg_init - fill in the driver-common fields of a static
 * driver_aeads[] entry before registration (module, priority, ctx size,
 * flags, and the init/exit hooks). Per-algorithm fields are already set
 * in the table initializer.
 */
3405 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3407 struct aead_alg *alg = &t_alg->aead;
3409 alg->base.cra_module = THIS_MODULE;
3410 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3411 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3412 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3414 alg->init = caam_aead_init;
3415 alg->exit = caam_aead_exit;
/*
 * caam_algapi_init - module init: locate the CAAM controller via the
 * device tree, read which engine blocks (DES/AES/MD) the hardware
 * instantiates, then register only the algorithms the device supports.
 * NOTE(review): this listing is elided — NULL checks, "continue"
 * statements after the skip-conditions, error labels and the final
 * return are not visible; comments below describe only what is shown.
 */
3418 static int __init caam_algapi_init(void)
3420 struct device_node *dev_node;
3421 struct platform_device *pdev;
3422 struct device *ctrldev;
3423 struct caam_drv_private *priv;
3425 u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
3426 unsigned int md_limit = SHA512_DIGEST_SIZE;
3427 bool registered = false;
/* Probe both compatible strings used across SEC 4.x device trees */
3429 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
3431 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
3436 pdev = of_find_device_by_node(dev_node);
3438 of_node_put(dev_node);
3442 ctrldev = &pdev->dev;
3443 priv = dev_get_drvdata(ctrldev);
3444 of_node_put(dev_node);
3447 * If priv is NULL, it's probably because the caam driver wasn't
3448 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3454 INIT_LIST_HEAD(&alg_list);
3457 * Register crypto algorithms the device supports.
3458 * First, detect presence and attributes of DES, AES, and MD blocks.
3460 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3461 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3462 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
3463 aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
3464 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3466 /* If MD is present, limit digest size based on LP256 */
3467 if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
3468 md_limit = SHA256_DIGEST_SIZE;
/* Pass 1: (ablk)ciphers from the driver_algs[] template table */
3470 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3471 struct caam_crypto_alg *t_alg;
3472 struct caam_alg_template *alg = driver_algs + i;
3473 u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;
3475 /* Skip DES algorithms if not supported by device */
3477 ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3478 (alg_sel == OP_ALG_ALGSEL_DES)))
3481 /* Skip AES algorithms if not supported by device */
3482 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3486 * Check support for AES modes not available
3489 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3490 if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
3494 t_alg = caam_alg_alloc(alg);
3495 if (IS_ERR(t_alg)) {
3496 err = PTR_ERR(t_alg);
/* Allocation failure is non-fatal: warn and move on to the next alg */
3497 pr_warn("%s alg allocation failed\n", alg->driver_name);
3501 err = crypto_register_alg(&t_alg->crypto_alg);
3503 pr_warn("%s alg registration failed\n",
3504 t_alg->crypto_alg.cra_driver_name);
/* Track successful registrations for teardown in caam_algapi_exit() */
3509 list_add_tail(&t_alg->entry, &alg_list);
/* Pass 2: AEADs from the static driver_aeads[] table */
3513 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3514 struct caam_aead_alg *t_alg = driver_aeads + i;
3515 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3517 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3519 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3521 /* Skip DES algorithms if not supported by device */
3523 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3524 (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3527 /* Skip AES algorithms if not supported by device */
3528 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3532 * Check support for AES algorithms not available
3535 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3536 if (alg_aai == OP_ALG_AAI_GCM)
3540 * Skip algorithms requiring message digests
3541 * if MD or MD size is not supported by device.
3544 (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
3547 caam_aead_alg_init(t_alg);
3549 err = crypto_register_aead(&t_alg->aead);
3551 pr_warn("%s alg registration failed\n",
3552 t_alg->aead.base.cra_driver_name);
/* Mark so caam_algapi_exit() knows to unregister this entry */
3556 t_alg->registered = true;
3561 pr_info("caam algorithms registered in /proc/crypto\n");
/* Module entry/exit hookup and metadata */
3566 module_init(caam_algapi_init);
3567 module_exit(caam_algapi_exit);
3569 MODULE_LICENSE("GPL");
3570 MODULE_DESCRIPTION("FSL CAAM support for crypto API");
3571 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");