2 * caam - Freescale FSL CAAM support for crypto API
4 * Copyright 2008-2011 Freescale Semiconductor, Inc.
7 * Based on talitos crypto API driver.
9 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
11 * --------------- ---------------
12 * | JobDesc #1 |-------------------->| ShareDesc |
13 * | *(packet 1) | | (PDB) |
14 * --------------- |------------->| (hashKey) |
16 * . | |-------->| (operation) |
17 * --------------- | | ---------------
18 * | JobDesc #2 |------| |
24 * | JobDesc #3 |------------
28 * The SharedDesc never changes for a connection unless rekeyed, but
29 * each packet will likely be in a different place. So all we need
30 * to know to process the packet is where the input is, where the
31 * output goes, and what context we want to process with. Context is
32 * in the SharedDesc, packet references in the JobDesc.
34 * So, a job desc looks like:
36 * ---------------------
38 * | ShareDesc Pointer |
45 * ---------------------
52 #include "desc_constr.h"
55 #include "sg_sw_sec4.h"
57 #include "caamalg_desc.h"
62 #define CAAM_CRA_PRIORITY 3000
63 /* max key is sum of AES_MAX_KEY_SIZE, max split key size */
64 #define CAAM_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + \
65 CTR_RFC3686_NONCE_SIZE + \
66 SHA512_DIGEST_SIZE * 2)
68 #define AEAD_DESC_JOB_IO_LEN (DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
69 #define GCM_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
71 #define AUTHENC_DESC_JOB_IO_LEN (AEAD_DESC_JOB_IO_LEN + \
74 #define DESC_MAX_USED_BYTES (CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
75 #define DESC_MAX_USED_LEN (DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
78 /* for print_hex_dumps with line references */
79 #define debug(format, arg...) printk(format, arg)
81 #define debug(format, arg...)
85 #include <linux/highmem.h>
87 static void dbg_dump_sg(const char *level, const char *prefix_str,
88 int prefix_type, int rowsize, int groupsize,
89 struct scatterlist *sg, size_t tlen, bool ascii)
91 struct scatterlist *it;
96 for (it = sg; it != NULL && tlen > 0 ; it = sg_next(sg)) {
98 * make sure the scatterlist's page
99 * has a valid virtual memory mapping
101 it_page = kmap_atomic(sg_page(it));
102 if (unlikely(!it_page)) {
103 printk(KERN_ERR "dbg_dump_sg: kmap failed\n");
107 buf = it_page + it->offset;
108 len = min_t(size_t, tlen, it->length);
109 print_hex_dump(level, prefix_str, prefix_type, rowsize,
110 groupsize, buf, len, ascii);
113 kunmap_atomic(it_page);
/* List of algorithm instances this driver has registered with the crypto API. */
118 static struct list_head alg_list;

/* NOTE(review): caam_alg_entry's members are elided from this view. */
120 struct caam_alg_entry {

/* AEAD template: generic crypto API aead_alg plus CAAM-specific parameters. */
127 struct caam_aead_alg {
128 struct aead_alg aead;
129 struct caam_alg_entry caam;
134 * per-session context
/*
 * Per-tfm state: job-ring device, the three pre-built shared descriptors
 * (encrypt / decrypt / givencrypt) with their DMA handles, the raw key
 * material (split auth key followed by the cipher key), and the
 * auth/cipher algorithm info consumed by the descriptor constructors.
 */
137 struct device *jrdev;
138 u32 sh_desc_enc[DESC_MAX_USED_LEN];
139 u32 sh_desc_dec[DESC_MAX_USED_LEN];
140 u32 sh_desc_givenc[DESC_MAX_USED_LEN];
141 dma_addr_t sh_desc_enc_dma;
142 dma_addr_t sh_desc_dec_dma;
143 dma_addr_t sh_desc_givenc_dma;
144 u8 key[CAAM_MAX_KEY_SIZE];
146 struct alginfo adata;
147 struct alginfo cdata;
148 unsigned int authsize;
/*
 * aead_null_set_sh_desc() - build the encrypt and decrypt shared
 * descriptors for authenc with a NULL cipher (authentication only).
 * The split auth key is inlined into the descriptor when it fits in the
 * 64-word h/w buffer, otherwise referenced by DMA address.  Each built
 * descriptor is DMA-mapped; mapping failure is reported via dev_err.
 */
151 static int aead_null_set_sh_desc(struct crypto_aead *aead)
153 struct caam_ctx *ctx = crypto_aead_ctx(aead);
154 struct device *jrdev = ctx->jrdev;
/* Space left for the shared descriptor after job-descriptor I/O overhead. */
156 int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
157 ctx->adata.keylen_pad;
160 * Job Descriptor and Shared Descriptors
161 * must all fit into the 64-word Descriptor h/w Buffer
163 if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
164 ctx->adata.key_inline = true;
165 ctx->adata.key_virt = ctx->key;
167 ctx->adata.key_inline = false;
168 ctx->adata.key_dma = ctx->key_dma;
171 /* aead_encrypt shared descriptor */
172 desc = ctx->sh_desc_enc;
173 cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize);
174 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
177 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
178 dev_err(jrdev, "unable to map shared descriptor\n");
183 * Job Descriptor and Shared Descriptors
184 * must all fit into the 64-word Descriptor h/w Buffer
186 if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
187 ctx->adata.key_inline = true;
188 ctx->adata.key_virt = ctx->key;
190 ctx->adata.key_inline = false;
191 ctx->adata.key_dma = ctx->key_dma;
194 /* aead_decrypt shared descriptor */
195 desc = ctx->sh_desc_dec;
196 cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize);
197 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
200 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
201 dev_err(jrdev, "unable to map shared descriptor\n");
/*
 * aead_set_sh_desc() - (re)build the shared descriptors for an authenc
 * (cipher + HMAC) transform: encrypt, decrypt, and - when the algorithm
 * generates its own IV (alg->caam.geniv) - givencrypt.  For each
 * descriptor, desc_inline_query() decides whether the auth and/or cipher
 * keys can be inlined into the 64-word descriptor buffer or must be
 * referenced by DMA address.  CTR mode places the IV at a 16-byte offset
 * in CONTEXT1; RFC3686 additionally prepends the nonce.
 */
208 static int aead_set_sh_desc(struct crypto_aead *aead)
210 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
211 struct caam_aead_alg, aead);
212 unsigned int ivsize = crypto_aead_ivsize(aead);
213 struct caam_ctx *ctx = crypto_aead_ctx(aead);
214 struct device *jrdev = ctx->jrdev;
216 u32 *desc, *nonce = NULL;
218 unsigned int data_len[2];
219 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
220 OP_ALG_AAI_CTR_MOD128);
221 const bool is_rfc3686 = alg->caam.rfc3686;
226 /* NULL encryption / decryption */
227 if (!ctx->cdata.keylen)
228 return aead_null_set_sh_desc(aead);
231 * AES-CTR needs to load IV in CONTEXT1 reg
232 * at an offset of 128bits (16bytes)
233 * CONTEXT1[255:128] = IV
240 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
243 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
/* RFC3686 nonce lives at the tail of the stored cipher key. */
244 nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
245 ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
/* data_len[0]: padded split auth key; data_len[1]: cipher key. */
248 data_len[0] = ctx->adata.keylen_pad;
249 data_len[1] = ctx->cdata.keylen;
255 * Job Descriptor and Shared Descriptors
256 * must all fit into the 64-word Descriptor h/w Buffer
258 if (desc_inline_query(DESC_AEAD_ENC_LEN +
259 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
260 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
261 ARRAY_SIZE(data_len)) < 0)
265 ctx->adata.key_virt = ctx->key;
267 ctx->adata.key_dma = ctx->key_dma;
270 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
272 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
/* inl_mask bit 0: inline auth key; bit 1: inline cipher key. */
274 ctx->adata.key_inline = !!(inl_mask & 1);
275 ctx->cdata.key_inline = !!(inl_mask & 2);
277 /* aead_encrypt shared descriptor */
278 desc = ctx->sh_desc_enc;
279 cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ctx->authsize,
280 is_rfc3686, nonce, ctx1_iv_off);
281 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
284 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
285 dev_err(jrdev, "unable to map shared descriptor\n");
291 * Job Descriptor and Shared Descriptors
292 * must all fit into the 64-word Descriptor h/w Buffer
294 if (desc_inline_query(DESC_AEAD_DEC_LEN +
295 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
296 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
297 ARRAY_SIZE(data_len)) < 0)
301 ctx->adata.key_virt = ctx->key;
303 ctx->adata.key_dma = ctx->key_dma;
306 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
308 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
310 ctx->adata.key_inline = !!(inl_mask & 1);
311 ctx->cdata.key_inline = !!(inl_mask & 2);
313 /* aead_decrypt shared descriptor */
314 desc = ctx->sh_desc_dec;
315 cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
316 ctx->authsize, alg->caam.geniv, is_rfc3686,
318 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
321 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
322 dev_err(jrdev, "unable to map shared descriptor\n");
/* Only geniv algorithms need the givencrypt descriptor below. */
326 if (!alg->caam.geniv)
330 * Job Descriptor and Shared Descriptors
331 * must all fit into the 64-word Descriptor h/w Buffer
333 if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
334 (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
335 AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
336 ARRAY_SIZE(data_len)) < 0)
340 ctx->adata.key_virt = ctx->key;
342 ctx->adata.key_dma = ctx->key_dma;
345 ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
347 ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
349 ctx->adata.key_inline = !!(inl_mask & 1);
350 ctx->cdata.key_inline = !!(inl_mask & 2);
352 /* aead_givencrypt shared descriptor */
/* NOTE(review): givencrypt reuses sh_desc_enc, replacing the plain
 * encrypt descriptor built above - confirm intended for geniv algs. */
353 desc = ctx->sh_desc_enc;
354 cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
355 ctx->authsize, is_rfc3686, nonce,
357 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
360 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
361 dev_err(jrdev, "unable to map shared descriptor\n");
369 static int aead_setauthsize(struct crypto_aead *authenc,
370 unsigned int authsize)
372 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
374 ctx->authsize = authsize;
375 aead_set_sh_desc(authenc);
/*
 * gcm_set_sh_desc() - build the AES-GCM encrypt and decrypt shared
 * descriptors.  A no-op until both the key and the authsize have been
 * set.  The AES key is inlined when the descriptor fits in the 64-word
 * buffer, otherwise referenced by DMA address.
 */
380 static int gcm_set_sh_desc(struct crypto_aead *aead)
382 struct caam_ctx *ctx = crypto_aead_ctx(aead);
383 struct device *jrdev = ctx->jrdev;
385 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
/* Defer until both setkey and setauthsize have run. */
388 if (!ctx->cdata.keylen || !ctx->authsize)
392 * AES GCM encrypt shared descriptor
393 * Job Descriptor and Shared Descriptor
394 * must fit into the 64-word Descriptor h/w Buffer
396 if (rem_bytes >= DESC_GCM_ENC_LEN) {
397 ctx->cdata.key_inline = true;
398 ctx->cdata.key_virt = ctx->key;
400 ctx->cdata.key_inline = false;
401 ctx->cdata.key_dma = ctx->key_dma;
404 desc = ctx->sh_desc_enc;
405 cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ctx->authsize);
406 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
409 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
410 dev_err(jrdev, "unable to map shared descriptor\n");
415 * Job Descriptor and Shared Descriptors
416 * must all fit into the 64-word Descriptor h/w Buffer
418 if (rem_bytes >= DESC_GCM_DEC_LEN) {
419 ctx->cdata.key_inline = true;
420 ctx->cdata.key_virt = ctx->key;
422 ctx->cdata.key_inline = false;
423 ctx->cdata.key_dma = ctx->key_dma;
426 desc = ctx->sh_desc_dec;
427 cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ctx->authsize);
428 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
431 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
432 dev_err(jrdev, "unable to map shared descriptor\n");
439 static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
441 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
443 ctx->authsize = authsize;
444 gcm_set_sh_desc(authenc);
/*
 * rfc4106_set_sh_desc() - build the RFC4106 (GCM for IPsec ESP) encrypt
 * and decrypt shared descriptors.  Same structure as gcm_set_sh_desc():
 * no-op until key and authsize are set; key inlined if it fits.
 */
449 static int rfc4106_set_sh_desc(struct crypto_aead *aead)
451 struct caam_ctx *ctx = crypto_aead_ctx(aead);
452 struct device *jrdev = ctx->jrdev;
454 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
/* Defer until both setkey and setauthsize have run. */
457 if (!ctx->cdata.keylen || !ctx->authsize)
461 * RFC4106 encrypt shared descriptor
462 * Job Descriptor and Shared Descriptor
463 * must fit into the 64-word Descriptor h/w Buffer
465 if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
466 ctx->cdata.key_inline = true;
467 ctx->cdata.key_virt = ctx->key;
469 ctx->cdata.key_inline = false;
470 ctx->cdata.key_dma = ctx->key_dma;
473 desc = ctx->sh_desc_enc;
474 cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ctx->authsize);
475 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
478 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
479 dev_err(jrdev, "unable to map shared descriptor\n");
484 * Job Descriptor and Shared Descriptors
485 * must all fit into the 64-word Descriptor h/w Buffer
487 if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
488 ctx->cdata.key_inline = true;
489 ctx->cdata.key_virt = ctx->key;
491 ctx->cdata.key_inline = false;
492 ctx->cdata.key_dma = ctx->key_dma;
495 desc = ctx->sh_desc_dec;
496 cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ctx->authsize);
497 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
500 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
501 dev_err(jrdev, "unable to map shared descriptor\n");
508 static int rfc4106_setauthsize(struct crypto_aead *authenc,
509 unsigned int authsize)
511 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
513 ctx->authsize = authsize;
514 rfc4106_set_sh_desc(authenc);
/*
 * rfc4543_set_sh_desc() - build the RFC4543 (GMAC) encrypt and decrypt
 * shared descriptors.  Same structure as gcm_set_sh_desc(): no-op until
 * key and authsize are set; key inlined if the descriptor fits.
 */
519 static int rfc4543_set_sh_desc(struct crypto_aead *aead)
521 struct caam_ctx *ctx = crypto_aead_ctx(aead);
522 struct device *jrdev = ctx->jrdev;
524 int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
/* Defer until both setkey and setauthsize have run. */
527 if (!ctx->cdata.keylen || !ctx->authsize)
531 * RFC4543 encrypt shared descriptor
532 * Job Descriptor and Shared Descriptor
533 * must fit into the 64-word Descriptor h/w Buffer
535 if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
536 ctx->cdata.key_inline = true;
537 ctx->cdata.key_virt = ctx->key;
539 ctx->cdata.key_inline = false;
540 ctx->cdata.key_dma = ctx->key_dma;
543 desc = ctx->sh_desc_enc;
544 cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ctx->authsize);
545 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
548 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
549 dev_err(jrdev, "unable to map shared descriptor\n");
554 * Job Descriptor and Shared Descriptors
555 * must all fit into the 64-word Descriptor h/w Buffer
557 if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
558 ctx->cdata.key_inline = true;
559 ctx->cdata.key_virt = ctx->key;
561 ctx->cdata.key_inline = false;
562 ctx->cdata.key_dma = ctx->key_dma;
565 desc = ctx->sh_desc_dec;
566 cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ctx->authsize);
567 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
570 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
571 dev_err(jrdev, "unable to map shared descriptor\n");
578 static int rfc4543_setauthsize(struct crypto_aead *authenc,
579 unsigned int authsize)
581 struct caam_ctx *ctx = crypto_aead_ctx(authenc);
583 ctx->authsize = authsize;
584 rfc4543_set_sh_desc(authenc);
/*
 * aead_setkey() - crypto API setkey for authenc algorithms.  Splits the
 * combined key into auth and enc parts, derives the hardware "split key"
 * for the auth part via gen_split_key(), appends the raw enc key, DMA-maps
 * the concatenated buffer, and rebuilds the shared descriptors.  On bad
 * key material the CRYPTO_TFM_RES_BAD_KEY_LEN flag is set.
 */
589 static int aead_setkey(struct crypto_aead *aead,
590 const u8 *key, unsigned int keylen)
592 struct caam_ctx *ctx = crypto_aead_ctx(aead);
593 struct device *jrdev = ctx->jrdev;
594 struct crypto_authenc_keys keys;
597 if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
601 printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
602 keys.authkeylen + keys.enckeylen, keys.enckeylen,
604 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
605 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
/* Derive the padded split key for the authentication algorithm. */
608 ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
609 keys.authkeylen, CAAM_MAX_KEY_SIZE -
615 /* postpend encryption key to auth split key */
616 memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
618 ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->adata.keylen_pad +
619 keys.enckeylen, DMA_TO_DEVICE);
620 if (dma_mapping_error(jrdev, ctx->key_dma)) {
621 dev_err(jrdev, "unable to map key i/o memory\n");
625 print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
626 DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
627 ctx->adata.keylen_pad + keys.enckeylen, 1);
630 ctx->cdata.keylen = keys.enckeylen;
632 ret = aead_set_sh_desc(aead);
/* Descriptor construction failed: undo the key mapping. */
634 dma_unmap_single(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
635 keys.enckeylen, DMA_TO_DEVICE);
640 crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
/*
 * gcm_setkey() - store the raw AES key, DMA-map it, and rebuild the GCM
 * shared descriptors.  On descriptor failure the key mapping is undone.
 */
644 static int gcm_setkey(struct crypto_aead *aead,
645 const u8 *key, unsigned int keylen)
647 struct caam_ctx *ctx = crypto_aead_ctx(aead);
648 struct device *jrdev = ctx->jrdev;
652 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
653 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
656 memcpy(ctx->key, key, keylen);
657 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
659 if (dma_mapping_error(jrdev, ctx->key_dma)) {
660 dev_err(jrdev, "unable to map key i/o memory\n");
663 ctx->cdata.keylen = keylen;
665 ret = gcm_set_sh_desc(aead);
667 dma_unmap_single(jrdev, ctx->key_dma, ctx->cdata.keylen,
/*
 * rfc4106_setkey() - RFC4106 keys carry a 4-byte salt after the AES key;
 * store the whole blob, but only the AES portion (keylen - 4) counts as
 * cipher key and is DMA-mapped.  Rebuilds the shared descriptors.
 */
674 static int rfc4106_setkey(struct crypto_aead *aead,
675 const u8 *key, unsigned int keylen)
677 struct caam_ctx *ctx = crypto_aead_ctx(aead);
678 struct device *jrdev = ctx->jrdev;
685 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
686 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
689 memcpy(ctx->key, key, keylen);
692 * The last four bytes of the key material are used as the salt value
693 * in the nonce. Update the AES key length.
695 ctx->cdata.keylen = keylen - 4;
697 ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->cdata.keylen,
699 if (dma_mapping_error(jrdev, ctx->key_dma)) {
700 dev_err(jrdev, "unable to map key i/o memory\n");
704 ret = rfc4106_set_sh_desc(aead);
707 dma_unmap_single(jrdev, ctx->key_dma, ctx->cdata.keylen,
/*
 * rfc4543_setkey() - identical key handling to rfc4106_setkey(): the
 * trailing 4 bytes are the nonce salt, only keylen - 4 bytes of AES key
 * are DMA-mapped, then the RFC4543 shared descriptors are rebuilt.
 */
713 static int rfc4543_setkey(struct crypto_aead *aead,
714 const u8 *key, unsigned int keylen)
716 struct caam_ctx *ctx = crypto_aead_ctx(aead);
717 struct device *jrdev = ctx->jrdev;
724 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
725 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
728 memcpy(ctx->key, key, keylen);
731 * The last four bytes of the key material are used as the salt value
732 * in the nonce. Update the AES key length.
734 ctx->cdata.keylen = keylen - 4;
736 ctx->key_dma = dma_map_single(jrdev, ctx->key, ctx->cdata.keylen,
738 if (dma_mapping_error(jrdev, ctx->key_dma)) {
739 dev_err(jrdev, "unable to map key i/o memory\n");
743 ret = rfc4543_set_sh_desc(aead);
745 dma_unmap_single(jrdev, ctx->key_dma, ctx->cdata.keylen,
/*
 * ablkcipher_setkey() - store and DMA-map the cipher key, then build all
 * three ablkcipher shared descriptors (encrypt, decrypt, givencrypt).
 * The key is always inlined.  RFC3686 mode is detected from the
 * algorithm name; its trailing nonce is kept in ctx->key but excluded
 * from the reported key length.
 */
752 static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
753 const u8 *key, unsigned int keylen)
755 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
756 struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
757 const char *alg_name = crypto_tfm_alg_name(tfm);
758 struct device *jrdev = ctx->jrdev;
759 unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
762 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
763 OP_ALG_AAI_CTR_MOD128);
764 const bool is_rfc3686 = (ctr_mode &&
765 (strstr(alg_name, "rfc3686") != NULL));
767 memcpy(ctx->key, key, keylen);
769 print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
770 DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
773 * AES-CTR needs to load IV in CONTEXT1 reg
774 * at an offset of 128bits (16bytes)
775 * CONTEXT1[255:128] = IV
782 * | CONTEXT1[255:128] = {NONCE, IV, COUNTER}
783 * | *key = {KEY, NONCE}
786 ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
/* Nonce stays in ctx->key but is not part of the AES key proper. */
787 keylen -= CTR_RFC3686_NONCE_SIZE;
790 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen,
792 if (dma_mapping_error(jrdev, ctx->key_dma)) {
793 dev_err(jrdev, "unable to map key i/o memory\n");
796 ctx->cdata.keylen = keylen;
797 ctx->cdata.key_virt = ctx->key;
798 ctx->cdata.key_inline = true;
800 /* ablkcipher_encrypt shared descriptor */
801 desc = ctx->sh_desc_enc;
802 cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
804 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc,
807 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
808 dev_err(jrdev, "unable to map shared descriptor\n");
812 /* ablkcipher_decrypt shared descriptor */
813 desc = ctx->sh_desc_dec;
814 cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
816 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc,
819 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
820 dev_err(jrdev, "unable to map shared descriptor\n");
824 /* ablkcipher_givencrypt shared descriptor */
825 desc = ctx->sh_desc_givenc;
826 cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
828 ctx->sh_desc_givenc_dma = dma_map_single(jrdev, desc,
831 if (dma_mapping_error(jrdev, ctx->sh_desc_givenc_dma)) {
832 dev_err(jrdev, "unable to map shared descriptor\n");
/*
 * xts_ablkcipher_setkey() - XTS uses two AES keys, so only key lengths
 * of 2 * AES_MIN_KEY_SIZE or 2 * AES_MAX_KEY_SIZE are accepted.  Stores
 * and DMA-maps the key, then builds the XTS encrypt/decrypt shared
 * descriptors.  Note the decrypt-map error path explicitly unmaps the
 * already-mapped encrypt descriptor.
 */
839 static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
840 const u8 *key, unsigned int keylen)
842 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
843 struct device *jrdev = ctx->jrdev;
846 if (keylen != 2 * AES_MIN_KEY_SIZE && keylen != 2 * AES_MAX_KEY_SIZE) {
847 crypto_ablkcipher_set_flags(ablkcipher,
848 CRYPTO_TFM_RES_BAD_KEY_LEN);
849 dev_err(jrdev, "key size mismatch\n");
853 memcpy(ctx->key, key, keylen);
854 ctx->key_dma = dma_map_single(jrdev, ctx->key, keylen, DMA_TO_DEVICE);
855 if (dma_mapping_error(jrdev, ctx->key_dma)) {
856 dev_err(jrdev, "unable to map key i/o memory\n");
859 ctx->cdata.keylen = keylen;
860 ctx->cdata.key_virt = ctx->key;
861 ctx->cdata.key_inline = true;
863 /* xts_ablkcipher_encrypt shared descriptor */
864 desc = ctx->sh_desc_enc;
865 cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
866 ctx->sh_desc_enc_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
868 if (dma_mapping_error(jrdev, ctx->sh_desc_enc_dma)) {
869 dev_err(jrdev, "unable to map shared descriptor\n");
873 /* xts_ablkcipher_decrypt shared descriptor */
874 desc = ctx->sh_desc_dec;
875 cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
876 ctx->sh_desc_dec_dma = dma_map_single(jrdev, desc, desc_bytes(desc),
878 if (dma_mapping_error(jrdev, ctx->sh_desc_dec_dma)) {
879 dma_unmap_single(jrdev, ctx->sh_desc_enc_dma,
880 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
881 dev_err(jrdev, "unable to map shared descriptor\n");
889 * aead_edesc - s/w-extended aead descriptor
890 * @src_nents: number of segments in input scatterlist
891 * @dst_nents: number of segments in output scatterlist
892 * @sec4_sg_bytes: length of dma mapped sec4_sg space
893 * @sec4_sg_dma: bus physical mapped address of h/w link table
894 * @sec4_sg: pointer to h/w link table
895 * @hw_desc: the h/w job descriptor followed by any referenced link tables
/* Allocated per-request; freed when the job-ring completion fires. */
901 dma_addr_t sec4_sg_dma;
902 struct sec4_sg_entry *sec4_sg;

907 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
908 * @src_nents: number of segments in input scatterlist
909 * @dst_nents: number of segments in output scatterlist
910 * @iv_dma: dma address of iv for checking continuity and link table
911 * @sec4_sg_bytes: length of dma mapped sec4_sg space
912 * @sec4_sg_dma: bus physical mapped address of h/w link table
913 * @sec4_sg: pointer to h/w link table
914 * @hw_desc: the h/w job descriptor followed by any referenced link tables
916 struct ablkcipher_edesc {
921 dma_addr_t sec4_sg_dma;
922 struct sec4_sg_entry *sec4_sg;
/*
 * caam_unmap() - common DMA teardown for a completed job: unmap the
 * source (and, when distinct, destination) scatterlists, the IV buffer
 * (if mapped), and the sec4 S/G link table.
 */
926 static void caam_unmap(struct device *dev, struct scatterlist *src,
927 struct scatterlist *dst, int src_nents,
929 dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
/* src != dst: separate TO_DEVICE/FROM_DEVICE mappings to undo. */
933 dma_unmap_sg(dev, src, src_nents ? : 1, DMA_TO_DEVICE);
934 dma_unmap_sg(dev, dst, dst_nents ? : 1, DMA_FROM_DEVICE);
/* In-place (src == dst) was mapped bidirectionally. */
936 dma_unmap_sg(dev, src, src_nents ? : 1, DMA_BIDIRECTIONAL);
940 dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
942 dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
/* aead_unmap() - unmap an AEAD request's DMA resources (no IV mapping). */
946 static void aead_unmap(struct device *dev,
947 struct aead_edesc *edesc,
948 struct aead_request *req)
950 caam_unmap(dev, req->src, req->dst,
951 edesc->src_nents, edesc->dst_nents, 0, 0,
952 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
/*
 * ablkcipher_unmap() - unmap an ablkcipher request's DMA resources,
 * including the separately mapped IV.
 */
955 static void ablkcipher_unmap(struct device *dev,
956 struct ablkcipher_edesc *edesc,
957 struct ablkcipher_request *req)
959 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
960 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
962 caam_unmap(dev, req->src, req->dst,
963 edesc->src_nents, edesc->dst_nents,
964 edesc->iv_dma, ivsize,
965 edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
/*
 * aead_encrypt_done() - job-ring completion callback for AEAD encrypt:
 * translate hardware status, unmap DMA resources, complete the request.
 */
968 static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
971 struct aead_request *req = context;
972 struct aead_edesc *edesc;
975 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
/* Recover the edesc from its embedded hw descriptor. */
978 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
981 caam_jr_strstatus(jrdev, err);
983 aead_unmap(jrdev, edesc, req);
987 aead_request_complete(req, err);
/*
 * aead_decrypt_done() - job-ring completion callback for AEAD decrypt.
 * Same as aead_encrypt_done() plus ICV-check translation: a hardware
 * ICV-check error becomes -EBADMSG for the crypto API.
 */
990 static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
993 struct aead_request *req = context;
994 struct aead_edesc *edesc;
997 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1000 edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
1003 caam_jr_strstatus(jrdev, err);
1005 aead_unmap(jrdev, edesc, req);
1008 * verify hw auth check passed else return -EBADMSG
1010 if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
1015 aead_request_complete(req, err);
/*
 * ablkcipher_encrypt_done() - job-ring completion callback for
 * ablkcipher encrypt: translate status, optionally dump the result for
 * debugging, unmap DMA resources, and complete the request.
 */
1018 static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
1021 struct ablkcipher_request *req = context;
1022 struct ablkcipher_edesc *edesc;
1024 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1025 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1027 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1030 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
1033 caam_jr_strstatus(jrdev, err);
1036 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
1037 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
1038 edesc->src_nents > 1 ? 100 : ivsize, 1);
1039 dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
1040 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
1041 edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
1044 ablkcipher_unmap(jrdev, edesc, req);
1047 ablkcipher_request_complete(req, err);
/*
 * ablkcipher_decrypt_done() - job-ring completion callback for
 * ablkcipher decrypt; mirrors ablkcipher_encrypt_done().
 */
1050 static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
1053 struct ablkcipher_request *req = context;
1054 struct ablkcipher_edesc *edesc;
1056 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1057 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1059 dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1062 edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
1064 caam_jr_strstatus(jrdev, err);
1067 print_hex_dump(KERN_ERR, "dstiv @"__stringify(__LINE__)": ",
1068 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
1070 dbg_dump_sg(KERN_ERR, "dst @"__stringify(__LINE__)": ",
1071 DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
1072 edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
1075 ablkcipher_unmap(jrdev, edesc, req);
1078 ablkcipher_request_complete(req, err);
1082 * Fill in aead job descriptor
/*
 * Builds the per-request job descriptor: header pointing at the shared
 * descriptor, SEQ IN pointer covering assoclen + cryptlen (direct DMA
 * when contiguous, else via the sec4 S/G table), SEQ OUT pointer sized
 * +authsize for encrypt / -authsize for decrypt, and REG3 = assoclen
 * for the shared descriptor's math commands.
 */
1084 static void init_aead_job(struct aead_request *req,
1085 struct aead_edesc *edesc,
1086 bool all_contig, bool encrypt)
1088 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1089 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1090 int authsize = ctx->authsize;
1091 u32 *desc = edesc->hw_desc;
1092 u32 out_options, in_options;
1093 dma_addr_t dst_dma, src_dma;
1094 int len, sec4_sg_index = 0;
1098 sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1099 ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1101 len = desc_len(sh_desc);
1102 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1105 src_dma = sg_dma_address(req->src);
1108 src_dma = edesc->sec4_sg_dma;
1109 sec4_sg_index += edesc->src_nents;
1110 in_options = LDST_SGF;
1113 append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
1117 out_options = in_options;
1119 if (unlikely(req->src != req->dst)) {
1120 if (!edesc->dst_nents) {
1121 dst_dma = sg_dma_address(req->dst);
1123 dst_dma = edesc->sec4_sg_dma +
1125 sizeof(struct sec4_sg_entry);
1126 out_options = LDST_SGF;
/* Encrypt: output grows by the ICV; decrypt: ICV is consumed. */
1131 append_seq_out_ptr(desc, dst_dma,
1132 req->assoclen + req->cryptlen + authsize,
1135 append_seq_out_ptr(desc, dst_dma,
1136 req->assoclen + req->cryptlen - authsize,
1139 /* REG3 = assoclen */
1140 append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
/*
 * init_gcm_job() - GCM-specific job descriptor setup on top of
 * init_aead_job(): loads the 12-byte IV into class 1 (flagged LAST1 for
 * the zero-length generic-GCM encrypt case) and emits the salt + IV as
 * immediate data consumed by MOVE commands in the shared descriptor.
 */
1143 static void init_gcm_job(struct aead_request *req,
1144 struct aead_edesc *edesc,
1145 bool all_contig, bool encrypt)
1147 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1148 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1149 unsigned int ivsize = crypto_aead_ivsize(aead);
1150 u32 *desc = edesc->hw_desc;
1151 bool generic_gcm = (ivsize == 12);
1154 init_aead_job(req, edesc, all_contig, encrypt);
1156 /* BUG This should not be specific to generic GCM. */
1158 if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1159 last = FIFOLD_TYPE_LAST1;
1162 append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
1163 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | 12 | last);
/* 4-byte salt stored right after the AES key. */
1166 append_data(desc, ctx->key + ctx->cdata.keylen, 4);
1168 append_data(desc, req->iv, ivsize);
1169 /* End of blank commands */
/*
 * init_authenc_job() - authenc-specific job descriptor setup on top of
 * init_aead_job(): when the request carries an IV (and the algorithm
 * does not generate its own, except RFC3686 encrypt), load it into
 * CONTEXT1 at the mode-dependent offset (16 + nonce size for RFC3686,
 * 16 for plain CTR).
 */
1172 static void init_authenc_job(struct aead_request *req,
1173 struct aead_edesc *edesc,
1174 bool all_contig, bool encrypt)
1176 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1177 struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1178 struct caam_aead_alg, aead);
1179 unsigned int ivsize = crypto_aead_ivsize(aead);
1180 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1181 const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
1182 OP_ALG_AAI_CTR_MOD128);
1183 const bool is_rfc3686 = alg->caam.rfc3686;
1184 u32 *desc = edesc->hw_desc;
1188 * AES-CTR needs to load IV in CONTEXT1 reg
1189 * at an offset of 128bits (16bytes)
1190 * CONTEXT1[255:128] = IV
1197 * CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1200 ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
1202 init_aead_job(req, edesc, all_contig, encrypt);
1204 if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
1205 append_load_as_imm(desc, req->iv, ivsize,
1207 LDST_SRCDST_BYTE_CONTEXT |
1208 (ivoffset << LDST_OFFSET_SHIFT));
1212 * Fill in ablkcipher job descriptor
/*
 * Builds the per-request job descriptor for an ablkcipher operation:
 * SEQ IN covers IV + payload (direct DMA when both are contiguous, else
 * through the sec4 S/G table); SEQ OUT covers the payload, handling the
 * in-place (src == dst) and out-of-place layouts separately.
 */
1214 static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
1215 struct ablkcipher_edesc *edesc,
1216 struct ablkcipher_request *req,
1219 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1220 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1221 u32 *desc = edesc->hw_desc;
1222 u32 out_options = 0, in_options;
1223 dma_addr_t dst_dma, src_dma;
1224 int len, sec4_sg_index = 0;
1227 print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
1228 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
1230 printk(KERN_ERR "asked=%d, nbytes%d\n", (int)edesc->src_nents ? 100 : req->nbytes, req->nbytes);
1231 dbg_dump_sg(KERN_ERR, "src @"__stringify(__LINE__)": ",
1232 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1233 edesc->src_nents ? 100 : req->nbytes, 1);
1236 len = desc_len(sh_desc);
1237 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
/* Contiguous IV+data: point directly at the IV buffer. */
1240 src_dma = edesc->iv_dma;
1243 src_dma = edesc->sec4_sg_dma;
/* +1 accounts for the IV entry preceding src in the S/G table. */
1244 sec4_sg_index += edesc->src_nents + 1;
1245 in_options = LDST_SGF;
1247 append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);
1249 if (likely(req->src == req->dst)) {
1250 if (!edesc->src_nents && iv_contig) {
1251 dst_dma = sg_dma_address(req->src);
1253 dst_dma = edesc->sec4_sg_dma +
1254 sizeof(struct sec4_sg_entry);
1255 out_options = LDST_SGF;
1258 if (!edesc->dst_nents) {
1259 dst_dma = sg_dma_address(req->dst);
1261 dst_dma = edesc->sec4_sg_dma +
1262 sec4_sg_index * sizeof(struct sec4_sg_entry);
1263 out_options = LDST_SGF;
1266 append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
1270 * Fill in ablkcipher givencrypt job descriptor
/*
 * Like init_ablkcipher_job() but for givencrypt: the input is only the
 * payload, while the output sequence covers IV + payload because the
 * hardware generates and emits the IV.
 */
1272 static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
1273 struct ablkcipher_edesc *edesc,
1274 struct ablkcipher_request *req,
1277 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1278 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1279 u32 *desc = edesc->hw_desc;
1280 u32 out_options, in_options;
1281 dma_addr_t dst_dma, src_dma;
1282 int len, sec4_sg_index = 0;
1285 print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
1286 DUMP_PREFIX_ADDRESS, 16, 4, req->info,
1288 dbg_dump_sg(KERN_ERR, "src @" __stringify(__LINE__) ": ",
1289 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1290 edesc->src_nents ? 100 : req->nbytes, 1);
1293 len = desc_len(sh_desc);
1294 init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1296 if (!edesc->src_nents) {
1297 src_dma = sg_dma_address(req->src);
1300 src_dma = edesc->sec4_sg_dma;
1301 sec4_sg_index += edesc->src_nents;
1302 in_options = LDST_SGF;
1304 append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);
/* Output starts at the IV buffer (direct) or via the S/G table. */
1307 dst_dma = edesc->iv_dma;
1310 dst_dma = edesc->sec4_sg_dma +
1311 sec4_sg_index * sizeof(struct sec4_sg_entry);
1312 out_options = LDST_SGF;
1314 append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
1318 * allocate and map the aead extended descriptor
/*
 * Counts the S/G segments of src/dst, allocates one buffer holding the
 * edesc + hw job descriptor + sec4 S/G link table, DMA-maps the request
 * scatterlists (bidirectionally when in-place), fills and maps the link
 * table.  Returns the edesc or ERR_PTR(-ENOMEM); *all_contig_ptr tells
 * the caller whether the input is a single contiguous segment.
 */
1320 static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1321 int desc_bytes, bool *all_contig_ptr,
1324 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1325 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1326 struct device *jrdev = ctx->jrdev;
/* May be called from atomic context; only sleep if the request allows. */
1327 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
1328 CRYPTO_TFM_REQ_MAY_SLEEP)) ? GFP_KERNEL : GFP_ATOMIC;
1329 int src_nents, dst_nents = 0;
1330 struct aead_edesc *edesc;
1332 bool all_contig = true;
1333 int sec4_sg_index, sec4_sg_len = 0, sec4_sg_bytes;
1334 unsigned int authsize = ctx->authsize;
1336 if (unlikely(req->dst != req->src)) {
1337 src_nents = sg_count(req->src, req->assoclen + req->cryptlen);
/* Output additionally carries (encrypt) or drops (decrypt) the ICV. */
1338 dst_nents = sg_count(req->dst,
1339 req->assoclen + req->cryptlen +
1340 (encrypt ? authsize : (-authsize)));
1342 src_nents = sg_count(req->src,
1343 req->assoclen + req->cryptlen +
1344 (encrypt ? authsize : 0));
1347 /* Check if data are contiguous. */
1348 all_contig = !src_nents;
1350 sec4_sg_len = src_nents;
1352 sec4_sg_len += dst_nents;
1354 sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1356 /* allocate space for base edesc and hw desc commands, link tables */
1357 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1360 dev_err(jrdev, "could not allocate extended descriptor\n");
1361 return ERR_PTR(-ENOMEM);
1364 if (likely(req->src == req->dst)) {
1365 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
1367 if (unlikely(!sgc)) {
1368 dev_err(jrdev, "unable to map source\n");
1370 return ERR_PTR(-ENOMEM);
1373 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
1375 if (unlikely(!sgc)) {
1376 dev_err(jrdev, "unable to map source\n");
1378 return ERR_PTR(-ENOMEM);
1381 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
1383 if (unlikely(!sgc)) {
1384 dev_err(jrdev, "unable to map destination\n");
/* Roll back the already-mapped source on dst failure. */
1385 dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
1388 return ERR_PTR(-ENOMEM);
1392 edesc->src_nents = src_nents;
1393 edesc->dst_nents = dst_nents;
/* Link table lives immediately after the fixed edesc + hw_desc area. */
1394 edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1396 *all_contig_ptr = all_contig;
1400 sg_to_sec4_sg_last(req->src, src_nents,
1401 edesc->sec4_sg + sec4_sg_index, 0);
1402 sec4_sg_index += src_nents;
1405 sg_to_sec4_sg_last(req->dst, dst_nents,
1406 edesc->sec4_sg + sec4_sg_index, 0);
1412 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1413 sec4_sg_bytes, DMA_TO_DEVICE);
1414 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1415 dev_err(jrdev, "unable to map S/G table\n");
1416 aead_unmap(jrdev, edesc, req);
1418 return ERR_PTR(-ENOMEM);
1421 edesc->sec4_sg_bytes = sec4_sg_bytes;
/*
 * GCM encrypt entry point: build an extended descriptor plus job
 * descriptor and submit it to the CAAM job ring; completion is
 * reported asynchronously via aead_encrypt_done.
 */
1426 static int gcm_encrypt(struct aead_request *req)
1428 struct aead_edesc *edesc;
1429 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1430 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1431 struct device *jrdev = ctx->jrdev;
1436 /* allocate extended descriptor */
1437 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
1439 return PTR_ERR(edesc);
1441 /* Create and submit job descriptor */
1442 init_gcm_job(req, edesc, all_contig, true);
1444 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1445 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1446 desc_bytes(edesc->hw_desc), 1);
/* On enqueue failure, unwind the DMA mappings set up above. */
1449 desc = edesc->hw_desc;
1450 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1454 aead_unmap(jrdev, edesc, req);
/*
 * RFC4106/RFC4543 (IPsec ESP) wrapper: rejects requests whose AAD is
 * shorter than 8 bytes before delegating to the plain GCM path.
 * NOTE(review): the error-return statement for the short-AAD case was
 * elided by extraction — presumably -EINVAL; verify against the tree.
 */
1461 static int ipsec_gcm_encrypt(struct aead_request *req)
1463 if (req->assoclen < 8)
1466 return gcm_encrypt(req);
/*
 * Generic authenc (hash+cipher) encrypt entry point: allocate the
 * extended descriptor, build the authenc job descriptor and enqueue
 * it; aead_encrypt_done runs on completion.
 */
1469 static int aead_encrypt(struct aead_request *req)
1471 struct aead_edesc *edesc;
1472 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1473 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1474 struct device *jrdev = ctx->jrdev;
1479 /* allocate extended descriptor */
1480 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1483 return PTR_ERR(edesc);
1485 /* Create and submit job descriptor */
1486 init_authenc_job(req, edesc, all_contig, true);
1488 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1489 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1490 desc_bytes(edesc->hw_desc), 1);
/* On enqueue failure, unwind the DMA mappings set up above. */
1493 desc = edesc->hw_desc;
1494 ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1498 aead_unmap(jrdev, edesc, req);
/*
 * GCM decrypt entry point: mirrors gcm_encrypt() with encrypt=false
 * and the aead_decrypt_done completion callback.
 */
1505 static int gcm_decrypt(struct aead_request *req)
1507 struct aead_edesc *edesc;
1508 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1509 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1510 struct device *jrdev = ctx->jrdev;
1515 /* allocate extended descriptor */
1516 edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
1518 return PTR_ERR(edesc);
1520 /* Create and submit job descriptor*/
1521 init_gcm_job(req, edesc, all_contig, false);
1523 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1524 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1525 desc_bytes(edesc->hw_desc), 1);
/* On enqueue failure, unwind the DMA mappings set up above. */
1528 desc = edesc->hw_desc;
1529 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1533 aead_unmap(jrdev, edesc, req);
/*
 * RFC4106/RFC4543 (IPsec ESP) wrapper for decrypt: AAD must be at
 * least 8 bytes, matching ipsec_gcm_encrypt().
 * NOTE(review): the short-AAD error return was elided by extraction.
 */
1540 static int ipsec_gcm_decrypt(struct aead_request *req)
1542 if (req->assoclen < 8)
1545 return gcm_decrypt(req);
/*
 * Generic authenc decrypt entry point: same flow as aead_encrypt()
 * with encrypt=false, plus an extra debug dump of the source data.
 */
1548 static int aead_decrypt(struct aead_request *req)
1550 struct aead_edesc *edesc;
1551 struct crypto_aead *aead = crypto_aead_reqtfm(req);
1552 struct caam_ctx *ctx = crypto_aead_ctx(aead);
1553 struct device *jrdev = ctx->jrdev;
1559 dbg_dump_sg(KERN_ERR, "dec src@"__stringify(__LINE__)": ",
1560 DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1561 req->assoclen + req->cryptlen, 1);
1564 /* allocate extended descriptor */
1565 edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1566 &all_contig, false);
1568 return PTR_ERR(edesc);
1570 /* Create and submit job descriptor*/
1571 init_authenc_job(req, edesc, all_contig, false);
1573 print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1574 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1575 desc_bytes(edesc->hw_desc), 1);
/* On enqueue failure, unwind the DMA mappings set up above. */
1578 desc = edesc->hw_desc;
1579 ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1583 aead_unmap(jrdev, edesc, req);
1591 * allocate and map the ablkcipher extended descriptor for ablkcipher
/*
 * NOTE(review): extraction elided lines (else keywords, DMA direction
 * arguments, kfree on error, closing braces); comments reflect only
 * what is visible here.
 */
1593 static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
1594 *req, int desc_bytes,
1595 bool *iv_contig_out)
1597 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1598 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1599 struct device *jrdev = ctx->jrdev;
/* May sleep only if the caller allows it; otherwise must be atomic. */
1600 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
1601 CRYPTO_TFM_REQ_MAY_SLEEP)) ?
1602 GFP_KERNEL : GFP_ATOMIC;
1603 int src_nents, dst_nents = 0, sec4_sg_bytes;
1604 struct ablkcipher_edesc *edesc;
1605 dma_addr_t iv_dma = 0;
1606 bool iv_contig = false;
1608 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
/* Count S/G entries; 0 means a single contiguous segment. */
1611 src_nents = sg_count(req->src, req->nbytes);
1613 if (req->dst != req->src)
1614 dst_nents = sg_count(req->dst, req->nbytes);
/*
 * DMA-map the data: one bidirectional mapping in-place, separate
 * src/dst mappings (with src unwound on dst failure) out-of-place.
 */
1616 if (likely(req->src == req->dst)) {
1617 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
1619 if (unlikely(!sgc)) {
1620 dev_err(jrdev, "unable to map source\n");
1621 return ERR_PTR(-ENOMEM);
1624 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
1626 if (unlikely(!sgc)) {
1627 dev_err(jrdev, "unable to map source\n");
1628 return ERR_PTR(-ENOMEM);
1631 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
1633 if (unlikely(!sgc)) {
1634 dev_err(jrdev, "unable to map destination\n");
1635 dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
1637 return ERR_PTR(-ENOMEM);
/* Map the caller-supplied IV (req->info) for device reads. */
1641 iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
1642 if (dma_mapping_error(jrdev, iv_dma)) {
1643 dev_err(jrdev, "unable to map IV\n");
1644 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1646 return ERR_PTR(-ENOMEM);
1650 * Check if iv can be contiguous with source and destination.
1651 * If so, include it. If not, create scatterlist.
/* Contiguous only when the IV mapping ends exactly where src begins. */
1653 if (!src_nents && iv_dma + ivsize == sg_dma_address(req->src))
/* One extra S/G entry is reserved for the IV when not contiguous. */
1656 src_nents = src_nents ? : 1;
1657 sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
1658 sizeof(struct sec4_sg_entry);
1660 /* allocate space for base edesc and hw desc commands, link tables */
1661 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1664 dev_err(jrdev, "could not allocate extended descriptor\n");
1665 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1666 iv_dma, ivsize, 0, 0);
1667 return ERR_PTR(-ENOMEM);
/* S/G table lives in the same allocation, after the hw descriptor. */
1670 edesc->src_nents = src_nents;
1671 edesc->dst_nents = dst_nents;
1672 edesc->sec4_sg_bytes = sec4_sg_bytes;
1673 edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
/* Non-contiguous IV: table is [IV entry, then the source entries]. */
1678 dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1679 sg_to_sec4_sg_last(req->src, src_nents,
1680 edesc->sec4_sg + 1, 0);
1681 sec4_sg_index += 1 + src_nents;
1685 sg_to_sec4_sg_last(req->dst, dst_nents,
1686 edesc->sec4_sg + sec4_sg_index, 0);
/* Map the completed S/G table for device access. */
1689 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1690 sec4_sg_bytes, DMA_TO_DEVICE);
1691 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1692 dev_err(jrdev, "unable to map S/G table\n");
1693 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1694 iv_dma, ivsize, 0, 0);
1696 return ERR_PTR(-ENOMEM);
1699 edesc->iv_dma = iv_dma;
1702 print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
1703 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1707 *iv_contig_out = iv_contig;
/*
 * ablkcipher encrypt entry point: allocate/map the extended
 * descriptor, build a job descriptor around the encrypt shared
 * descriptor, and enqueue; ablkcipher_encrypt_done completes it.
 */
1711 static int ablkcipher_encrypt(struct ablkcipher_request *req)
1713 struct ablkcipher_edesc *edesc;
1714 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1715 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1716 struct device *jrdev = ctx->jrdev;
1721 /* allocate extended descriptor */
1722 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
1723 CAAM_CMD_SZ, &iv_contig);
1725 return PTR_ERR(edesc);
1727 /* Create and submit job descriptor*/
1728 init_ablkcipher_job(ctx->sh_desc_enc,
1729 ctx->sh_desc_enc_dma, edesc, req, iv_contig);
1731 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
1732 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1733 desc_bytes(edesc->hw_desc), 1);
/* On enqueue failure, unwind the DMA mappings set up above. */
1735 desc = edesc->hw_desc;
1736 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
1741 ablkcipher_unmap(jrdev, edesc, req);
/*
 * ablkcipher decrypt entry point: mirrors ablkcipher_encrypt() but
 * uses the decrypt shared descriptor and completion callback.
 */
1748 static int ablkcipher_decrypt(struct ablkcipher_request *req)
1750 struct ablkcipher_edesc *edesc;
1751 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1752 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1753 struct device *jrdev = ctx->jrdev;
1758 /* allocate extended descriptor */
1759 edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
1760 CAAM_CMD_SZ, &iv_contig);
1762 return PTR_ERR(edesc);
1764 /* Create and submit job descriptor*/
1765 init_ablkcipher_job(ctx->sh_desc_dec,
1766 ctx->sh_desc_dec_dma, edesc, req, iv_contig);
1767 desc = edesc->hw_desc;
1769 print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
1770 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1771 desc_bytes(edesc->hw_desc), 1);
/* On enqueue failure, unwind the DMA mappings set up above. */
1774 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
1778 ablkcipher_unmap(jrdev, edesc, req);
1786 * allocate and map the ablkcipher extended descriptor
1787 * for ablkcipher givencrypt
/*
 * Differs from ablkcipher_edesc_alloc() in that the IV buffer is the
 * givcrypt request's greq->giv (the IV to be generated/output) and
 * contiguity is checked against the DESTINATION, since the generated
 * IV is written out ahead of the ciphertext.
 * NOTE(review): extraction elided lines (else keywords, DMA direction
 * arguments, closing braces); comments reflect only what is visible.
 */
1789 static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
1790 struct skcipher_givcrypt_request *greq,
1792 bool *iv_contig_out)
1794 struct ablkcipher_request *req = &greq->creq;
1795 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1796 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1797 struct device *jrdev = ctx->jrdev;
/* May sleep only if the caller allows it; otherwise must be atomic. */
1798 gfp_t flags = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
1799 CRYPTO_TFM_REQ_MAY_SLEEP)) ?
1800 GFP_KERNEL : GFP_ATOMIC;
1801 int src_nents, dst_nents = 0, sec4_sg_bytes;
1802 struct ablkcipher_edesc *edesc;
1803 dma_addr_t iv_dma = 0;
1804 bool iv_contig = false;
1806 int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
/* Count S/G entries; 0 means a single contiguous segment. */
1809 src_nents = sg_count(req->src, req->nbytes);
1811 if (unlikely(req->dst != req->src))
1812 dst_nents = sg_count(req->dst, req->nbytes);
/* DMA-map data: same in-place / out-of-place split as the other allocs. */
1814 if (likely(req->src == req->dst)) {
1815 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
1817 if (unlikely(!sgc)) {
1818 dev_err(jrdev, "unable to map source\n");
1819 return ERR_PTR(-ENOMEM);
1822 sgc = dma_map_sg(jrdev, req->src, src_nents ? : 1,
1824 if (unlikely(!sgc)) {
1825 dev_err(jrdev, "unable to map source\n");
1826 return ERR_PTR(-ENOMEM);
1829 sgc = dma_map_sg(jrdev, req->dst, dst_nents ? : 1,
1831 if (unlikely(!sgc)) {
1832 dev_err(jrdev, "unable to map destination\n");
1833 dma_unmap_sg(jrdev, req->src, src_nents ? : 1,
1835 return ERR_PTR(-ENOMEM);
1840 * Check if iv can be contiguous with source and destination.
1841 * If so, include it. If not, create scatterlist.
1843 iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
1844 if (dma_mapping_error(jrdev, iv_dma)) {
1845 dev_err(jrdev, "unable to map IV\n");
1846 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1848 return ERR_PTR(-ENOMEM);
/* Contiguous only when the IV mapping ends exactly where dst begins. */
1851 if (!dst_nents && iv_dma + ivsize == sg_dma_address(req->dst))
/* One extra S/G entry is reserved for the IV when not contiguous. */
1854 dst_nents = dst_nents ? : 1;
1855 sec4_sg_bytes = ((iv_contig ? 0 : 1) + src_nents + dst_nents) *
1856 sizeof(struct sec4_sg_entry);
1858 /* allocate space for base edesc and hw desc commands, link tables */
1859 edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1862 dev_err(jrdev, "could not allocate extended descriptor\n");
1863 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1864 iv_dma, ivsize, 0, 0);
1865 return ERR_PTR(-ENOMEM);
/* S/G table lives in the same allocation, after the hw descriptor. */
1868 edesc->src_nents = src_nents;
1869 edesc->dst_nents = dst_nents;
1870 edesc->sec4_sg_bytes = sec4_sg_bytes;
1871 edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
/* Table layout: source entries first, then [IV, destination] entries. */
1876 sg_to_sec4_sg_last(req->src, src_nents, edesc->sec4_sg, 0);
1877 sec4_sg_index += src_nents;
1881 dma_to_sec4_sg_one(edesc->sec4_sg + sec4_sg_index,
1884 sg_to_sec4_sg_last(req->dst, dst_nents,
1885 edesc->sec4_sg + sec4_sg_index, 0);
/* Map the completed S/G table for device access. */
1888 edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1889 sec4_sg_bytes, DMA_TO_DEVICE);
1890 if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1891 dev_err(jrdev, "unable to map S/G table\n");
1892 caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1893 iv_dma, ivsize, 0, 0);
1895 return ERR_PTR(-ENOMEM);
1897 edesc->iv_dma = iv_dma;
1900 print_hex_dump(KERN_ERR,
1901 "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
1902 DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1906 *iv_contig_out = iv_contig;
/*
 * givencrypt entry point: generate an IV and encrypt in one pass
 * using the givenc shared descriptor; the generated IV lands in
 * creq->giv ahead of the ciphertext (see init_ablkcipher_giv_job).
 */
1910 static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
1912 struct ablkcipher_request *req = &creq->creq;
1913 struct ablkcipher_edesc *edesc;
1914 struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1915 struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1916 struct device *jrdev = ctx->jrdev;
1921 /* allocate extended descriptor */
1922 edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
1923 CAAM_CMD_SZ, &iv_contig);
1925 return PTR_ERR(edesc);
1927 /* Create and submit job descriptor*/
1928 init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
1929 edesc, req, iv_contig);
1931 print_hex_dump(KERN_ERR,
1932 "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
1933 DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1934 desc_bytes(edesc->hw_desc), 1);
/* On enqueue failure, unwind the DMA mappings set up above. */
1936 desc = edesc->hw_desc;
1937 ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
1942 ablkcipher_unmap(jrdev, edesc, req);
/* Shorthand accessors into the template union below. */
1949 #define template_aead template_u.aead
1950 #define template_ablkcipher template_u.ablkcipher
/*
 * Static description of one algorithm this driver registers: crypto
 * API names/parameters plus the CAAM OP_ALG class-1 (cipher) and
 * class-2 (auth) operation words used when building descriptors.
 * NOTE(review): some members (e.g. the type field and the union
 * wrapper around ablkcipher) were elided by extraction.
 */
1951 struct caam_alg_template {
1952 char name[CRYPTO_MAX_ALG_NAME];
1953 char driver_name[CRYPTO_MAX_ALG_NAME];
1954 unsigned int blocksize;
1957 struct ablkcipher_alg ablkcipher;
/* CAAM operation words: class 1 = cipher algorithm, class 2 = auth. */
1959 u32 class1_alg_type;
1960 u32 class2_alg_type;
/*
 * Table of (ablk)cipher algorithms registered by this driver. Entries
 * with type CRYPTO_ALG_TYPE_GIVCIPHER additionally provide the
 * combined IV-generate+encrypt path (ablkcipher_givencrypt).
 */
1963 static struct caam_alg_template driver_algs[] = {
1964 /* ablkcipher descriptor */
/* cbc(aes) with built-in IV generation */
1967 .driver_name = "cbc-aes-caam",
1968 .blocksize = AES_BLOCK_SIZE,
1969 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
1970 .template_ablkcipher = {
1971 .setkey = ablkcipher_setkey,
1972 .encrypt = ablkcipher_encrypt,
1973 .decrypt = ablkcipher_decrypt,
1974 .givencrypt = ablkcipher_givencrypt,
1975 .geniv = "<built-in>",
1976 .min_keysize = AES_MIN_KEY_SIZE,
1977 .max_keysize = AES_MAX_KEY_SIZE,
1978 .ivsize = AES_BLOCK_SIZE,
1980 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
/* cbc(des3_ede) with built-in IV generation */
1983 .name = "cbc(des3_ede)",
1984 .driver_name = "cbc-3des-caam",
1985 .blocksize = DES3_EDE_BLOCK_SIZE,
1986 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
1987 .template_ablkcipher = {
1988 .setkey = ablkcipher_setkey,
1989 .encrypt = ablkcipher_encrypt,
1990 .decrypt = ablkcipher_decrypt,
1991 .givencrypt = ablkcipher_givencrypt,
1992 .geniv = "<built-in>",
1993 .min_keysize = DES3_EDE_KEY_SIZE,
1994 .max_keysize = DES3_EDE_KEY_SIZE,
1995 .ivsize = DES3_EDE_BLOCK_SIZE,
1997 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
/* cbc(des) with built-in IV generation */
2001 .driver_name = "cbc-des-caam",
2002 .blocksize = DES_BLOCK_SIZE,
2003 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2004 .template_ablkcipher = {
2005 .setkey = ablkcipher_setkey,
2006 .encrypt = ablkcipher_encrypt,
2007 .decrypt = ablkcipher_decrypt,
2008 .givencrypt = ablkcipher_givencrypt,
2009 .geniv = "<built-in>",
2010 .min_keysize = DES_KEY_SIZE,
2011 .max_keysize = DES_KEY_SIZE,
2012 .ivsize = DES_BLOCK_SIZE,
2014 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
/* ctr(aes): plain ablkcipher, no IV generation */
2018 .driver_name = "ctr-aes-caam",
2020 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2021 .template_ablkcipher = {
2022 .setkey = ablkcipher_setkey,
2023 .encrypt = ablkcipher_encrypt,
2024 .decrypt = ablkcipher_decrypt,
2026 .min_keysize = AES_MIN_KEY_SIZE,
2027 .max_keysize = AES_MAX_KEY_SIZE,
2028 .ivsize = AES_BLOCK_SIZE,
2030 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
/* rfc3686(ctr(aes)): key carries an extra 4-byte nonce */
2033 .name = "rfc3686(ctr(aes))",
2034 .driver_name = "rfc3686-ctr-aes-caam",
2036 .type = CRYPTO_ALG_TYPE_GIVCIPHER,
2037 .template_ablkcipher = {
2038 .setkey = ablkcipher_setkey,
2039 .encrypt = ablkcipher_encrypt,
2040 .decrypt = ablkcipher_decrypt,
2041 .givencrypt = ablkcipher_givencrypt,
2042 .geniv = "<built-in>",
2043 .min_keysize = AES_MIN_KEY_SIZE +
2044 CTR_RFC3686_NONCE_SIZE,
2045 .max_keysize = AES_MAX_KEY_SIZE +
2046 CTR_RFC3686_NONCE_SIZE,
2047 .ivsize = CTR_RFC3686_IV_SIZE,
2049 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
/* xts(aes): two keys, hence 2x key sizes and a dedicated setkey */
2053 .driver_name = "xts-aes-caam",
2054 .blocksize = AES_BLOCK_SIZE,
2055 .type = CRYPTO_ALG_TYPE_ABLKCIPHER,
2056 .template_ablkcipher = {
2057 .setkey = xts_ablkcipher_setkey,
2058 .encrypt = ablkcipher_encrypt,
2059 .decrypt = ablkcipher_decrypt,
2061 .min_keysize = 2 * AES_MIN_KEY_SIZE,
2062 .max_keysize = 2 * AES_MAX_KEY_SIZE,
2063 .ivsize = AES_BLOCK_SIZE,
2065 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
2069 static struct caam_aead_alg driver_aeads[] = {
2073 .cra_name = "rfc4106(gcm(aes))",
2074 .cra_driver_name = "rfc4106-gcm-aes-caam",
2077 .setkey = rfc4106_setkey,
2078 .setauthsize = rfc4106_setauthsize,
2079 .encrypt = ipsec_gcm_encrypt,
2080 .decrypt = ipsec_gcm_decrypt,
2082 .maxauthsize = AES_BLOCK_SIZE,
2085 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2091 .cra_name = "rfc4543(gcm(aes))",
2092 .cra_driver_name = "rfc4543-gcm-aes-caam",
2095 .setkey = rfc4543_setkey,
2096 .setauthsize = rfc4543_setauthsize,
2097 .encrypt = ipsec_gcm_encrypt,
2098 .decrypt = ipsec_gcm_decrypt,
2100 .maxauthsize = AES_BLOCK_SIZE,
2103 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2106 /* Galois Counter Mode */
2110 .cra_name = "gcm(aes)",
2111 .cra_driver_name = "gcm-aes-caam",
2114 .setkey = gcm_setkey,
2115 .setauthsize = gcm_setauthsize,
2116 .encrypt = gcm_encrypt,
2117 .decrypt = gcm_decrypt,
2119 .maxauthsize = AES_BLOCK_SIZE,
2122 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2125 /* single-pass ipsec_esp descriptor */
2129 .cra_name = "authenc(hmac(md5),"
2130 "ecb(cipher_null))",
2131 .cra_driver_name = "authenc-hmac-md5-"
2132 "ecb-cipher_null-caam",
2133 .cra_blocksize = NULL_BLOCK_SIZE,
2135 .setkey = aead_setkey,
2136 .setauthsize = aead_setauthsize,
2137 .encrypt = aead_encrypt,
2138 .decrypt = aead_decrypt,
2139 .ivsize = NULL_IV_SIZE,
2140 .maxauthsize = MD5_DIGEST_SIZE,
2143 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2144 OP_ALG_AAI_HMAC_PRECOMP,
2150 .cra_name = "authenc(hmac(sha1),"
2151 "ecb(cipher_null))",
2152 .cra_driver_name = "authenc-hmac-sha1-"
2153 "ecb-cipher_null-caam",
2154 .cra_blocksize = NULL_BLOCK_SIZE,
2156 .setkey = aead_setkey,
2157 .setauthsize = aead_setauthsize,
2158 .encrypt = aead_encrypt,
2159 .decrypt = aead_decrypt,
2160 .ivsize = NULL_IV_SIZE,
2161 .maxauthsize = SHA1_DIGEST_SIZE,
2164 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2165 OP_ALG_AAI_HMAC_PRECOMP,
2171 .cra_name = "authenc(hmac(sha224),"
2172 "ecb(cipher_null))",
2173 .cra_driver_name = "authenc-hmac-sha224-"
2174 "ecb-cipher_null-caam",
2175 .cra_blocksize = NULL_BLOCK_SIZE,
2177 .setkey = aead_setkey,
2178 .setauthsize = aead_setauthsize,
2179 .encrypt = aead_encrypt,
2180 .decrypt = aead_decrypt,
2181 .ivsize = NULL_IV_SIZE,
2182 .maxauthsize = SHA224_DIGEST_SIZE,
2185 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2186 OP_ALG_AAI_HMAC_PRECOMP,
2192 .cra_name = "authenc(hmac(sha256),"
2193 "ecb(cipher_null))",
2194 .cra_driver_name = "authenc-hmac-sha256-"
2195 "ecb-cipher_null-caam",
2196 .cra_blocksize = NULL_BLOCK_SIZE,
2198 .setkey = aead_setkey,
2199 .setauthsize = aead_setauthsize,
2200 .encrypt = aead_encrypt,
2201 .decrypt = aead_decrypt,
2202 .ivsize = NULL_IV_SIZE,
2203 .maxauthsize = SHA256_DIGEST_SIZE,
2206 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2207 OP_ALG_AAI_HMAC_PRECOMP,
2213 .cra_name = "authenc(hmac(sha384),"
2214 "ecb(cipher_null))",
2215 .cra_driver_name = "authenc-hmac-sha384-"
2216 "ecb-cipher_null-caam",
2217 .cra_blocksize = NULL_BLOCK_SIZE,
2219 .setkey = aead_setkey,
2220 .setauthsize = aead_setauthsize,
2221 .encrypt = aead_encrypt,
2222 .decrypt = aead_decrypt,
2223 .ivsize = NULL_IV_SIZE,
2224 .maxauthsize = SHA384_DIGEST_SIZE,
2227 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2228 OP_ALG_AAI_HMAC_PRECOMP,
2234 .cra_name = "authenc(hmac(sha512),"
2235 "ecb(cipher_null))",
2236 .cra_driver_name = "authenc-hmac-sha512-"
2237 "ecb-cipher_null-caam",
2238 .cra_blocksize = NULL_BLOCK_SIZE,
2240 .setkey = aead_setkey,
2241 .setauthsize = aead_setauthsize,
2242 .encrypt = aead_encrypt,
2243 .decrypt = aead_decrypt,
2244 .ivsize = NULL_IV_SIZE,
2245 .maxauthsize = SHA512_DIGEST_SIZE,
2248 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2249 OP_ALG_AAI_HMAC_PRECOMP,
2255 .cra_name = "authenc(hmac(md5),cbc(aes))",
2256 .cra_driver_name = "authenc-hmac-md5-"
2258 .cra_blocksize = AES_BLOCK_SIZE,
2260 .setkey = aead_setkey,
2261 .setauthsize = aead_setauthsize,
2262 .encrypt = aead_encrypt,
2263 .decrypt = aead_decrypt,
2264 .ivsize = AES_BLOCK_SIZE,
2265 .maxauthsize = MD5_DIGEST_SIZE,
2268 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2269 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2270 OP_ALG_AAI_HMAC_PRECOMP,
2276 .cra_name = "echainiv(authenc(hmac(md5),"
2278 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2280 .cra_blocksize = AES_BLOCK_SIZE,
2282 .setkey = aead_setkey,
2283 .setauthsize = aead_setauthsize,
2284 .encrypt = aead_encrypt,
2285 .decrypt = aead_decrypt,
2286 .ivsize = AES_BLOCK_SIZE,
2287 .maxauthsize = MD5_DIGEST_SIZE,
2290 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2291 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2292 OP_ALG_AAI_HMAC_PRECOMP,
2299 .cra_name = "authenc(hmac(sha1),cbc(aes))",
2300 .cra_driver_name = "authenc-hmac-sha1-"
2302 .cra_blocksize = AES_BLOCK_SIZE,
2304 .setkey = aead_setkey,
2305 .setauthsize = aead_setauthsize,
2306 .encrypt = aead_encrypt,
2307 .decrypt = aead_decrypt,
2308 .ivsize = AES_BLOCK_SIZE,
2309 .maxauthsize = SHA1_DIGEST_SIZE,
2312 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2313 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2314 OP_ALG_AAI_HMAC_PRECOMP,
2320 .cra_name = "echainiv(authenc(hmac(sha1),"
2322 .cra_driver_name = "echainiv-authenc-"
2323 "hmac-sha1-cbc-aes-caam",
2324 .cra_blocksize = AES_BLOCK_SIZE,
2326 .setkey = aead_setkey,
2327 .setauthsize = aead_setauthsize,
2328 .encrypt = aead_encrypt,
2329 .decrypt = aead_decrypt,
2330 .ivsize = AES_BLOCK_SIZE,
2331 .maxauthsize = SHA1_DIGEST_SIZE,
2334 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2335 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2336 OP_ALG_AAI_HMAC_PRECOMP,
2343 .cra_name = "authenc(hmac(sha224),cbc(aes))",
2344 .cra_driver_name = "authenc-hmac-sha224-"
2346 .cra_blocksize = AES_BLOCK_SIZE,
2348 .setkey = aead_setkey,
2349 .setauthsize = aead_setauthsize,
2350 .encrypt = aead_encrypt,
2351 .decrypt = aead_decrypt,
2352 .ivsize = AES_BLOCK_SIZE,
2353 .maxauthsize = SHA224_DIGEST_SIZE,
2356 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2357 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2358 OP_ALG_AAI_HMAC_PRECOMP,
2364 .cra_name = "echainiv(authenc(hmac(sha224),"
2366 .cra_driver_name = "echainiv-authenc-"
2367 "hmac-sha224-cbc-aes-caam",
2368 .cra_blocksize = AES_BLOCK_SIZE,
2370 .setkey = aead_setkey,
2371 .setauthsize = aead_setauthsize,
2372 .encrypt = aead_encrypt,
2373 .decrypt = aead_decrypt,
2374 .ivsize = AES_BLOCK_SIZE,
2375 .maxauthsize = SHA224_DIGEST_SIZE,
2378 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2379 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2380 OP_ALG_AAI_HMAC_PRECOMP,
2387 .cra_name = "authenc(hmac(sha256),cbc(aes))",
2388 .cra_driver_name = "authenc-hmac-sha256-"
2390 .cra_blocksize = AES_BLOCK_SIZE,
2392 .setkey = aead_setkey,
2393 .setauthsize = aead_setauthsize,
2394 .encrypt = aead_encrypt,
2395 .decrypt = aead_decrypt,
2396 .ivsize = AES_BLOCK_SIZE,
2397 .maxauthsize = SHA256_DIGEST_SIZE,
2400 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2401 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2402 OP_ALG_AAI_HMAC_PRECOMP,
2408 .cra_name = "echainiv(authenc(hmac(sha256),"
2410 .cra_driver_name = "echainiv-authenc-"
2411 "hmac-sha256-cbc-aes-caam",
2412 .cra_blocksize = AES_BLOCK_SIZE,
2414 .setkey = aead_setkey,
2415 .setauthsize = aead_setauthsize,
2416 .encrypt = aead_encrypt,
2417 .decrypt = aead_decrypt,
2418 .ivsize = AES_BLOCK_SIZE,
2419 .maxauthsize = SHA256_DIGEST_SIZE,
2422 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2423 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2424 OP_ALG_AAI_HMAC_PRECOMP,
2431 .cra_name = "authenc(hmac(sha384),cbc(aes))",
2432 .cra_driver_name = "authenc-hmac-sha384-"
2434 .cra_blocksize = AES_BLOCK_SIZE,
2436 .setkey = aead_setkey,
2437 .setauthsize = aead_setauthsize,
2438 .encrypt = aead_encrypt,
2439 .decrypt = aead_decrypt,
2440 .ivsize = AES_BLOCK_SIZE,
2441 .maxauthsize = SHA384_DIGEST_SIZE,
2444 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2445 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2446 OP_ALG_AAI_HMAC_PRECOMP,
2452 .cra_name = "echainiv(authenc(hmac(sha384),"
2454 .cra_driver_name = "echainiv-authenc-"
2455 "hmac-sha384-cbc-aes-caam",
2456 .cra_blocksize = AES_BLOCK_SIZE,
2458 .setkey = aead_setkey,
2459 .setauthsize = aead_setauthsize,
2460 .encrypt = aead_encrypt,
2461 .decrypt = aead_decrypt,
2462 .ivsize = AES_BLOCK_SIZE,
2463 .maxauthsize = SHA384_DIGEST_SIZE,
2466 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2467 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2468 OP_ALG_AAI_HMAC_PRECOMP,
2475 .cra_name = "authenc(hmac(sha512),cbc(aes))",
2476 .cra_driver_name = "authenc-hmac-sha512-"
2478 .cra_blocksize = AES_BLOCK_SIZE,
2480 .setkey = aead_setkey,
2481 .setauthsize = aead_setauthsize,
2482 .encrypt = aead_encrypt,
2483 .decrypt = aead_decrypt,
2484 .ivsize = AES_BLOCK_SIZE,
2485 .maxauthsize = SHA512_DIGEST_SIZE,
2488 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2489 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2490 OP_ALG_AAI_HMAC_PRECOMP,
2496 .cra_name = "echainiv(authenc(hmac(sha512),"
2498 .cra_driver_name = "echainiv-authenc-"
2499 "hmac-sha512-cbc-aes-caam",
2500 .cra_blocksize = AES_BLOCK_SIZE,
2502 .setkey = aead_setkey,
2503 .setauthsize = aead_setauthsize,
2504 .encrypt = aead_encrypt,
2505 .decrypt = aead_decrypt,
2506 .ivsize = AES_BLOCK_SIZE,
2507 .maxauthsize = SHA512_DIGEST_SIZE,
2510 .class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2511 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2512 OP_ALG_AAI_HMAC_PRECOMP,
2519 .cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2520 .cra_driver_name = "authenc-hmac-md5-"
2521 "cbc-des3_ede-caam",
2522 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2524 .setkey = aead_setkey,
2525 .setauthsize = aead_setauthsize,
2526 .encrypt = aead_encrypt,
2527 .decrypt = aead_decrypt,
2528 .ivsize = DES3_EDE_BLOCK_SIZE,
2529 .maxauthsize = MD5_DIGEST_SIZE,
2532 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2533 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2534 OP_ALG_AAI_HMAC_PRECOMP,
2540 .cra_name = "echainiv(authenc(hmac(md5),"
2542 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2543 "cbc-des3_ede-caam",
2544 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2546 .setkey = aead_setkey,
2547 .setauthsize = aead_setauthsize,
2548 .encrypt = aead_encrypt,
2549 .decrypt = aead_decrypt,
2550 .ivsize = DES3_EDE_BLOCK_SIZE,
2551 .maxauthsize = MD5_DIGEST_SIZE,
2554 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2555 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2556 OP_ALG_AAI_HMAC_PRECOMP,
2563 .cra_name = "authenc(hmac(sha1),"
2565 .cra_driver_name = "authenc-hmac-sha1-"
2566 "cbc-des3_ede-caam",
2567 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2569 .setkey = aead_setkey,
2570 .setauthsize = aead_setauthsize,
2571 .encrypt = aead_encrypt,
2572 .decrypt = aead_decrypt,
2573 .ivsize = DES3_EDE_BLOCK_SIZE,
2574 .maxauthsize = SHA1_DIGEST_SIZE,
2577 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2578 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2579 OP_ALG_AAI_HMAC_PRECOMP,
2585 .cra_name = "echainiv(authenc(hmac(sha1),"
2587 .cra_driver_name = "echainiv-authenc-"
2589 "cbc-des3_ede-caam",
2590 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2592 .setkey = aead_setkey,
2593 .setauthsize = aead_setauthsize,
2594 .encrypt = aead_encrypt,
2595 .decrypt = aead_decrypt,
2596 .ivsize = DES3_EDE_BLOCK_SIZE,
2597 .maxauthsize = SHA1_DIGEST_SIZE,
2600 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2601 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2602 OP_ALG_AAI_HMAC_PRECOMP,
2609 .cra_name = "authenc(hmac(sha224),"
2611 .cra_driver_name = "authenc-hmac-sha224-"
2612 "cbc-des3_ede-caam",
2613 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2615 .setkey = aead_setkey,
2616 .setauthsize = aead_setauthsize,
2617 .encrypt = aead_encrypt,
2618 .decrypt = aead_decrypt,
2619 .ivsize = DES3_EDE_BLOCK_SIZE,
2620 .maxauthsize = SHA224_DIGEST_SIZE,
2623 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2624 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2625 OP_ALG_AAI_HMAC_PRECOMP,
2631 .cra_name = "echainiv(authenc(hmac(sha224),"
2633 .cra_driver_name = "echainiv-authenc-"
2635 "cbc-des3_ede-caam",
2636 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2638 .setkey = aead_setkey,
2639 .setauthsize = aead_setauthsize,
2640 .encrypt = aead_encrypt,
2641 .decrypt = aead_decrypt,
2642 .ivsize = DES3_EDE_BLOCK_SIZE,
2643 .maxauthsize = SHA224_DIGEST_SIZE,
2646 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2647 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2648 OP_ALG_AAI_HMAC_PRECOMP,
2655 .cra_name = "authenc(hmac(sha256),"
2657 .cra_driver_name = "authenc-hmac-sha256-"
2658 "cbc-des3_ede-caam",
2659 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2661 .setkey = aead_setkey,
2662 .setauthsize = aead_setauthsize,
2663 .encrypt = aead_encrypt,
2664 .decrypt = aead_decrypt,
2665 .ivsize = DES3_EDE_BLOCK_SIZE,
2666 .maxauthsize = SHA256_DIGEST_SIZE,
2669 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2670 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2671 OP_ALG_AAI_HMAC_PRECOMP,
2677 .cra_name = "echainiv(authenc(hmac(sha256),"
2679 .cra_driver_name = "echainiv-authenc-"
2681 "cbc-des3_ede-caam",
2682 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2684 .setkey = aead_setkey,
2685 .setauthsize = aead_setauthsize,
2686 .encrypt = aead_encrypt,
2687 .decrypt = aead_decrypt,
2688 .ivsize = DES3_EDE_BLOCK_SIZE,
2689 .maxauthsize = SHA256_DIGEST_SIZE,
2692 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2693 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2694 OP_ALG_AAI_HMAC_PRECOMP,
2701 .cra_name = "authenc(hmac(sha384),"
2703 .cra_driver_name = "authenc-hmac-sha384-"
2704 "cbc-des3_ede-caam",
2705 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2707 .setkey = aead_setkey,
2708 .setauthsize = aead_setauthsize,
2709 .encrypt = aead_encrypt,
2710 .decrypt = aead_decrypt,
2711 .ivsize = DES3_EDE_BLOCK_SIZE,
2712 .maxauthsize = SHA384_DIGEST_SIZE,
2715 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2716 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2717 OP_ALG_AAI_HMAC_PRECOMP,
2723 .cra_name = "echainiv(authenc(hmac(sha384),"
2725 .cra_driver_name = "echainiv-authenc-"
2727 "cbc-des3_ede-caam",
2728 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2730 .setkey = aead_setkey,
2731 .setauthsize = aead_setauthsize,
2732 .encrypt = aead_encrypt,
2733 .decrypt = aead_decrypt,
2734 .ivsize = DES3_EDE_BLOCK_SIZE,
2735 .maxauthsize = SHA384_DIGEST_SIZE,
2738 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2739 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2740 OP_ALG_AAI_HMAC_PRECOMP,
2747 .cra_name = "authenc(hmac(sha512),"
2749 .cra_driver_name = "authenc-hmac-sha512-"
2750 "cbc-des3_ede-caam",
2751 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2753 .setkey = aead_setkey,
2754 .setauthsize = aead_setauthsize,
2755 .encrypt = aead_encrypt,
2756 .decrypt = aead_decrypt,
2757 .ivsize = DES3_EDE_BLOCK_SIZE,
2758 .maxauthsize = SHA512_DIGEST_SIZE,
2761 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2762 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2763 OP_ALG_AAI_HMAC_PRECOMP,
2769 .cra_name = "echainiv(authenc(hmac(sha512),"
2771 .cra_driver_name = "echainiv-authenc-"
2773 "cbc-des3_ede-caam",
2774 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2776 .setkey = aead_setkey,
2777 .setauthsize = aead_setauthsize,
2778 .encrypt = aead_encrypt,
2779 .decrypt = aead_decrypt,
2780 .ivsize = DES3_EDE_BLOCK_SIZE,
2781 .maxauthsize = SHA512_DIGEST_SIZE,
2784 .class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2785 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2786 OP_ALG_AAI_HMAC_PRECOMP,
2793 .cra_name = "authenc(hmac(md5),cbc(des))",
2794 .cra_driver_name = "authenc-hmac-md5-"
2796 .cra_blocksize = DES_BLOCK_SIZE,
2798 .setkey = aead_setkey,
2799 .setauthsize = aead_setauthsize,
2800 .encrypt = aead_encrypt,
2801 .decrypt = aead_decrypt,
2802 .ivsize = DES_BLOCK_SIZE,
2803 .maxauthsize = MD5_DIGEST_SIZE,
2806 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2807 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2808 OP_ALG_AAI_HMAC_PRECOMP,
2814 .cra_name = "echainiv(authenc(hmac(md5),"
2816 .cra_driver_name = "echainiv-authenc-hmac-md5-"
2818 .cra_blocksize = DES_BLOCK_SIZE,
2820 .setkey = aead_setkey,
2821 .setauthsize = aead_setauthsize,
2822 .encrypt = aead_encrypt,
2823 .decrypt = aead_decrypt,
2824 .ivsize = DES_BLOCK_SIZE,
2825 .maxauthsize = MD5_DIGEST_SIZE,
2828 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2829 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
2830 OP_ALG_AAI_HMAC_PRECOMP,
2837 .cra_name = "authenc(hmac(sha1),cbc(des))",
2838 .cra_driver_name = "authenc-hmac-sha1-"
2840 .cra_blocksize = DES_BLOCK_SIZE,
2842 .setkey = aead_setkey,
2843 .setauthsize = aead_setauthsize,
2844 .encrypt = aead_encrypt,
2845 .decrypt = aead_decrypt,
2846 .ivsize = DES_BLOCK_SIZE,
2847 .maxauthsize = SHA1_DIGEST_SIZE,
2850 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2851 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2852 OP_ALG_AAI_HMAC_PRECOMP,
2858 .cra_name = "echainiv(authenc(hmac(sha1),"
2860 .cra_driver_name = "echainiv-authenc-"
2861 "hmac-sha1-cbc-des-caam",
2862 .cra_blocksize = DES_BLOCK_SIZE,
2864 .setkey = aead_setkey,
2865 .setauthsize = aead_setauthsize,
2866 .encrypt = aead_encrypt,
2867 .decrypt = aead_decrypt,
2868 .ivsize = DES_BLOCK_SIZE,
2869 .maxauthsize = SHA1_DIGEST_SIZE,
2872 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2873 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2874 OP_ALG_AAI_HMAC_PRECOMP,
2881 .cra_name = "authenc(hmac(sha224),cbc(des))",
2882 .cra_driver_name = "authenc-hmac-sha224-"
2884 .cra_blocksize = DES_BLOCK_SIZE,
2886 .setkey = aead_setkey,
2887 .setauthsize = aead_setauthsize,
2888 .encrypt = aead_encrypt,
2889 .decrypt = aead_decrypt,
2890 .ivsize = DES_BLOCK_SIZE,
2891 .maxauthsize = SHA224_DIGEST_SIZE,
2894 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2895 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2896 OP_ALG_AAI_HMAC_PRECOMP,
2902 .cra_name = "echainiv(authenc(hmac(sha224),"
2904 .cra_driver_name = "echainiv-authenc-"
2905 "hmac-sha224-cbc-des-caam",
2906 .cra_blocksize = DES_BLOCK_SIZE,
2908 .setkey = aead_setkey,
2909 .setauthsize = aead_setauthsize,
2910 .encrypt = aead_encrypt,
2911 .decrypt = aead_decrypt,
2912 .ivsize = DES_BLOCK_SIZE,
2913 .maxauthsize = SHA224_DIGEST_SIZE,
2916 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2917 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2918 OP_ALG_AAI_HMAC_PRECOMP,
2925 .cra_name = "authenc(hmac(sha256),cbc(des))",
2926 .cra_driver_name = "authenc-hmac-sha256-"
2928 .cra_blocksize = DES_BLOCK_SIZE,
2930 .setkey = aead_setkey,
2931 .setauthsize = aead_setauthsize,
2932 .encrypt = aead_encrypt,
2933 .decrypt = aead_decrypt,
2934 .ivsize = DES_BLOCK_SIZE,
2935 .maxauthsize = SHA256_DIGEST_SIZE,
2938 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2939 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2940 OP_ALG_AAI_HMAC_PRECOMP,
2946 .cra_name = "echainiv(authenc(hmac(sha256),"
2948 .cra_driver_name = "echainiv-authenc-"
2949 "hmac-sha256-cbc-des-caam",
2950 .cra_blocksize = DES_BLOCK_SIZE,
2952 .setkey = aead_setkey,
2953 .setauthsize = aead_setauthsize,
2954 .encrypt = aead_encrypt,
2955 .decrypt = aead_decrypt,
2956 .ivsize = DES_BLOCK_SIZE,
2957 .maxauthsize = SHA256_DIGEST_SIZE,
2960 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2961 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2962 OP_ALG_AAI_HMAC_PRECOMP,
2969 .cra_name = "authenc(hmac(sha384),cbc(des))",
2970 .cra_driver_name = "authenc-hmac-sha384-"
2972 .cra_blocksize = DES_BLOCK_SIZE,
2974 .setkey = aead_setkey,
2975 .setauthsize = aead_setauthsize,
2976 .encrypt = aead_encrypt,
2977 .decrypt = aead_decrypt,
2978 .ivsize = DES_BLOCK_SIZE,
2979 .maxauthsize = SHA384_DIGEST_SIZE,
2982 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2983 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2984 OP_ALG_AAI_HMAC_PRECOMP,
2990 .cra_name = "echainiv(authenc(hmac(sha384),"
2992 .cra_driver_name = "echainiv-authenc-"
2993 "hmac-sha384-cbc-des-caam",
2994 .cra_blocksize = DES_BLOCK_SIZE,
2996 .setkey = aead_setkey,
2997 .setauthsize = aead_setauthsize,
2998 .encrypt = aead_encrypt,
2999 .decrypt = aead_decrypt,
3000 .ivsize = DES_BLOCK_SIZE,
3001 .maxauthsize = SHA384_DIGEST_SIZE,
3004 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3005 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3006 OP_ALG_AAI_HMAC_PRECOMP,
3013 .cra_name = "authenc(hmac(sha512),cbc(des))",
3014 .cra_driver_name = "authenc-hmac-sha512-"
3016 .cra_blocksize = DES_BLOCK_SIZE,
3018 .setkey = aead_setkey,
3019 .setauthsize = aead_setauthsize,
3020 .encrypt = aead_encrypt,
3021 .decrypt = aead_decrypt,
3022 .ivsize = DES_BLOCK_SIZE,
3023 .maxauthsize = SHA512_DIGEST_SIZE,
3026 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3027 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3028 OP_ALG_AAI_HMAC_PRECOMP,
3034 .cra_name = "echainiv(authenc(hmac(sha512),"
3036 .cra_driver_name = "echainiv-authenc-"
3037 "hmac-sha512-cbc-des-caam",
3038 .cra_blocksize = DES_BLOCK_SIZE,
3040 .setkey = aead_setkey,
3041 .setauthsize = aead_setauthsize,
3042 .encrypt = aead_encrypt,
3043 .decrypt = aead_decrypt,
3044 .ivsize = DES_BLOCK_SIZE,
3045 .maxauthsize = SHA512_DIGEST_SIZE,
3048 .class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3049 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3050 OP_ALG_AAI_HMAC_PRECOMP,
3057 .cra_name = "authenc(hmac(md5),"
3058 "rfc3686(ctr(aes)))",
3059 .cra_driver_name = "authenc-hmac-md5-"
3060 "rfc3686-ctr-aes-caam",
3063 .setkey = aead_setkey,
3064 .setauthsize = aead_setauthsize,
3065 .encrypt = aead_encrypt,
3066 .decrypt = aead_decrypt,
3067 .ivsize = CTR_RFC3686_IV_SIZE,
3068 .maxauthsize = MD5_DIGEST_SIZE,
3071 .class1_alg_type = OP_ALG_ALGSEL_AES |
3072 OP_ALG_AAI_CTR_MOD128,
3073 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3074 OP_ALG_AAI_HMAC_PRECOMP,
3081 .cra_name = "seqiv(authenc("
3082 "hmac(md5),rfc3686(ctr(aes))))",
3083 .cra_driver_name = "seqiv-authenc-hmac-md5-"
3084 "rfc3686-ctr-aes-caam",
3087 .setkey = aead_setkey,
3088 .setauthsize = aead_setauthsize,
3089 .encrypt = aead_encrypt,
3090 .decrypt = aead_decrypt,
3091 .ivsize = CTR_RFC3686_IV_SIZE,
3092 .maxauthsize = MD5_DIGEST_SIZE,
3095 .class1_alg_type = OP_ALG_ALGSEL_AES |
3096 OP_ALG_AAI_CTR_MOD128,
3097 .class2_alg_type = OP_ALG_ALGSEL_MD5 |
3098 OP_ALG_AAI_HMAC_PRECOMP,
3106 .cra_name = "authenc(hmac(sha1),"
3107 "rfc3686(ctr(aes)))",
3108 .cra_driver_name = "authenc-hmac-sha1-"
3109 "rfc3686-ctr-aes-caam",
3112 .setkey = aead_setkey,
3113 .setauthsize = aead_setauthsize,
3114 .encrypt = aead_encrypt,
3115 .decrypt = aead_decrypt,
3116 .ivsize = CTR_RFC3686_IV_SIZE,
3117 .maxauthsize = SHA1_DIGEST_SIZE,
3120 .class1_alg_type = OP_ALG_ALGSEL_AES |
3121 OP_ALG_AAI_CTR_MOD128,
3122 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3123 OP_ALG_AAI_HMAC_PRECOMP,
3130 .cra_name = "seqiv(authenc("
3131 "hmac(sha1),rfc3686(ctr(aes))))",
3132 .cra_driver_name = "seqiv-authenc-hmac-sha1-"
3133 "rfc3686-ctr-aes-caam",
3136 .setkey = aead_setkey,
3137 .setauthsize = aead_setauthsize,
3138 .encrypt = aead_encrypt,
3139 .decrypt = aead_decrypt,
3140 .ivsize = CTR_RFC3686_IV_SIZE,
3141 .maxauthsize = SHA1_DIGEST_SIZE,
3144 .class1_alg_type = OP_ALG_ALGSEL_AES |
3145 OP_ALG_AAI_CTR_MOD128,
3146 .class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3147 OP_ALG_AAI_HMAC_PRECOMP,
3155 .cra_name = "authenc(hmac(sha224),"
3156 "rfc3686(ctr(aes)))",
3157 .cra_driver_name = "authenc-hmac-sha224-"
3158 "rfc3686-ctr-aes-caam",
3161 .setkey = aead_setkey,
3162 .setauthsize = aead_setauthsize,
3163 .encrypt = aead_encrypt,
3164 .decrypt = aead_decrypt,
3165 .ivsize = CTR_RFC3686_IV_SIZE,
3166 .maxauthsize = SHA224_DIGEST_SIZE,
3169 .class1_alg_type = OP_ALG_ALGSEL_AES |
3170 OP_ALG_AAI_CTR_MOD128,
3171 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3172 OP_ALG_AAI_HMAC_PRECOMP,
3179 .cra_name = "seqiv(authenc("
3180 "hmac(sha224),rfc3686(ctr(aes))))",
3181 .cra_driver_name = "seqiv-authenc-hmac-sha224-"
3182 "rfc3686-ctr-aes-caam",
3185 .setkey = aead_setkey,
3186 .setauthsize = aead_setauthsize,
3187 .encrypt = aead_encrypt,
3188 .decrypt = aead_decrypt,
3189 .ivsize = CTR_RFC3686_IV_SIZE,
3190 .maxauthsize = SHA224_DIGEST_SIZE,
3193 .class1_alg_type = OP_ALG_ALGSEL_AES |
3194 OP_ALG_AAI_CTR_MOD128,
3195 .class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3196 OP_ALG_AAI_HMAC_PRECOMP,
3204 .cra_name = "authenc(hmac(sha256),"
3205 "rfc3686(ctr(aes)))",
3206 .cra_driver_name = "authenc-hmac-sha256-"
3207 "rfc3686-ctr-aes-caam",
3210 .setkey = aead_setkey,
3211 .setauthsize = aead_setauthsize,
3212 .encrypt = aead_encrypt,
3213 .decrypt = aead_decrypt,
3214 .ivsize = CTR_RFC3686_IV_SIZE,
3215 .maxauthsize = SHA256_DIGEST_SIZE,
3218 .class1_alg_type = OP_ALG_ALGSEL_AES |
3219 OP_ALG_AAI_CTR_MOD128,
3220 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3221 OP_ALG_AAI_HMAC_PRECOMP,
3228 .cra_name = "seqiv(authenc(hmac(sha256),"
3229 "rfc3686(ctr(aes))))",
3230 .cra_driver_name = "seqiv-authenc-hmac-sha256-"
3231 "rfc3686-ctr-aes-caam",
3234 .setkey = aead_setkey,
3235 .setauthsize = aead_setauthsize,
3236 .encrypt = aead_encrypt,
3237 .decrypt = aead_decrypt,
3238 .ivsize = CTR_RFC3686_IV_SIZE,
3239 .maxauthsize = SHA256_DIGEST_SIZE,
3242 .class1_alg_type = OP_ALG_ALGSEL_AES |
3243 OP_ALG_AAI_CTR_MOD128,
3244 .class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3245 OP_ALG_AAI_HMAC_PRECOMP,
3253 .cra_name = "authenc(hmac(sha384),"
3254 "rfc3686(ctr(aes)))",
3255 .cra_driver_name = "authenc-hmac-sha384-"
3256 "rfc3686-ctr-aes-caam",
3259 .setkey = aead_setkey,
3260 .setauthsize = aead_setauthsize,
3261 .encrypt = aead_encrypt,
3262 .decrypt = aead_decrypt,
3263 .ivsize = CTR_RFC3686_IV_SIZE,
3264 .maxauthsize = SHA384_DIGEST_SIZE,
3267 .class1_alg_type = OP_ALG_ALGSEL_AES |
3268 OP_ALG_AAI_CTR_MOD128,
3269 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3270 OP_ALG_AAI_HMAC_PRECOMP,
3277 .cra_name = "seqiv(authenc(hmac(sha384),"
3278 "rfc3686(ctr(aes))))",
3279 .cra_driver_name = "seqiv-authenc-hmac-sha384-"
3280 "rfc3686-ctr-aes-caam",
3283 .setkey = aead_setkey,
3284 .setauthsize = aead_setauthsize,
3285 .encrypt = aead_encrypt,
3286 .decrypt = aead_decrypt,
3287 .ivsize = CTR_RFC3686_IV_SIZE,
3288 .maxauthsize = SHA384_DIGEST_SIZE,
3291 .class1_alg_type = OP_ALG_ALGSEL_AES |
3292 OP_ALG_AAI_CTR_MOD128,
3293 .class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3294 OP_ALG_AAI_HMAC_PRECOMP,
3302 .cra_name = "authenc(hmac(sha512),"
3303 "rfc3686(ctr(aes)))",
3304 .cra_driver_name = "authenc-hmac-sha512-"
3305 "rfc3686-ctr-aes-caam",
3308 .setkey = aead_setkey,
3309 .setauthsize = aead_setauthsize,
3310 .encrypt = aead_encrypt,
3311 .decrypt = aead_decrypt,
3312 .ivsize = CTR_RFC3686_IV_SIZE,
3313 .maxauthsize = SHA512_DIGEST_SIZE,
3316 .class1_alg_type = OP_ALG_ALGSEL_AES |
3317 OP_ALG_AAI_CTR_MOD128,
3318 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3319 OP_ALG_AAI_HMAC_PRECOMP,
3326 .cra_name = "seqiv(authenc(hmac(sha512),"
3327 "rfc3686(ctr(aes))))",
3328 .cra_driver_name = "seqiv-authenc-hmac-sha512-"
3329 "rfc3686-ctr-aes-caam",
3332 .setkey = aead_setkey,
3333 .setauthsize = aead_setauthsize,
3334 .encrypt = aead_encrypt,
3335 .decrypt = aead_decrypt,
3336 .ivsize = CTR_RFC3686_IV_SIZE,
3337 .maxauthsize = SHA512_DIGEST_SIZE,
3340 .class1_alg_type = OP_ALG_ALGSEL_AES |
3341 OP_ALG_AAI_CTR_MOD128,
3342 .class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3343 OP_ALG_AAI_HMAC_PRECOMP,
/*
 * caam_crypto_alg - driver-private wrapper tying a registered crypto_alg
 * to its CAAM descriptor-template data (caam_alg_entry) and to the
 * driver-global alg_list via @entry.
 * NOTE(review): the closing "};" of this struct is missing from this
 * extraction; restore from the original source file.
 */
3350 struct caam_crypto_alg {
3351 struct crypto_alg crypto_alg; /* generic alg registered with the API */
3352 struct list_head entry;       /* node on the driver's alg_list */
3353 struct caam_alg_entry caam;   /* class1/class2 descriptor templates */
/*
 * caam_init_common() - per-transform initialisation shared by the
 * ablkcipher and AEAD ->init hooks: allocate a job ring for this tfm and
 * record the class 1 (cipher) / class 2 (auth) OP_ALG descriptor-header
 * templates taken from the algorithm entry.
 * Returns 0 on success, or the PTR_ERR() from caam_jr_alloc() on failure.
 * NOTE(review): the function braces and the trailing "return 0;" were
 * lost in this extraction; restore from the original source.
 */
3356 static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam)
3358 ctx->jrdev = caam_jr_alloc();
3359 if (IS_ERR(ctx->jrdev)) {
3360 pr_err("Job Ring Device allocation for transform failed\n");
3361 return PTR_ERR(ctx->jrdev);
3364 /* copy descriptor header template value */
3365 ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3366 ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
/*
 * caam_cra_init() - crypto_alg ->cra_init hook: recover the enclosing
 * caam_crypto_alg from the generic crypto_alg via container_of() and
 * delegate to caam_init_common() with its caam_alg_entry.
 * NOTE(review): function braces are missing from this extraction.
 */
3371 static int caam_cra_init(struct crypto_tfm *tfm)
3373 struct crypto_alg *alg = tfm->__crt_alg;
3374 struct caam_crypto_alg *caam_alg =
3375 container_of(alg, struct caam_crypto_alg, crypto_alg);
3376 struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
3378 return caam_init_common(ctx, &caam_alg->caam);
/*
 * caam_aead_init() - aead_alg ->init hook: recover the enclosing
 * caam_aead_alg from the generic aead_alg via container_of() and
 * delegate to caam_init_common() with its caam_alg_entry.
 * NOTE(review): function braces are missing from this extraction.
 */
3381 static int caam_aead_init(struct crypto_aead *tfm)
3383 struct aead_alg *alg = crypto_aead_alg(tfm);
3384 struct caam_aead_alg *caam_alg =
3385 container_of(alg, struct caam_aead_alg, aead);
3386 struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3388 return caam_init_common(ctx, &caam_alg->caam);
/*
 * caam_exit_common() - per-transform teardown shared by the ablkcipher
 * and AEAD ->exit hooks: DMA-unmap the encrypt, decrypt and givencrypt
 * shared descriptors and the key material (each only if it was mapped
 * and the mapping did not error), then release the job ring.
 * NOTE(review): the "DMA_TO_DEVICE);" line closing the givenc unmap call
 * and the "if (ctx->key_dma &&" guard preceding the key unmap are
 * missing from this extraction; restore from the original source.
 */
3391 static void caam_exit_common(struct caam_ctx *ctx)
3393 if (ctx->sh_desc_enc_dma &&
3394 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_enc_dma))
3395 dma_unmap_single(ctx->jrdev, ctx->sh_desc_enc_dma,
3396 desc_bytes(ctx->sh_desc_enc), DMA_TO_DEVICE);
3397 if (ctx->sh_desc_dec_dma &&
3398 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_dec_dma))
3399 dma_unmap_single(ctx->jrdev, ctx->sh_desc_dec_dma,
3400 desc_bytes(ctx->sh_desc_dec), DMA_TO_DEVICE);
3401 if (ctx->sh_desc_givenc_dma &&
3402 !dma_mapping_error(ctx->jrdev, ctx->sh_desc_givenc_dma))
3403 dma_unmap_single(ctx->jrdev, ctx->sh_desc_givenc_dma,
3404 desc_bytes(ctx->sh_desc_givenc),
3407 !dma_mapping_error(ctx->jrdev, ctx->key_dma))
3408 dma_unmap_single(ctx->jrdev, ctx->key_dma,
3409 ctx->cdata.keylen + ctx->adata.keylen_pad,
3412 caam_jr_free(ctx->jrdev);
/*
 * caam_cra_exit() - crypto_alg ->cra_exit hook; thin wrapper around
 * caam_exit_common() using the tfm's private context.
 * NOTE(review): function braces are missing from this extraction.
 */
3415 static void caam_cra_exit(struct crypto_tfm *tfm)
3417 caam_exit_common(crypto_tfm_ctx(tfm));
/*
 * caam_aead_exit() - aead_alg ->exit hook; thin wrapper around
 * caam_exit_common() using the AEAD tfm's private context.
 * NOTE(review): function braces are missing from this extraction.
 */
3420 static void caam_aead_exit(struct crypto_aead *tfm)
3422 caam_exit_common(crypto_aead_ctx(tfm));
/*
 * caam_algapi_exit() - module exit: unregister every AEAD template that
 * was successfully registered at init time, then walk alg_list to
 * unregister and unlink each dynamically allocated ablkcipher entry.
 * NOTE(review): several structural lines (braces, the kfree() of each
 * list entry) are missing from this extraction; restore from the
 * original source.
 */
3425 static void __exit caam_algapi_exit(void)
3428 struct caam_crypto_alg *t_alg, *n;
3431 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3432 struct caam_aead_alg *t_alg = driver_aeads + i;
3434 if (t_alg->registered)
3435 crypto_unregister_aead(&t_alg->aead);
3441 list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
3442 crypto_unregister_alg(&t_alg->crypto_alg);
3443 list_del(&t_alg->entry);
/*
 * caam_alg_alloc() - build a caam_crypto_alg from a driver template:
 * copy the names, set module/init/exit/priority/blocksize/ctxsize and
 * the async + kern-driver-only flags, choose the crypto type for
 * givcipher vs. ablkcipher templates, and copy the class1/class2
 * descriptor-header values into the caam entry.
 * Returns the newly allocated object or ERR_PTR(-ENOMEM).
 * NOTE(review): the "if (!t_alg) {" allocation-failure guard, some
 * closing braces/break statements in the switch, and the final
 * "return t_alg;" are missing from this extraction.
 */
3448 static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
3451 struct caam_crypto_alg *t_alg;
3452 struct crypto_alg *alg;
3454 t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
3456 pr_err("failed to allocate t_alg\n");
3457 return ERR_PTR(-ENOMEM);
3460 alg = &t_alg->crypto_alg;
3462 snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
3463 snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
3464 template->driver_name);
3465 alg->cra_module = THIS_MODULE;
3466 alg->cra_init = caam_cra_init;
3467 alg->cra_exit = caam_cra_exit;
3468 alg->cra_priority = CAAM_CRA_PRIORITY;
3469 alg->cra_blocksize = template->blocksize;
3470 alg->cra_alignmask = 0;
3471 alg->cra_ctxsize = sizeof(struct caam_ctx);
3472 alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
3474 switch (template->type) {
3475 case CRYPTO_ALG_TYPE_GIVCIPHER:
3476 alg->cra_type = &crypto_givcipher_type;
3477 alg->cra_ablkcipher = template->template_ablkcipher;
3479 case CRYPTO_ALG_TYPE_ABLKCIPHER:
3480 alg->cra_type = &crypto_ablkcipher_type;
3481 alg->cra_ablkcipher = template->template_ablkcipher;
3485 t_alg->caam.class1_alg_type = template->class1_alg_type;
3486 t_alg->caam.class2_alg_type = template->class2_alg_type;
/*
 * caam_aead_alg_init() - fill the driver-invariant fields of a static
 * AEAD template before registration: owning module, priority, context
 * size, async + kern-driver-only flags, and the init/exit hooks.
 * NOTE(review): function braces are missing from this extraction.
 */
3491 static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3493 struct aead_alg *alg = &t_alg->aead;
3495 alg->base.cra_module = THIS_MODULE;
3496 alg->base.cra_priority = CAAM_CRA_PRIORITY;
3497 alg->base.cra_ctxsize = sizeof(struct caam_ctx);
3498 alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;
3500 alg->init = caam_aead_init;
3501 alg->exit = caam_aead_exit;
/*
 * caam_algapi_init() - module init: locate the CAAM controller device
 * node ("fsl,sec-v4.0", falling back to "fsl,sec4.0"), fetch its driver
 * data, then read the CHA version/instantiation registers to learn
 * which DES/AES/MD engines are present (restricting the digest limit to
 * SHA-256 size on LP256 parts). With that capability mask, register
 * every supported ablkcipher/givcipher template from driver_algs and
 * every supported AEAD from driver_aeads, skipping algorithms the
 * hardware cannot run (missing DES/AES engine, GCM on AES-LP, digest
 * larger than md_limit).
 * NOTE(review): multiple guard lines, braces, "continue"/"return"
 * statements and error paths are missing from this extraction; restore
 * from the original source before relying on the control flow shown.
 */
3504 static int __init caam_algapi_init(void)
3506 struct device_node *dev_node;
3507 struct platform_device *pdev;
3508 struct device *ctrldev;
3509 struct caam_drv_private *priv;
3511 u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
3512 unsigned int md_limit = SHA512_DIGEST_SIZE;
3513 bool registered = false;
3515 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
3517 dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
3522 pdev = of_find_device_by_node(dev_node);
3524 of_node_put(dev_node);
3528 ctrldev = &pdev->dev;
3529 priv = dev_get_drvdata(ctrldev);
3530 of_node_put(dev_node);
3533 * If priv is NULL, it's probably because the caam driver wasn't
3534 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
3540 INIT_LIST_HEAD(&alg_list);
3543 * Register crypto algorithms the device supports.
3544 * First, detect presence and attributes of DES, AES, and MD blocks.
3546 cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
3547 cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
3548 des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
3549 aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
3550 md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3552 /* If MD is present, limit digest size based on LP256 */
3553 if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
3554 md_limit = SHA256_DIGEST_SIZE;
3556 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3557 struct caam_crypto_alg *t_alg;
3558 struct caam_alg_template *alg = driver_algs + i;
3559 u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;
3561 /* Skip DES algorithms if not supported by device */
3563 ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3564 (alg_sel == OP_ALG_ALGSEL_DES)))
3567 /* Skip AES algorithms if not supported by device */
3568 if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3572 * Check support for AES modes not available
3575 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3576 if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
3580 t_alg = caam_alg_alloc(alg);
3581 if (IS_ERR(t_alg)) {
3582 err = PTR_ERR(t_alg);
3583 pr_warn("%s alg allocation failed\n", alg->driver_name);
3587 err = crypto_register_alg(&t_alg->crypto_alg);
3589 pr_warn("%s alg registration failed\n",
3590 t_alg->crypto_alg.cra_driver_name);
3595 list_add_tail(&t_alg->entry, &alg_list);
3599 for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3600 struct caam_aead_alg *t_alg = driver_aeads + i;
3601 u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3603 u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3605 u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3607 /* Skip DES algorithms if not supported by device */
3609 ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3610 (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3613 /* Skip AES algorithms if not supported by device */
3614 if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3618 * Check support for AES algorithms not available
3621 if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
3622 if (alg_aai == OP_ALG_AAI_GCM)
3626 * Skip algorithms requiring message digests
3627 * if MD or MD size is not supported by device.
3630 (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
3633 caam_aead_alg_init(t_alg);
3635 err = crypto_register_aead(&t_alg->aead);
3637 pr_warn("%s alg registration failed\n",
3638 t_alg->aead.base.cra_driver_name);
3642 t_alg->registered = true;
3647 pr_info("caam algorithms registered in /proc/crypto\n");
/* Module entry/exit registration and metadata. */
3652 module_init(caam_algapi_init);
3653 module_exit(caam_algapi_exit);
3655 MODULE_LICENSE("GPL");
3656 MODULE_DESCRIPTION("FSL CAAM support for crypto API");
3657 MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");