/*
 * Glue Code for assembler optimized version of Blowfish
 *
 * Copyright © 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 */
28 #include <asm/processor.h>
29 #include <crypto/blowfish.h>
30 #include <linux/crypto.h>
31 #include <linux/init.h>
32 #include <linux/module.h>
33 #include <linux/types.h>
34 #include <crypto/algapi.h>
35 #include <asm/crypto/blowfish.h>
37 /* regular block cipher functions */
38 asmlinkage void __blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src,
40 EXPORT_SYMBOL_GPL(__blowfish_enc_blk);
42 asmlinkage void blowfish_dec_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src);
43 EXPORT_SYMBOL_GPL(blowfish_dec_blk);
45 /* 4-way parallel cipher functions */
46 asmlinkage void __blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst,
47 const u8 *src, bool xor);
48 EXPORT_SYMBOL_GPL(__blowfish_enc_blk_4way);
50 asmlinkage void blowfish_dec_blk_4way(struct bf_ctx *ctx, u8 *dst,
52 EXPORT_SYMBOL_GPL(blowfish_dec_blk_4way);
54 static void blowfish_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
56 blowfish_enc_blk(crypto_tfm_ctx(tfm), dst, src);
59 static void blowfish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
61 blowfish_dec_blk(crypto_tfm_ctx(tfm), dst, src);
64 static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk,
65 void (*fn)(struct bf_ctx *, u8 *, const u8 *),
66 void (*fn_4way)(struct bf_ctx *, u8 *, const u8 *))
68 struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
69 unsigned int bsize = BF_BLOCK_SIZE;
73 err = blkcipher_walk_virt(desc, walk);
75 while ((nbytes = walk->nbytes)) {
76 u8 *wsrc = walk->src.virt.addr;
77 u8 *wdst = walk->dst.virt.addr;
79 /* Process four block batch */
80 if (nbytes >= bsize * 4) {
82 fn_4way(ctx, wdst, wsrc);
87 } while (nbytes >= bsize * 4);
93 /* Handle leftovers */
100 } while (nbytes >= bsize);
103 err = blkcipher_walk_done(desc, walk, nbytes);
109 static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
110 struct scatterlist *src, unsigned int nbytes)
112 struct blkcipher_walk walk;
114 blkcipher_walk_init(&walk, dst, src, nbytes);
115 return ecb_crypt(desc, &walk, blowfish_enc_blk, blowfish_enc_blk_4way);
118 static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
119 struct scatterlist *src, unsigned int nbytes)
121 struct blkcipher_walk walk;
123 blkcipher_walk_init(&walk, dst, src, nbytes);
124 return ecb_crypt(desc, &walk, blowfish_dec_blk, blowfish_dec_blk_4way);
127 static unsigned int __cbc_encrypt(struct blkcipher_desc *desc,
128 struct blkcipher_walk *walk)
130 struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
131 unsigned int bsize = BF_BLOCK_SIZE;
132 unsigned int nbytes = walk->nbytes;
133 u64 *src = (u64 *)walk->src.virt.addr;
134 u64 *dst = (u64 *)walk->dst.virt.addr;
135 u64 *iv = (u64 *)walk->iv;
139 blowfish_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
145 } while (nbytes >= bsize);
147 *(u64 *)walk->iv = *iv;
151 static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
152 struct scatterlist *src, unsigned int nbytes)
154 struct blkcipher_walk walk;
157 blkcipher_walk_init(&walk, dst, src, nbytes);
158 err = blkcipher_walk_virt(desc, &walk);
160 while ((nbytes = walk.nbytes)) {
161 nbytes = __cbc_encrypt(desc, &walk);
162 err = blkcipher_walk_done(desc, &walk, nbytes);
168 static unsigned int __cbc_decrypt(struct blkcipher_desc *desc,
169 struct blkcipher_walk *walk)
171 struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
172 unsigned int bsize = BF_BLOCK_SIZE;
173 unsigned int nbytes = walk->nbytes;
174 u64 *src = (u64 *)walk->src.virt.addr;
175 u64 *dst = (u64 *)walk->dst.virt.addr;
179 /* Start of the last block. */
180 src += nbytes / bsize - 1;
181 dst += nbytes / bsize - 1;
185 /* Process four block batch */
186 if (nbytes >= bsize * 4) {
188 nbytes -= bsize * 4 - bsize;
196 blowfish_dec_blk_4way(ctx, (u8 *)dst, (u8 *)src);
209 } while (nbytes >= bsize * 4);
215 /* Handle leftovers */
217 blowfish_dec_blk(ctx, (u8 *)dst, (u8 *)src);
229 *dst ^= *(u64 *)walk->iv;
230 *(u64 *)walk->iv = last_iv;
235 static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
236 struct scatterlist *src, unsigned int nbytes)
238 struct blkcipher_walk walk;
241 blkcipher_walk_init(&walk, dst, src, nbytes);
242 err = blkcipher_walk_virt(desc, &walk);
244 while ((nbytes = walk.nbytes)) {
245 nbytes = __cbc_decrypt(desc, &walk);
246 err = blkcipher_walk_done(desc, &walk, nbytes);
252 static void ctr_crypt_final(struct bf_ctx *ctx, struct blkcipher_walk *walk)
254 u8 *ctrblk = walk->iv;
255 u8 keystream[BF_BLOCK_SIZE];
256 u8 *src = walk->src.virt.addr;
257 u8 *dst = walk->dst.virt.addr;
258 unsigned int nbytes = walk->nbytes;
260 blowfish_enc_blk(ctx, keystream, ctrblk);
261 crypto_xor(keystream, src, nbytes);
262 memcpy(dst, keystream, nbytes);
264 crypto_inc(ctrblk, BF_BLOCK_SIZE);
267 static unsigned int __ctr_crypt(struct blkcipher_desc *desc,
268 struct blkcipher_walk *walk)
270 struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
271 unsigned int bsize = BF_BLOCK_SIZE;
272 unsigned int nbytes = walk->nbytes;
273 u64 *src = (u64 *)walk->src.virt.addr;
274 u64 *dst = (u64 *)walk->dst.virt.addr;
275 u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv);
278 /* Process four block batch */
279 if (nbytes >= bsize * 4) {
288 /* create ctrblks for parallel encrypt */
289 ctrblocks[0] = cpu_to_be64(ctrblk++);
290 ctrblocks[1] = cpu_to_be64(ctrblk++);
291 ctrblocks[2] = cpu_to_be64(ctrblk++);
292 ctrblocks[3] = cpu_to_be64(ctrblk++);
294 blowfish_enc_blk_xor_4way(ctx, (u8 *)dst,
299 } while ((nbytes -= bsize * 4) >= bsize * 4);
305 /* Handle leftovers */
310 ctrblocks[0] = cpu_to_be64(ctrblk++);
312 blowfish_enc_blk_xor(ctx, (u8 *)dst, (u8 *)ctrblocks);
316 } while ((nbytes -= bsize) >= bsize);
319 *(__be64 *)walk->iv = cpu_to_be64(ctrblk);
323 static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
324 struct scatterlist *src, unsigned int nbytes)
326 struct blkcipher_walk walk;
329 blkcipher_walk_init(&walk, dst, src, nbytes);
330 err = blkcipher_walk_virt_block(desc, &walk, BF_BLOCK_SIZE);
332 while ((nbytes = walk.nbytes) >= BF_BLOCK_SIZE) {
333 nbytes = __ctr_crypt(desc, &walk);
334 err = blkcipher_walk_done(desc, &walk, nbytes);
338 ctr_crypt_final(crypto_blkcipher_ctx(desc->tfm), &walk);
339 err = blkcipher_walk_done(desc, &walk, 0);
345 static struct crypto_alg bf_algs[4] = { {
346 .cra_name = "blowfish",
347 .cra_driver_name = "blowfish-asm",
349 .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
350 .cra_blocksize = BF_BLOCK_SIZE,
351 .cra_ctxsize = sizeof(struct bf_ctx),
353 .cra_module = THIS_MODULE,
356 .cia_min_keysize = BF_MIN_KEY_SIZE,
357 .cia_max_keysize = BF_MAX_KEY_SIZE,
358 .cia_setkey = blowfish_setkey,
359 .cia_encrypt = blowfish_encrypt,
360 .cia_decrypt = blowfish_decrypt,
364 .cra_name = "ecb(blowfish)",
365 .cra_driver_name = "ecb-blowfish-asm",
367 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
368 .cra_blocksize = BF_BLOCK_SIZE,
369 .cra_ctxsize = sizeof(struct bf_ctx),
371 .cra_type = &crypto_blkcipher_type,
372 .cra_module = THIS_MODULE,
375 .min_keysize = BF_MIN_KEY_SIZE,
376 .max_keysize = BF_MAX_KEY_SIZE,
377 .setkey = blowfish_setkey,
378 .encrypt = ecb_encrypt,
379 .decrypt = ecb_decrypt,
383 .cra_name = "cbc(blowfish)",
384 .cra_driver_name = "cbc-blowfish-asm",
386 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
387 .cra_blocksize = BF_BLOCK_SIZE,
388 .cra_ctxsize = sizeof(struct bf_ctx),
390 .cra_type = &crypto_blkcipher_type,
391 .cra_module = THIS_MODULE,
394 .min_keysize = BF_MIN_KEY_SIZE,
395 .max_keysize = BF_MAX_KEY_SIZE,
396 .ivsize = BF_BLOCK_SIZE,
397 .setkey = blowfish_setkey,
398 .encrypt = cbc_encrypt,
399 .decrypt = cbc_decrypt,
403 .cra_name = "ctr(blowfish)",
404 .cra_driver_name = "ctr-blowfish-asm",
406 .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
408 .cra_ctxsize = sizeof(struct bf_ctx),
410 .cra_type = &crypto_blkcipher_type,
411 .cra_module = THIS_MODULE,
414 .min_keysize = BF_MIN_KEY_SIZE,
415 .max_keysize = BF_MAX_KEY_SIZE,
416 .ivsize = BF_BLOCK_SIZE,
417 .setkey = blowfish_setkey,
418 .encrypt = ctr_crypt,
419 .decrypt = ctr_crypt,
424 static bool is_blacklisted_cpu(void)
426 if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)
429 if (boot_cpu_data.x86 == 0x0f) {
431 * On Pentium 4, blowfish-x86_64 is slower than generic C
432 * implementation because use of 64bit rotates (which are really
433 * slow on P4). Therefore blacklist P4s.
442 module_param(force, int, 0);
443 MODULE_PARM_DESC(force, "Force module load, ignore CPU blacklist");
445 static int __init init(void)
447 if (!force && is_blacklisted_cpu()) {
449 "blowfish-x86_64: performance on this CPU "
450 "would be suboptimal: disabling "
451 "blowfish-x86_64.\n");
455 return crypto_register_algs(bf_algs, ARRAY_SIZE(bf_algs));
458 static void __exit fini(void)
460 crypto_unregister_algs(bf_algs, ARRAY_SIZE(bf_algs));
466 MODULE_LICENSE("GPL");
467 MODULE_DESCRIPTION("Blowfish Cipher Algorithm, asm optimized");
468 MODULE_ALIAS("blowfish");
469 MODULE_ALIAS("blowfish-asm");