author    | André Fabian Silva Delgado <emulatorman@parabola.nu> | 2015-09-08 01:01:14 -0300
committer | André Fabian Silva Delgado <emulatorman@parabola.nu> | 2015-09-08 01:01:14 -0300
commit    | e5fd91f1ef340da553f7a79da9540c3db711c937 (patch)
tree      | b11842027dc6641da63f4bcc524f8678263304a3 /drivers/crypto/vmx/aes_ctr.c
parent    | 2a9b0348e685a63d97486f6749622b61e9e3292f (diff)
Linux-libre 4.2-gnu
Diffstat (limited to 'drivers/crypto/vmx/aes_ctr.c')
-rw-r--r-- | drivers/crypto/vmx/aes_ctr.c | 225
1 file changed, 117 insertions, 108 deletions
diff --git a/drivers/crypto/vmx/aes_ctr.c b/drivers/crypto/vmx/aes_ctr.c
index 96dbee4bf..7adae42a7 100644
--- a/drivers/crypto/vmx/aes_ctr.c
+++ b/drivers/crypto/vmx/aes_ctr.c
@@ -30,138 +30,147 @@
 #include "aesp8-ppc.h"
 
 struct p8_aes_ctr_ctx {
-    struct crypto_blkcipher *fallback;
-    struct aes_key enc_key;
+	struct crypto_blkcipher *fallback;
+	struct aes_key enc_key;
 };
 
 static int p8_aes_ctr_init(struct crypto_tfm *tfm)
 {
-    const char *alg;
-    struct crypto_blkcipher *fallback;
-    struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
-
-    if (!(alg = crypto_tfm_alg_name(tfm))) {
-        printk(KERN_ERR "Failed to get algorithm name.\n");
-        return -ENOENT;
-    }
-
-    fallback = crypto_alloc_blkcipher(alg, 0 ,CRYPTO_ALG_NEED_FALLBACK);
-    if (IS_ERR(fallback)) {
-        printk(KERN_ERR "Failed to allocate transformation for '%s': %ld\n",
-                alg, PTR_ERR(fallback));
-        return PTR_ERR(fallback);
-    }
-    printk(KERN_INFO "Using '%s' as fallback implementation.\n",
-            crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));
-
-    crypto_blkcipher_set_flags(fallback,
-            crypto_blkcipher_get_flags((struct crypto_blkcipher *) tfm));
-    ctx->fallback = fallback;
-
-    return 0;
+	const char *alg;
+	struct crypto_blkcipher *fallback;
+	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
+
+	if (!(alg = crypto_tfm_alg_name(tfm))) {
+		printk(KERN_ERR "Failed to get algorithm name.\n");
+		return -ENOENT;
+	}
+
+	fallback =
+	    crypto_alloc_blkcipher(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
+	if (IS_ERR(fallback)) {
+		printk(KERN_ERR
+		       "Failed to allocate transformation for '%s': %ld\n",
+		       alg, PTR_ERR(fallback));
+		return PTR_ERR(fallback);
+	}
+	printk(KERN_INFO "Using '%s' as fallback implementation.\n",
+	       crypto_tfm_alg_driver_name((struct crypto_tfm *) fallback));
+
+	crypto_blkcipher_set_flags(
+		fallback,
+		crypto_blkcipher_get_flags((struct crypto_blkcipher *)tfm));
+	ctx->fallback = fallback;
+
+	return 0;
 }
 
 static void p8_aes_ctr_exit(struct crypto_tfm *tfm)
 {
-    struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
+	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
-    if (ctx->fallback) {
-        crypto_free_blkcipher(ctx->fallback);
-        ctx->fallback = NULL;
-    }
+	if (ctx->fallback) {
+		crypto_free_blkcipher(ctx->fallback);
+		ctx->fallback = NULL;
+	}
 }
 
 static int p8_aes_ctr_setkey(struct crypto_tfm *tfm, const u8 *key,
-    unsigned int keylen)
+			     unsigned int keylen)
 {
-    int ret;
-    struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
+	int ret;
+	struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
 
-    pagefault_disable();
-    enable_kernel_altivec();
-    ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
-    pagefault_enable();
+	pagefault_disable();
+	enable_kernel_altivec();
+	ret = aes_p8_set_encrypt_key(key, keylen * 8, &ctx->enc_key);
+	pagefault_enable();
 
-    ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
-    return ret;
+	ret += crypto_blkcipher_setkey(ctx->fallback, key, keylen);
+	return ret;
 }
 
 static void p8_aes_ctr_final(struct p8_aes_ctr_ctx *ctx,
-    struct blkcipher_walk *walk)
+			     struct blkcipher_walk *walk)
 {
-    u8 *ctrblk = walk->iv;
-    u8 keystream[AES_BLOCK_SIZE];
-    u8 *src = walk->src.virt.addr;
-    u8 *dst = walk->dst.virt.addr;
-    unsigned int nbytes = walk->nbytes;
-
-    pagefault_disable();
-    enable_kernel_altivec();
-    aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
-    pagefault_enable();
-
-    crypto_xor(keystream, src, nbytes);
-    memcpy(dst, keystream, nbytes);
-    crypto_inc(ctrblk, AES_BLOCK_SIZE);
+	u8 *ctrblk = walk->iv;
+	u8 keystream[AES_BLOCK_SIZE];
+	u8 *src = walk->src.virt.addr;
+	u8 *dst = walk->dst.virt.addr;
+	unsigned int nbytes = walk->nbytes;
+
+	pagefault_disable();
+	enable_kernel_altivec();
+	aes_p8_encrypt(ctrblk, keystream, &ctx->enc_key);
+	pagefault_enable();
+
+	crypto_xor(keystream, src, nbytes);
+	memcpy(dst, keystream, nbytes);
+	crypto_inc(ctrblk, AES_BLOCK_SIZE);
 }
 
 static int p8_aes_ctr_crypt(struct blkcipher_desc *desc,
-    struct scatterlist *dst, struct scatterlist *src,
-    unsigned int nbytes)
+			    struct scatterlist *dst,
+			    struct scatterlist *src, unsigned int nbytes)
 {
-    int ret;
-    struct blkcipher_walk walk;
-    struct p8_aes_ctr_ctx *ctx = crypto_tfm_ctx(
-        crypto_blkcipher_tfm(desc->tfm));
-    struct blkcipher_desc fallback_desc = {
-        .tfm = ctx->fallback,
-        .info = desc->info,
-        .flags = desc->flags
-    };
-
-    if (in_interrupt()) {
-        ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src, nbytes);
-    } else {
-        blkcipher_walk_init(&walk, dst, src, nbytes);
-        ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
-        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
-            pagefault_disable();
-            enable_kernel_altivec();
-            aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr, walk.dst.virt.addr,
-                (nbytes & AES_BLOCK_MASK)/AES_BLOCK_SIZE, &ctx->enc_key, walk.iv);
-            pagefault_enable();
-
-            crypto_inc(walk.iv, AES_BLOCK_SIZE);
-            nbytes &= AES_BLOCK_SIZE - 1;
-            ret = blkcipher_walk_done(desc, &walk, nbytes);
-        }
-        if (walk.nbytes) {
-            p8_aes_ctr_final(ctx, &walk);
-            ret = blkcipher_walk_done(desc, &walk, 0);
-        }
-    }
-
-    return ret;
+	int ret;
+	struct blkcipher_walk walk;
+	struct p8_aes_ctr_ctx *ctx =
+		crypto_tfm_ctx(crypto_blkcipher_tfm(desc->tfm));
+	struct blkcipher_desc fallback_desc = {
+		.tfm = ctx->fallback,
+		.info = desc->info,
+		.flags = desc->flags
+	};
+
+	if (in_interrupt()) {
+		ret = crypto_blkcipher_encrypt(&fallback_desc, dst, src,
+					       nbytes);
+	} else {
+		blkcipher_walk_init(&walk, dst, src, nbytes);
+		ret = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
+		while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
+			pagefault_disable();
+			enable_kernel_altivec();
+			aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
+						    walk.dst.virt.addr,
+						    (nbytes &
+						     AES_BLOCK_MASK) /
+						    AES_BLOCK_SIZE,
+						    &ctx->enc_key,
+						    walk.iv);
+			pagefault_enable();
+
+			crypto_inc(walk.iv, AES_BLOCK_SIZE);
+			nbytes &= AES_BLOCK_SIZE - 1;
+			ret = blkcipher_walk_done(desc, &walk, nbytes);
+		}
+		if (walk.nbytes) {
+			p8_aes_ctr_final(ctx, &walk);
+			ret = blkcipher_walk_done(desc, &walk, 0);
+		}
+	}
+
+	return ret;
 }
 
 struct crypto_alg p8_aes_ctr_alg = {
-    .cra_name = "ctr(aes)",
-    .cra_driver_name = "p8_aes_ctr",
-    .cra_module = THIS_MODULE,
-    .cra_priority = 1000,
-    .cra_type = &crypto_blkcipher_type,
-    .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
-    .cra_alignmask = 0,
-    .cra_blocksize = 1,
-    .cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
-    .cra_init = p8_aes_ctr_init,
-    .cra_exit = p8_aes_ctr_exit,
-    .cra_blkcipher = {
-        .ivsize = 0,
-        .min_keysize = AES_MIN_KEY_SIZE,
-        .max_keysize = AES_MAX_KEY_SIZE,
-        .setkey = p8_aes_ctr_setkey,
-        .encrypt = p8_aes_ctr_crypt,
-        .decrypt = p8_aes_ctr_crypt,
-    },
+	.cra_name = "ctr(aes)",
+	.cra_driver_name = "p8_aes_ctr",
+	.cra_module = THIS_MODULE,
+	.cra_priority = 1000,
+	.cra_type = &crypto_blkcipher_type,
+	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER | CRYPTO_ALG_NEED_FALLBACK,
+	.cra_alignmask = 0,
+	.cra_blocksize = 1,
+	.cra_ctxsize = sizeof(struct p8_aes_ctr_ctx),
+	.cra_init = p8_aes_ctr_init,
+	.cra_exit = p8_aes_ctr_exit,
+	.cra_blkcipher = {
+		.ivsize = 0,
+		.min_keysize = AES_MIN_KEY_SIZE,
+		.max_keysize = AES_MAX_KEY_SIZE,
+		.setkey = p8_aes_ctr_setkey,
+		.encrypt = p8_aes_ctr_crypt,
+		.decrypt = p8_aes_ctr_crypt,
+	},
 };