author     Jussi Kivilinna    2011-11-09 11:56:06 +0800
committer  Herbert Xu         2011-11-09 11:56:06 +0800
commit     ce0045561e1edb92e4a509eb433ff52d3afaa258
tree       78c8a6fcd82d966aca6a48d17c9063de34e224e9 /crypto
parent     f9d2691fc9a00f39b587f965c33cca012a5597bc
crypto: xts: add interface for parallelized cipher implementations
Add an xts_crypt() function that can be used by cipher implementations able to benefit from parallelized cipher operations.
Signed-off-by: Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>
Diffstat (limited to 'crypto')
-rw-r--r--  crypto/xts.c | 75
1 file changed, 73 insertions, 2 deletions
```diff
diff --git a/crypto/xts.c b/crypto/xts.c
index 96f3f88d576e..ca1608f44cb5 100644
--- a/crypto/xts.c
+++ b/crypto/xts.c
@@ -21,11 +21,10 @@
 #include <linux/scatterlist.h>
 #include <linux/slab.h>
 
+#include <crypto/xts.h>
 #include <crypto/b128ops.h>
 #include <crypto/gf128mul.h>
 
-#define XTS_BLOCK_SIZE 16
-
 struct priv {
         struct crypto_cipher *child;
         struct crypto_cipher *tweak;
@@ -167,6 +166,78 @@ static int decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                                   crypto_cipher_alg(ctx->child)->cia_decrypt);
 }
 
+int xts_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst,
+              struct scatterlist *ssrc, unsigned int nbytes,
+              struct xts_crypt_req *req)
+{
+        const unsigned int bsize = XTS_BLOCK_SIZE;
+        const unsigned int max_blks = req->tbuflen / bsize;
+        struct blkcipher_walk walk;
+        unsigned int nblocks;
+        be128 *src, *dst, *t;
+        be128 *t_buf = req->tbuf;
+        int err, i;
+
+        BUG_ON(max_blks < 1);
+
+        blkcipher_walk_init(&walk, sdst, ssrc, nbytes);
+
+        err = blkcipher_walk_virt(desc, &walk);
+        nbytes = walk.nbytes;
+        if (!nbytes)
+                return err;
+
+        nblocks = min(nbytes / bsize, max_blks);
+        src = (be128 *)walk.src.virt.addr;
+        dst = (be128 *)walk.dst.virt.addr;
+
+        /* calculate first value of T */
+        req->tweak_fn(req->tweak_ctx, (u8 *)&t_buf[0], walk.iv);
+
+        i = 0;
+        goto first;
+
+        for (;;) {
+                do {
+                        for (i = 0; i < nblocks; i++) {
+                                gf128mul_x_ble(&t_buf[i], t);
+first:
+                                t = &t_buf[i];
+
+                                /* PP <- T xor P */
+                                be128_xor(dst + i, t, src + i);
+                        }
+
+                        /* CC <- E(Key2,PP) */
+                        req->crypt_fn(req->crypt_ctx, (u8 *)dst,
+                                      nblocks * bsize);
+
+                        /* C <- T xor CC */
+                        for (i = 0; i < nblocks; i++)
+                                be128_xor(dst + i, dst + i, &t_buf[i]);
+
+                        src += nblocks;
+                        dst += nblocks;
+                        nbytes -= nblocks * bsize;
+                        nblocks = min(nbytes / bsize, max_blks);
+                } while (nblocks > 0);
+
+                *(be128 *)walk.iv = *t;
+
+                err = blkcipher_walk_done(desc, &walk, nbytes);
+                nbytes = walk.nbytes;
+                if (!nbytes)
+                        break;
+
+                nblocks = min(nbytes / bsize, max_blks);
+                src = (be128 *)walk.src.virt.addr;
+                dst = (be128 *)walk.dst.virt.addr;
+        }
+
+        return err;
+}
+EXPORT_SYMBOL_GPL(xts_crypt);
+
 static int init_tfm(struct crypto_tfm *tfm)
 {
         struct crypto_cipher *cipher;
```
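For orientation, below is a rough sketch of how a parallelized cipher's glue code might drive this interface. It is not taken from the patch or from any in-tree driver: the cipher "foo", its foo_* primitives, FOO_PARALLEL_BLOCKS and the callback names are all hypothetical, and the layout of struct xts_crypt_req (tbuf, tbuflen, tweak_ctx/tweak_fn, crypt_ctx/crypt_fn) is inferred from the way xts_crypt() dereferences it above; the authoritative definition lives in the <crypto/xts.h> header this patch starts using.

```c
/*
 * Sketch only: "foo" is a hypothetical cipher with a single-block and an
 * 8-way parallel encrypt primitive.  Everything foo_* is made up; the
 * xts_crypt_req field names follow the accesses in xts_crypt() above.
 */
#include <linux/crypto.h>
#include <crypto/b128ops.h>
#include <crypto/xts.h>

#define FOO_PARALLEL_BLOCKS 8

struct foo_ctx {
        u32 rk[64];                     /* placeholder key schedule */
};

struct foo_xts_ctx {
        struct foo_ctx tweak_ctx;       /* keyed with the tweak key */
        struct foo_ctx crypt_ctx;       /* keyed with the data key */
};

/* Hypothetical primitives provided by the cipher implementation. */
void foo_encrypt_block(struct foo_ctx *ctx, u8 *dst, const u8 *src);
void foo_encrypt_8way(struct foo_ctx *ctx, u8 *dst, const u8 *src);

/* Single-block encrypt; xts_crypt() uses this to derive the first tweak T. */
static void foo_xts_tweak_fn(void *ctx, u8 *dst, const u8 *src)
{
        foo_encrypt_block(ctx, dst, src);
}

/*
 * Bulk encrypt: xts_crypt() hands over a contiguous run of pre-whitened
 * blocks (PP), so they can be processed several at a time and in place.
 */
static void foo_xts_crypt_fn(void *ctx, u8 *blks, unsigned int nbytes)
{
        const unsigned int chunk = FOO_PARALLEL_BLOCKS * XTS_BLOCK_SIZE;

        while (nbytes >= chunk) {
                foo_encrypt_8way(ctx, blks, blks);
                blks += chunk;
                nbytes -= chunk;
        }
        while (nbytes) {
                foo_encrypt_block(ctx, blks, blks);
                blks += XTS_BLOCK_SIZE;
                nbytes -= XTS_BLOCK_SIZE;
        }
}

static int foo_xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                           struct scatterlist *src, unsigned int nbytes)
{
        struct foo_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        be128 buf[FOO_PARALLEL_BLOCKS];  /* scratch space for the tweak values */
        struct xts_crypt_req req = {
                .tbuf = buf,
                .tbuflen = sizeof(buf),
                .tweak_ctx = &ctx->tweak_ctx,
                .tweak_fn = foo_xts_tweak_fn,
                .crypt_ctx = &ctx->crypt_ctx,
                .crypt_fn = foo_xts_crypt_fn,
        };

        return xts_crypt(desc, dst, src, nbytes, &req);
}
```

The scratch buffer decides how many tweak values xts_crypt() can batch per crypt_fn call (tbuflen / XTS_BLOCK_SIZE, with at least one block required by the BUG_ON above), so sizing it to the cipher's parallel width is the natural choice.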