Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: xts: add interface for parallelized cipher implementations

Add an xts_crypt() function for use by cipher implementations that can
benefit from parallelized cipher operations.

Signed-off-by: Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

authored by

Jussi Kivilinna and committed by
Herbert Xu
ce004556 f9d2691f

+100 -2
+73 -2
crypto/xts.c
··· 21 21 #include <linux/scatterlist.h> 22 22 #include <linux/slab.h> 23 23 24 + #include <crypto/xts.h> 24 25 #include <crypto/b128ops.h> 25 26 #include <crypto/gf128mul.h> 26 - 27 - #define XTS_BLOCK_SIZE 16 28 27 29 28 struct priv { 30 29 struct crypto_cipher *child; ··· 165 166 return crypt(desc, &w, ctx, crypto_cipher_alg(ctx->tweak)->cia_encrypt, 166 167 crypto_cipher_alg(ctx->child)->cia_decrypt); 167 168 } 169 + 170 + int xts_crypt(struct blkcipher_desc *desc, struct scatterlist *sdst, 171 + struct scatterlist *ssrc, unsigned int nbytes, 172 + struct xts_crypt_req *req) 173 + { 174 + const unsigned int bsize = XTS_BLOCK_SIZE; 175 + const unsigned int max_blks = req->tbuflen / bsize; 176 + struct blkcipher_walk walk; 177 + unsigned int nblocks; 178 + be128 *src, *dst, *t; 179 + be128 *t_buf = req->tbuf; 180 + int err, i; 181 + 182 + BUG_ON(max_blks < 1); 183 + 184 + blkcipher_walk_init(&walk, sdst, ssrc, nbytes); 185 + 186 + err = blkcipher_walk_virt(desc, &walk); 187 + nbytes = walk.nbytes; 188 + if (!nbytes) 189 + return err; 190 + 191 + nblocks = min(nbytes / bsize, max_blks); 192 + src = (be128 *)walk.src.virt.addr; 193 + dst = (be128 *)walk.dst.virt.addr; 194 + 195 + /* calculate first value of T */ 196 + req->tweak_fn(req->tweak_ctx, (u8 *)&t_buf[0], walk.iv); 197 + 198 + i = 0; 199 + goto first; 200 + 201 + for (;;) { 202 + do { 203 + for (i = 0; i < nblocks; i++) { 204 + gf128mul_x_ble(&t_buf[i], t); 205 + first: 206 + t = &t_buf[i]; 207 + 208 + /* PP <- T xor P */ 209 + be128_xor(dst + i, t, src + i); 210 + } 211 + 212 + /* CC <- E(Key2,PP) */ 213 + req->crypt_fn(req->crypt_ctx, (u8 *)dst, 214 + nblocks * bsize); 215 + 216 + /* C <- T xor CC */ 217 + for (i = 0; i < nblocks; i++) 218 + be128_xor(dst + i, dst + i, &t_buf[i]); 219 + 220 + src += nblocks; 221 + dst += nblocks; 222 + nbytes -= nblocks * bsize; 223 + nblocks = min(nbytes / bsize, max_blks); 224 + } while (nblocks > 0); 225 + 226 + *(be128 *)walk.iv = *t; 227 + 228 + err = 
blkcipher_walk_done(desc, &walk, nbytes); 229 + nbytes = walk.nbytes; 230 + if (!nbytes) 231 + break; 232 + 233 + nblocks = min(nbytes / bsize, max_blks); 234 + src = (be128 *)walk.src.virt.addr; 235 + dst = (be128 *)walk.dst.virt.addr; 236 + } 237 + 238 + return err; 239 + } 240 + EXPORT_SYMBOL_GPL(xts_crypt); 168 241 169 242 static int init_tfm(struct crypto_tfm *tfm) 170 243 {
+27
include/crypto/xts.h
··· 1 + #ifndef _CRYPTO_XTS_H 2 + #define _CRYPTO_XTS_H 3 + 4 + #include <crypto/b128ops.h> 5 + 6 + struct scatterlist; 7 + struct blkcipher_desc; 8 + 9 + #define XTS_BLOCK_SIZE 16 10 + 11 + struct xts_crypt_req { 12 + be128 *tbuf; 13 + unsigned int tbuflen; 14 + 15 + void *tweak_ctx; 16 + void (*tweak_fn)(void *ctx, u8* dst, const u8* src); 17 + void *crypt_ctx; 18 + void (*crypt_fn)(void *ctx, u8 *blks, unsigned int nbytes); 19 + }; 20 + 21 + #define XTS_TWEAK_CAST(x) ((void (*)(void *, u8*, const u8*))(x)) 22 + 23 + int xts_crypt(struct blkcipher_desc *desc, struct scatterlist *dst, 24 + struct scatterlist *src, unsigned int nbytes, 25 + struct xts_crypt_req *req); 26 + 27 + #endif /* _CRYPTO_XTS_H */