Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: x86/xts - implement support for ciphertext stealing

Align the x86 code with the generic XTS template, which now supports
ciphertext stealing as described by the IEEE XTS-AES spec P1619.

Tested-by: Stephan Mueller <smueller@chronox.de>
Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Ard Biesheuvel and committed by Herbert Xu
8ce5fac2 b395ed4f

+81 -18
+4 -2
arch/x86/crypto/aesni-intel_glue.c
··· 609 609 return glue_xts_req_128bit(&aesni_enc_xts, req, 610 610 XTS_TWEAK_CAST(aesni_xts_tweak), 611 611 aes_ctx(ctx->raw_tweak_ctx), 612 - aes_ctx(ctx->raw_crypt_ctx)); 612 + aes_ctx(ctx->raw_crypt_ctx), 613 + false); 613 614 } 614 615 615 616 static int xts_decrypt(struct skcipher_request *req) ··· 621 620 return glue_xts_req_128bit(&aesni_dec_xts, req, 622 621 XTS_TWEAK_CAST(aesni_xts_tweak), 623 622 aes_ctx(ctx->raw_tweak_ctx), 624 - aes_ctx(ctx->raw_crypt_ctx)); 623 + aes_ctx(ctx->raw_crypt_ctx), 624 + true); 625 625 } 626 626 627 627 static int
+2 -2
arch/x86/crypto/camellia_aesni_avx2_glue.c
··· 182 182 183 183 return glue_xts_req_128bit(&camellia_enc_xts, req, 184 184 XTS_TWEAK_CAST(camellia_enc_blk), 185 - &ctx->tweak_ctx, &ctx->crypt_ctx); 185 + &ctx->tweak_ctx, &ctx->crypt_ctx, false); 186 186 } 187 187 188 188 static int xts_decrypt(struct skcipher_request *req) ··· 192 192 193 193 return glue_xts_req_128bit(&camellia_dec_xts, req, 194 194 XTS_TWEAK_CAST(camellia_enc_blk), 195 - &ctx->tweak_ctx, &ctx->crypt_ctx); 195 + &ctx->tweak_ctx, &ctx->crypt_ctx, true); 196 196 } 197 197 198 198 static struct skcipher_alg camellia_algs[] = {
+2 -2
arch/x86/crypto/camellia_aesni_avx_glue.c
··· 208 208 209 209 return glue_xts_req_128bit(&camellia_enc_xts, req, 210 210 XTS_TWEAK_CAST(camellia_enc_blk), 211 - &ctx->tweak_ctx, &ctx->crypt_ctx); 211 + &ctx->tweak_ctx, &ctx->crypt_ctx, false); 212 212 } 213 213 214 214 static int xts_decrypt(struct skcipher_request *req) ··· 218 218 219 219 return glue_xts_req_128bit(&camellia_dec_xts, req, 220 220 XTS_TWEAK_CAST(camellia_enc_blk), 221 - &ctx->tweak_ctx, &ctx->crypt_ctx); 221 + &ctx->tweak_ctx, &ctx->crypt_ctx, true); 222 222 } 223 223 224 224 static struct skcipher_alg camellia_algs[] = {
+2 -2
arch/x86/crypto/cast6_avx_glue.c
··· 201 201 202 202 return glue_xts_req_128bit(&cast6_enc_xts, req, 203 203 XTS_TWEAK_CAST(__cast6_encrypt), 204 - &ctx->tweak_ctx, &ctx->crypt_ctx); 204 + &ctx->tweak_ctx, &ctx->crypt_ctx, false); 205 205 } 206 206 207 207 static int xts_decrypt(struct skcipher_request *req) ··· 211 211 212 212 return glue_xts_req_128bit(&cast6_dec_xts, req, 213 213 XTS_TWEAK_CAST(__cast6_encrypt), 214 - &ctx->tweak_ctx, &ctx->crypt_ctx); 214 + &ctx->tweak_ctx, &ctx->crypt_ctx, true); 215 215 } 216 216 217 217 static struct skcipher_alg cast6_algs[] = {
+64 -3
arch/x86/crypto/glue_helper.c
··· 14 14 #include <crypto/b128ops.h> 15 15 #include <crypto/gf128mul.h> 16 16 #include <crypto/internal/skcipher.h> 17 + #include <crypto/scatterwalk.h> 17 18 #include <crypto/xts.h> 18 19 #include <asm/crypto/glue_helper.h> 19 20 ··· 260 259 int glue_xts_req_128bit(const struct common_glue_ctx *gctx, 261 260 struct skcipher_request *req, 262 261 common_glue_func_t tweak_fn, void *tweak_ctx, 263 - void *crypt_ctx) 262 + void *crypt_ctx, bool decrypt) 264 263 { 264 + const bool cts = (req->cryptlen % XTS_BLOCK_SIZE); 265 265 const unsigned int bsize = 128 / 8; 266 + struct skcipher_request subreq; 266 267 struct skcipher_walk walk; 267 268 bool fpu_enabled = false; 268 - unsigned int nbytes; 269 + unsigned int nbytes, tail; 269 270 int err; 271 + 272 + if (req->cryptlen < XTS_BLOCK_SIZE) 273 + return -EINVAL; 274 + 275 + if (unlikely(cts)) { 276 + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 277 + 278 + tail = req->cryptlen % XTS_BLOCK_SIZE + XTS_BLOCK_SIZE; 279 + 280 + skcipher_request_set_tfm(&subreq, tfm); 281 + skcipher_request_set_callback(&subreq, 282 + crypto_skcipher_get_flags(tfm), 283 + NULL, NULL); 284 + skcipher_request_set_crypt(&subreq, req->src, req->dst, 285 + req->cryptlen - tail, req->iv); 286 + req = &subreq; 287 + } 270 288 271 289 err = skcipher_walk_virt(&walk, req, false); 272 290 nbytes = walk.nbytes; 273 - if (!nbytes) 291 + if (err) 274 292 return err; 275 293 276 294 /* set minimum length to bsize, for tweak_fn */ ··· 307 287 nbytes = walk.nbytes; 308 288 } 309 289 290 + if (unlikely(cts)) { 291 + u8 *next_tweak, *final_tweak = req->iv; 292 + struct scatterlist *src, *dst; 293 + struct scatterlist s[2], d[2]; 294 + le128 b[2]; 295 + 296 + dst = src = scatterwalk_ffwd(s, req->src, req->cryptlen); 297 + if (req->dst != req->src) 298 + dst = scatterwalk_ffwd(d, req->dst, req->cryptlen); 299 + 300 + if (decrypt) { 301 + next_tweak = memcpy(b, req->iv, XTS_BLOCK_SIZE); 302 + gf128mul_x_ble(b, b); 303 + } else { 304 + next_tweak = req->iv; 305 + } 306 + 307 + skcipher_request_set_crypt(&subreq, src, dst, XTS_BLOCK_SIZE, 308 + next_tweak); 309 + 310 + err = skcipher_walk_virt(&walk, req, false) ?: 311 + skcipher_walk_done(&walk, 312 + __glue_xts_req_128bit(gctx, crypt_ctx, &walk)); 313 + if (err) 314 + goto out; 315 + 316 + scatterwalk_map_and_copy(b, dst, 0, XTS_BLOCK_SIZE, 0); 317 + memcpy(b + 1, b, tail - XTS_BLOCK_SIZE); 318 + scatterwalk_map_and_copy(b, src, XTS_BLOCK_SIZE, 319 + tail - XTS_BLOCK_SIZE, 0); 320 + scatterwalk_map_and_copy(b, dst, 0, tail, 1); 321 + 322 + skcipher_request_set_crypt(&subreq, dst, dst, XTS_BLOCK_SIZE, 323 + final_tweak); 324 + 325 + err = skcipher_walk_virt(&walk, req, false) ?: 326 + skcipher_walk_done(&walk, 327 + __glue_xts_req_128bit(gctx, crypt_ctx, &walk)); 328 + } 329 + 330 + out: 310 331 glue_fpu_end(fpu_enabled); 311 332 312 333 return err;
+2 -2
arch/x86/crypto/serpent_avx2_glue.c
··· 167 167 168 168 return glue_xts_req_128bit(&serpent_enc_xts, req, 169 169 XTS_TWEAK_CAST(__serpent_encrypt), 170 - &ctx->tweak_ctx, &ctx->crypt_ctx); 170 + &ctx->tweak_ctx, &ctx->crypt_ctx, false); 171 171 } 172 172 173 173 static int xts_decrypt(struct skcipher_request *req) ··· 177 177 178 178 return glue_xts_req_128bit(&serpent_dec_xts, req, 179 179 XTS_TWEAK_CAST(__serpent_encrypt), 180 - &ctx->tweak_ctx, &ctx->crypt_ctx); 180 + &ctx->tweak_ctx, &ctx->crypt_ctx, true); 181 181 } 182 182 183 183 static struct skcipher_alg serpent_algs[] = {
+2 -2
arch/x86/crypto/serpent_avx_glue.c
··· 207 207 208 208 return glue_xts_req_128bit(&serpent_enc_xts, req, 209 209 XTS_TWEAK_CAST(__serpent_encrypt), 210 - &ctx->tweak_ctx, &ctx->crypt_ctx); 210 + &ctx->tweak_ctx, &ctx->crypt_ctx, false); 211 211 } 212 212 213 213 static int xts_decrypt(struct skcipher_request *req) ··· 217 217 218 218 return glue_xts_req_128bit(&serpent_dec_xts, req, 219 219 XTS_TWEAK_CAST(__serpent_encrypt), 220 - &ctx->tweak_ctx, &ctx->crypt_ctx); 220 + &ctx->tweak_ctx, &ctx->crypt_ctx, true); 221 221 } 222 222 223 223 static struct skcipher_alg serpent_algs[] = {
+2 -2
arch/x86/crypto/twofish_avx_glue.c
··· 210 210 211 211 return glue_xts_req_128bit(&twofish_enc_xts, req, 212 212 XTS_TWEAK_CAST(twofish_enc_blk), 213 - &ctx->tweak_ctx, &ctx->crypt_ctx); 213 + &ctx->tweak_ctx, &ctx->crypt_ctx, false); 214 214 } 215 215 216 216 static int xts_decrypt(struct skcipher_request *req) ··· 220 220 221 221 return glue_xts_req_128bit(&twofish_dec_xts, req, 222 222 XTS_TWEAK_CAST(twofish_enc_blk), 223 - &ctx->tweak_ctx, &ctx->crypt_ctx); 223 + &ctx->tweak_ctx, &ctx->crypt_ctx, true); 224 224 } 225 225 226 226 static struct skcipher_alg twofish_algs[] = {
+1 -1
arch/x86/include/asm/crypto/glue_helper.h
··· 114 114 extern int glue_xts_req_128bit(const struct common_glue_ctx *gctx, 115 115 struct skcipher_request *req, 116 116 common_glue_func_t tweak_fn, void *tweak_ctx, 117 - void *crypt_ctx); 117 + void *crypt_ctx, bool decrypt); 118 118 119 119 extern void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src, 120 120 le128 *iv, common_glue_func_t fn);