Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

Pull ARM AES crypto fixes from Herbert Xu:
"This push fixes a regression on ARM where odd-sized blocks supplied to
AES may cause crashes"

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6:
crypto: arm-aes - fix encryption of unaligned data
crypto: arm64-aes - fix encryption of unaligned data

+11 -11
+5 -5
arch/arm/crypto/aesbs-glue.c
··· 137 137 dst += AES_BLOCK_SIZE; 138 138 } while (--blocks); 139 139 } 140 - err = blkcipher_walk_done(desc, &walk, 0); 140 + err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); 141 141 } 142 142 return err; 143 143 } ··· 158 158 bsaes_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr, 159 159 walk.nbytes, &ctx->dec, walk.iv); 160 160 kernel_neon_end(); 161 - err = blkcipher_walk_done(desc, &walk, 0); 161 + err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); 162 162 } 163 163 while (walk.nbytes) { 164 164 u32 blocks = walk.nbytes / AES_BLOCK_SIZE; ··· 182 182 dst += AES_BLOCK_SIZE; 183 183 src += AES_BLOCK_SIZE; 184 184 } while (--blocks); 185 - err = blkcipher_walk_done(desc, &walk, 0); 185 + err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); 186 186 } 187 187 return err; 188 188 } ··· 268 268 bsaes_xts_encrypt(walk.src.virt.addr, walk.dst.virt.addr, 269 269 walk.nbytes, &ctx->enc, walk.iv); 270 270 kernel_neon_end(); 271 - err = blkcipher_walk_done(desc, &walk, 0); 271 + err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); 272 272 } 273 273 return err; 274 274 } ··· 292 292 bsaes_xts_decrypt(walk.src.virt.addr, walk.dst.virt.addr, 293 293 walk.nbytes, &ctx->dec, walk.iv); 294 294 kernel_neon_end(); 295 - err = blkcipher_walk_done(desc, &walk, 0); 295 + err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); 296 296 } 297 297 return err; 298 298 }
+6 -6
arch/arm64/crypto/aes-glue.c
··· 106 106 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { 107 107 aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr, 108 108 (u8 *)ctx->key_enc, rounds, blocks, first); 109 - err = blkcipher_walk_done(desc, &walk, 0); 109 + err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); 110 110 } 111 111 kernel_neon_end(); 112 112 return err; ··· 128 128 for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) { 129 129 aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr, 130 130 (u8 *)ctx->key_dec, rounds, blocks, first); 131 - err = blkcipher_walk_done(desc, &walk, 0); 131 + err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); 132 132 } 133 133 kernel_neon_end(); 134 134 return err; ··· 151 151 aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr, 152 152 (u8 *)ctx->key_enc, rounds, blocks, walk.iv, 153 153 first); 154 - err = blkcipher_walk_done(desc, &walk, 0); 154 + err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); 155 155 } 156 156 kernel_neon_end(); 157 157 return err; ··· 174 174 aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr, 175 175 (u8 *)ctx->key_dec, rounds, blocks, walk.iv, 176 176 first); 177 - err = blkcipher_walk_done(desc, &walk, 0); 177 + err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); 178 178 } 179 179 kernel_neon_end(); 180 180 return err; ··· 243 243 aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr, 244 244 (u8 *)ctx->key1.key_enc, rounds, blocks, 245 245 (u8 *)ctx->key2.key_enc, walk.iv, first); 246 - err = blkcipher_walk_done(desc, &walk, 0); 246 + err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); 247 247 } 248 248 kernel_neon_end(); 249 249 ··· 267 267 aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr, 268 268 (u8 *)ctx->key1.key_dec, rounds, blocks, 269 269 (u8 *)ctx->key2.key_enc, walk.iv, first); 270 - err = blkcipher_walk_done(desc, &walk, 0); 270 + err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE); 271 271 } 272 272 kernel_neon_end(); 273 273