Merge git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6

* git://git.kernel.org/pub/scm/linux/kernel/git/herbert/crypto-2.6:
[CRYPTO] blkcipher: Fix inverted test in blkcipher_get_spot
[CRYPTO] blkcipher: Fix handling of kmalloc page straddling

+7 -4
+7 -4
crypto/blkcipher.c
--- a/crypto/blkcipher.c
+++ b/crypto/blkcipher.c
@@ -59,9 +59,11 @@
 	scatterwalk_unmap(walk->dst.virt.addr, 1);
 }
 
+/* Get a spot of the specified length that does not straddle a page.
+ * The caller needs to ensure that there is enough space for this operation.
+ */
 static inline u8 *blkcipher_get_spot(u8 *start, unsigned int len)
 {
-	if (offset_in_page(start + len) < len)
-		return (u8 *)((unsigned long)(start + len) & PAGE_MASK);
-	return start;
+	u8 *end_page = (u8 *)(((unsigned long)(start + len - 1)) & PAGE_MASK);
+	return start > end_page ? start : end_page;
 }
 
 static inline unsigned int blkcipher_done_slow(struct crypto_blkcipher *tfm,
@@ -157,7 +155,8 @@
 	if (walk->buffer)
 		goto ok;
 
-	n = bsize * 2 + (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
+	n = bsize * 3 - (alignmask + 1) +
+	    (alignmask & ~(crypto_tfm_ctx_alignment() - 1));
 	walk->buffer = kmalloc(n, GFP_ATOMIC);
 	if (!walk->buffer)
 		return blkcipher_walk_done(desc, walk, -ENOMEM);