crypto: skcipher - Fix skcipher_dequeue_givcrypt NULL test

As struct skcipher_givcrypt_request includes struct crypto_async_request
at a non-zero offset, testing for NULL after converting the pointer
returned by crypto_dequeue_request does not work. This can result
in IPsec crashes when the queue is depleted.

This patch fixes it by doing the pointer conversion only when the
return value is non-NULL. In particular, we create a new function
__crypto_dequeue_request that does the pointer conversion.

Reported-by: Brad Bosch <bradbosch@comcast.net>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

+12 -4
+9 -2
crypto/algapi.c
··· 692 } 693 EXPORT_SYMBOL_GPL(crypto_enqueue_request); 694 695 - struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue) 696 { 697 struct list_head *request; 698 ··· 707 request = queue->list.next; 708 list_del(request); 709 710 - return list_entry(request, struct crypto_async_request, list); 711 } 712 EXPORT_SYMBOL_GPL(crypto_dequeue_request); 713
··· 692 } 693 EXPORT_SYMBOL_GPL(crypto_enqueue_request); 694 695 + void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset) 696 { 697 struct list_head *request; 698 ··· 707 request = queue->list.next; 708 list_del(request); 709 710 + return (char *)list_entry(request, struct crypto_async_request, list) - 711 + offset; 712 + } 713 + EXPORT_SYMBOL_GPL(__crypto_dequeue_request); 714 + 715 + struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue) 716 + { 717 + return __crypto_dequeue_request(queue, 0); 718 } 719 EXPORT_SYMBOL_GPL(crypto_dequeue_request); 720
+1
include/crypto/algapi.h
··· 137 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen); 138 int crypto_enqueue_request(struct crypto_queue *queue, 139 struct crypto_async_request *request); 140 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue); 141 int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm); 142
··· 137 void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen); 138 int crypto_enqueue_request(struct crypto_queue *queue, 139 struct crypto_async_request *request); 140 + void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset); 141 struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue); 142 int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm); 143
+2 -2
include/crypto/internal/skcipher.h
··· 79 static inline struct skcipher_givcrypt_request *skcipher_dequeue_givcrypt( 80 struct crypto_queue *queue) 81 { 82 - return container_of(ablkcipher_dequeue_request(queue), 83 - struct skcipher_givcrypt_request, creq); 84 } 85 86 static inline void *skcipher_givcrypt_reqctx(
··· 79 static inline struct skcipher_givcrypt_request *skcipher_dequeue_givcrypt( 80 struct crypto_queue *queue) 81 { 82 + return __crypto_dequeue_request( 83 + queue, offsetof(struct skcipher_givcrypt_request, creq.base)); 84 } 85 86 static inline void *skcipher_givcrypt_reqctx(