Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: ahash - Add unaligned handling and default operations

This patch exports the finup operation where available and adds
a default finup operation for ahash. The operations final, finup
and digest will now also deal with unaligned result pointers by
copying them. Finally, the export/import operations will now be
exported too.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

+263 -22
+201 -3
crypto/ahash.c
··· 24 24 25 25 #include "internal.h" 26 26 27 + struct ahash_request_priv { 28 + crypto_completion_t complete; 29 + void *data; 30 + u8 *result; 31 + void *ubuf[] CRYPTO_MINALIGN_ATTR; 32 + }; 33 + 27 34 static inline struct ahash_alg *crypto_ahash_alg(struct crypto_ahash *hash) 28 35 { 29 36 return container_of(crypto_hash_alg_common(hash), struct ahash_alg, ··· 163 156 return ret; 164 157 } 165 158 166 - static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key, 159 + int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key, 167 160 unsigned int keylen) 168 161 { 169 162 struct ahash_alg *ahash = crypto_ahash_alg(tfm); ··· 174 167 175 168 return ahash->setkey(tfm, key, keylen); 176 169 } 170 + EXPORT_SYMBOL_GPL(crypto_ahash_setkey); 177 171 178 172 static int ahash_nosetkey(struct crypto_ahash *tfm, const u8 *key, 179 173 unsigned int keylen) 174 + { 175 + return -ENOSYS; 176 + } 177 + 178 + static inline unsigned int ahash_align_buffer_size(unsigned len, 179 + unsigned long mask) 180 + { 181 + return len + (mask & ~(crypto_tfm_ctx_alignment() - 1)); 182 + } 183 + 184 + static void ahash_op_unaligned_finish(struct ahash_request *req, int err) 185 + { 186 + struct ahash_request_priv *priv = req->priv; 187 + 188 + if (err == -EINPROGRESS) 189 + return; 190 + 191 + if (!err) 192 + memcpy(priv->result, req->result, 193 + crypto_ahash_digestsize(crypto_ahash_reqtfm(req))); 194 + 195 + kzfree(priv); 196 + } 197 + 198 + static void ahash_op_unaligned_done(struct crypto_async_request *req, int err) 199 + { 200 + struct ahash_request *areq = req->data; 201 + struct ahash_request_priv *priv = areq->priv; 202 + crypto_completion_t complete = priv->complete; 203 + void *data = priv->data; 204 + 205 + ahash_op_unaligned_finish(areq, err); 206 + 207 + complete(data, err); 208 + } 209 + 210 + static int ahash_op_unaligned(struct ahash_request *req, 211 + int (*op)(struct ahash_request *)) 212 + { 213 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 214 + 
unsigned long alignmask = crypto_ahash_alignmask(tfm); 215 + unsigned int ds = crypto_ahash_digestsize(tfm); 216 + struct ahash_request_priv *priv; 217 + int err; 218 + 219 + priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask), 220 + (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 221 + GFP_ATOMIC : GFP_ATOMIC); 222 + if (!priv) 223 + return -ENOMEM; 224 + 225 + priv->result = req->result; 226 + priv->complete = req->base.complete; 227 + priv->data = req->base.data; 228 + 229 + req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1); 230 + req->base.complete = ahash_op_unaligned_done; 231 + req->base.data = req; 232 + req->priv = priv; 233 + 234 + err = op(req); 235 + ahash_op_unaligned_finish(req, err); 236 + 237 + return err; 238 + } 239 + 240 + static int crypto_ahash_op(struct ahash_request *req, 241 + int (*op)(struct ahash_request *)) 242 + { 243 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 244 + unsigned long alignmask = crypto_ahash_alignmask(tfm); 245 + 246 + if ((unsigned long)req->result & alignmask) 247 + return ahash_op_unaligned(req, op); 248 + 249 + return op(req); 250 + } 251 + 252 + int crypto_ahash_final(struct ahash_request *req) 253 + { 254 + return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->final); 255 + } 256 + EXPORT_SYMBOL_GPL(crypto_ahash_final); 257 + 258 + int crypto_ahash_finup(struct ahash_request *req) 259 + { 260 + return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->finup); 261 + } 262 + EXPORT_SYMBOL_GPL(crypto_ahash_finup); 263 + 264 + int crypto_ahash_digest(struct ahash_request *req) 265 + { 266 + return crypto_ahash_op(req, crypto_ahash_reqtfm(req)->digest); 267 + } 268 + EXPORT_SYMBOL_GPL(crypto_ahash_digest); 269 + 270 + static void ahash_def_finup_finish2(struct ahash_request *req, int err) 271 + { 272 + struct ahash_request_priv *priv = req->priv; 273 + 274 + if (err == -EINPROGRESS) 275 + return; 276 + 277 + if (!err) 278 + memcpy(priv->result, req->result, 279 + 
crypto_ahash_digestsize(crypto_ahash_reqtfm(req))); 280 + 281 + kzfree(priv); 282 + } 283 + 284 + static void ahash_def_finup_done2(struct crypto_async_request *req, int err) 285 + { 286 + struct ahash_request *areq = req->data; 287 + struct ahash_request_priv *priv = areq->priv; 288 + crypto_completion_t complete = priv->complete; 289 + void *data = priv->data; 290 + 291 + ahash_def_finup_finish2(areq, err); 292 + 293 + complete(data, err); 294 + } 295 + 296 + static int ahash_def_finup_finish1(struct ahash_request *req, int err) 297 + { 298 + if (err) 299 + goto out; 300 + 301 + req->base.complete = ahash_def_finup_done2; 302 + req->base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP; 303 + err = crypto_ahash_reqtfm(req)->final(req); 304 + 305 + out: 306 + ahash_def_finup_finish2(req, err); 307 + return err; 308 + } 309 + 310 + static void ahash_def_finup_done1(struct crypto_async_request *req, int err) 311 + { 312 + struct ahash_request *areq = req->data; 313 + struct ahash_request_priv *priv = areq->priv; 314 + crypto_completion_t complete = priv->complete; 315 + void *data = priv->data; 316 + 317 + err = ahash_def_finup_finish1(areq, err); 318 + 319 + complete(data, err); 320 + } 321 + 322 + static int ahash_def_finup(struct ahash_request *req) 323 + { 324 + struct crypto_ahash *tfm = crypto_ahash_reqtfm(req); 325 + unsigned long alignmask = crypto_ahash_alignmask(tfm); 326 + unsigned int ds = crypto_ahash_digestsize(tfm); 327 + struct ahash_request_priv *priv; 328 + 329 + priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask), 330 + (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? 
331 + GFP_ATOMIC : GFP_ATOMIC); 332 + if (!priv) 333 + return -ENOMEM; 334 + 335 + priv->result = req->result; 336 + priv->complete = req->base.complete; 337 + priv->data = req->base.data; 338 + 339 + req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1); 340 + req->base.complete = ahash_def_finup_done1; 341 + req->base.data = req; 342 + req->priv = priv; 343 + 344 + return ahash_def_finup_finish1(req, tfm->update(req)); 345 + } 346 + 347 + static int ahash_no_export(struct ahash_request *req, void *out) 348 + { 349 + return -ENOSYS; 350 + } 351 + 352 + static int ahash_no_import(struct ahash_request *req, const void *in) 180 353 { 181 354 return -ENOSYS; 182 355 } ··· 366 179 struct crypto_ahash *hash = __crypto_ahash_cast(tfm); 367 180 struct ahash_alg *alg = crypto_ahash_alg(hash); 368 181 182 + hash->setkey = ahash_nosetkey; 183 + hash->export = ahash_no_export; 184 + hash->import = ahash_no_import; 185 + 369 186 if (tfm->__crt_alg->cra_type != &crypto_ahash_type) 370 187 return crypto_init_shash_ops_async(tfm); 371 188 372 189 hash->init = alg->init; 373 190 hash->update = alg->update; 374 - hash->final = alg->final; 191 + hash->final = alg->final; 192 + hash->finup = alg->finup ?: ahash_def_finup; 375 193 hash->digest = alg->digest; 376 - hash->setkey = alg->setkey ? ahash_setkey : ahash_nosetkey; 194 + 195 + if (alg->setkey) 196 + hash->setkey = alg->setkey; 197 + if (alg->export) 198 + hash->export = alg->export; 199 + if (alg->import) 200 + hash->import = alg->import; 377 201 378 202 return 0; 379 203 }
+48 -4
crypto/shash.c
··· 235 235 return crypto_shash_final(ahash_request_ctx(req), req->result); 236 236 } 237 237 238 + int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc) 239 + { 240 + struct crypto_hash_walk walk; 241 + int nbytes; 242 + 243 + for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0; 244 + nbytes = crypto_hash_walk_done(&walk, nbytes)) 245 + nbytes = crypto_hash_walk_last(&walk) ? 246 + crypto_shash_finup(desc, walk.data, nbytes, 247 + req->result) : 248 + crypto_shash_update(desc, walk.data, nbytes); 249 + 250 + return nbytes; 251 + } 252 + EXPORT_SYMBOL_GPL(shash_ahash_finup); 253 + 254 + static int shash_async_finup(struct ahash_request *req) 255 + { 256 + struct crypto_shash **ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(req)); 257 + struct shash_desc *desc = ahash_request_ctx(req); 258 + 259 + desc->tfm = *ctx; 260 + desc->flags = req->base.flags; 261 + 262 + return shash_ahash_finup(req, desc); 263 + } 264 + 238 265 int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc) 239 266 { 240 267 struct scatterlist *sg = req->src; ··· 279 252 crypto_yield(desc->flags); 280 253 } else 281 254 err = crypto_shash_init(desc) ?: 282 - shash_ahash_update(req, desc) ?: 283 - crypto_shash_final(desc, req->result); 255 + shash_ahash_finup(req, desc); 284 256 285 257 return err; 286 258 } ··· 296 270 return shash_ahash_digest(req, desc); 297 271 } 298 272 273 + static int shash_async_export(struct ahash_request *req, void *out) 274 + { 275 + return crypto_shash_export(ahash_request_ctx(req), out); 276 + } 277 + 278 + static int shash_async_import(struct ahash_request *req, const void *in) 279 + { 280 + return crypto_shash_import(ahash_request_ctx(req), in); 281 + } 282 + 299 283 static void crypto_exit_shash_ops_async(struct crypto_tfm *tfm) 300 284 { 301 285 struct crypto_shash **ctx = crypto_tfm_ctx(tfm); ··· 316 280 int crypto_init_shash_ops_async(struct crypto_tfm *tfm) 317 281 { 318 282 struct crypto_alg *calg = tfm->__crt_alg; 
283 + struct shash_alg *alg = __crypto_shash_alg(calg); 319 284 struct crypto_ahash *crt = __crypto_ahash_cast(tfm); 320 285 struct crypto_shash **ctx = crypto_tfm_ctx(tfm); 321 286 struct crypto_shash *shash; ··· 335 298 336 299 crt->init = shash_async_init; 337 300 crt->update = shash_async_update; 338 - crt->final = shash_async_final; 301 + crt->final = shash_async_final; 302 + crt->finup = shash_async_finup; 339 303 crt->digest = shash_async_digest; 340 - crt->setkey = shash_async_setkey; 304 + 305 + if (alg->setkey) 306 + crt->setkey = shash_async_setkey; 307 + if (alg->export) 308 + crt->export = shash_async_export; 309 + if (alg->setkey) 310 + crt->import = shash_async_import; 341 311 342 312 crt->reqsize = sizeof(struct shash_desc) + crypto_shash_descsize(shash); 343 313
+8 -15
include/crypto/hash.h
··· 31 31 struct scatterlist *src; 32 32 u8 *result; 33 33 34 + /* This field may only be used by the ahash API code. */ 35 + void *priv; 36 + 34 37 void *__ctx[] CRYPTO_MINALIGN_ATTR; 35 38 }; 36 39 ··· 178 175 return req->__ctx; 179 176 } 180 177 181 - static inline int crypto_ahash_setkey(struct crypto_ahash *tfm, 182 - const u8 *key, unsigned int keylen) 183 - { 184 - return tfm->setkey(tfm, key, keylen); 185 - } 186 - 187 - static inline int crypto_ahash_digest(struct ahash_request *req) 188 - { 189 - return crypto_ahash_reqtfm(req)->digest(req); 190 - } 178 + int crypto_ahash_setkey(struct crypto_ahash *tfm, const u8 *key, 179 + unsigned int keylen); 180 + int crypto_ahash_finup(struct ahash_request *req); 181 + int crypto_ahash_final(struct ahash_request *req); 182 + int crypto_ahash_digest(struct ahash_request *req); 191 183 192 184 static inline int crypto_ahash_export(struct ahash_request *req, void *out) 193 185 { ··· 202 204 static inline int crypto_ahash_update(struct ahash_request *req) 203 205 { 204 206 return crypto_ahash_reqtfm(req)->update(req); 205 - } 206 - 207 - static inline int crypto_ahash_final(struct ahash_request *req) 208 - { 209 - return crypto_ahash_reqtfm(req)->final(req); 210 207 } 211 208 212 209 static inline void ahash_request_set_tfm(struct ahash_request *req,
+6
include/crypto/internal/hash.h
··· 59 59 struct crypto_hash_walk *walk, 60 60 struct scatterlist *sg, unsigned int len); 61 61 62 + static inline int crypto_hash_walk_last(struct crypto_hash_walk *walk) 63 + { 64 + return !(walk->entrylen | walk->total); 65 + } 66 + 62 67 int crypto_register_ahash(struct ahash_alg *alg); 63 68 int crypto_unregister_ahash(struct ahash_alg *alg); 64 69 int ahash_register_instance(struct crypto_template *tmpl, ··· 99 94 struct shash_alg *shash_attr_alg(struct rtattr *rta, u32 type, u32 mask); 100 95 101 96 int shash_ahash_update(struct ahash_request *req, struct shash_desc *desc); 97 + int shash_ahash_finup(struct ahash_request *req, struct shash_desc *desc); 102 98 int shash_ahash_digest(struct ahash_request *req, struct shash_desc *desc); 103 99 104 100 int crypto_init_shash_ops_async(struct crypto_tfm *tfm);