Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
Tags: kernel, os, linux

crypto: nx - use the new scatterwalk functions

- In nx_walk_and_build(), use scatterwalk_start_at_pos() instead of a
more complex way to achieve the same result.

- Also in nx_walk_and_build(), use the new functions scatterwalk_next(),
which consolidates scatterwalk_clamp() and scatterwalk_map(), and
scatterwalk_done_src(), which consolidates scatterwalk_unmap(),
scatterwalk_advance(), and scatterwalk_done(). Remove unnecessary
code that seemed to be intended to advance to the next sg entry, which
is already handled by the scatterwalk functions.

Note that nx_walk_and_build() does not actually read or write the
mapped virtual address, and thus it is misusing the scatter_walk API.
It really should just access the scatterlist directly. This patch
does not try to address this existing issue.

- In nx_gca(), use memcpy_from_sglist() instead of a more complex way to
achieve the same result.

- In various functions, replace calls to scatterwalk_map_and_copy() with
memcpy_from_sglist() or memcpy_to_sglist() as appropriate. Note that
this eliminates the confusing 'out' argument (which this driver had
tried to work around by defining the missing constants for it...)

Cc: Christophe Leroy <christophe.leroy@csgroup.eu>
Cc: Madhavan Srinivasan <maddy@linux.ibm.com>
Cc: Michael Ellerman <mpe@ellerman.id.au>
Cc: Naveen N Rao <naveen@kernel.org>
Cc: Nicholas Piggin <npiggin@gmail.com>
Cc: linuxppc-dev@lists.ozlabs.org
Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Eric Biggers; committed by Herbert Xu.
422bf8fc 8fd0eecd

+17 -50
+6 -10
drivers/crypto/nx/nx-aes-ccm.c
··· 217 217 memset(b1, 0, 16); 218 218 if (assoclen <= 65280) { 219 219 *(u16 *)b1 = assoclen; 220 - scatterwalk_map_and_copy(b1 + 2, req->src, 0, 221 - iauth_len, SCATTERWALK_FROM_SG); 220 + memcpy_from_sglist(b1 + 2, req->src, 0, iauth_len); 222 221 } else { 223 222 *(u16 *)b1 = (u16)(0xfffe); 224 223 *(u32 *)&b1[2] = assoclen; 225 - scatterwalk_map_and_copy(b1 + 6, req->src, 0, 226 - iauth_len, SCATTERWALK_FROM_SG); 224 + memcpy_from_sglist(b1 + 6, req->src, 0, iauth_len); 227 225 } 228 226 } 229 227 ··· 339 341 nbytes -= authsize; 340 342 341 343 /* copy out the auth tag to compare with later */ 342 - scatterwalk_map_and_copy(priv->oauth_tag, 343 - req->src, nbytes + req->assoclen, authsize, 344 - SCATTERWALK_FROM_SG); 344 + memcpy_from_sglist(priv->oauth_tag, req->src, nbytes + req->assoclen, 345 + authsize); 345 346 346 347 rc = generate_pat(iv, req, nx_ctx, authsize, nbytes, assoclen, 347 348 csbcpb->cpb.aes_ccm.in_pat_or_b0); ··· 462 465 } while (processed < nbytes); 463 466 464 467 /* copy out the auth tag */ 465 - scatterwalk_map_and_copy(csbcpb->cpb.aes_ccm.out_pat_or_mac, 466 - req->dst, nbytes + req->assoclen, authsize, 467 - SCATTERWALK_TO_SG); 468 + memcpy_to_sglist(req->dst, nbytes + req->assoclen, 469 + csbcpb->cpb.aes_ccm.out_pat_or_mac, authsize); 468 470 469 471 out: 470 472 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);
+6 -11
drivers/crypto/nx/nx-aes-gcm.c
··· 103 103 { 104 104 int rc; 105 105 struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead; 106 - struct scatter_walk walk; 107 106 struct nx_sg *nx_sg = nx_ctx->in_sg; 108 107 unsigned int nbytes = assoclen; 109 108 unsigned int processed = 0, to_process; 110 109 unsigned int max_sg_len; 111 110 112 111 if (nbytes <= AES_BLOCK_SIZE) { 113 - scatterwalk_start(&walk, req->src); 114 - scatterwalk_copychunks(out, &walk, nbytes, SCATTERWALK_FROM_SG); 115 - scatterwalk_done(&walk, SCATTERWALK_FROM_SG, 0); 112 + memcpy_from_sglist(out, req->src, 0, nbytes); 116 113 return 0; 117 114 } 118 115 ··· 388 391 mac: 389 392 if (enc) { 390 393 /* copy out the auth tag */ 391 - scatterwalk_map_and_copy( 392 - csbcpb->cpb.aes_gcm.out_pat_or_mac, 394 + memcpy_to_sglist( 393 395 req->dst, req->assoclen + nbytes, 394 - crypto_aead_authsize(crypto_aead_reqtfm(req)), 395 - SCATTERWALK_TO_SG); 396 + csbcpb->cpb.aes_gcm.out_pat_or_mac, 397 + crypto_aead_authsize(crypto_aead_reqtfm(req))); 396 398 } else { 397 399 u8 *itag = nx_ctx->priv.gcm.iauth_tag; 398 400 u8 *otag = csbcpb->cpb.aes_gcm.out_pat_or_mac; 399 401 400 - scatterwalk_map_and_copy( 402 + memcpy_from_sglist( 401 403 itag, req->src, req->assoclen + nbytes, 402 - crypto_aead_authsize(crypto_aead_reqtfm(req)), 403 - SCATTERWALK_FROM_SG); 404 + crypto_aead_authsize(crypto_aead_reqtfm(req))); 404 405 rc = crypto_memneq(itag, otag, 405 406 crypto_aead_authsize(crypto_aead_reqtfm(req))) ? 406 407 -EBADMSG : 0;
+5 -26
drivers/crypto/nx/nx.c
··· 153 153 { 154 154 struct scatter_walk walk; 155 155 struct nx_sg *nx_sg = nx_dst; 156 - unsigned int n, offset = 0, len = *src_len; 156 + unsigned int n, len = *src_len; 157 157 char *dst; 158 158 159 159 /* we need to fast forward through @start bytes first */ 160 - for (;;) { 161 - scatterwalk_start(&walk, sg_src); 162 - 163 - if (start < offset + sg_src->length) 164 - break; 165 - 166 - offset += sg_src->length; 167 - sg_src = sg_next(sg_src); 168 - } 169 - 170 - /* start - offset is the number of bytes to advance in the scatterlist 171 - * element we're currently looking at */ 172 - scatterwalk_advance(&walk, start - offset); 160 + scatterwalk_start_at_pos(&walk, sg_src, start); 173 161 174 162 while (len && (nx_sg - nx_dst) < sglen) { 175 - n = scatterwalk_clamp(&walk, len); 176 - if (!n) { 177 - /* In cases where we have scatterlist chain sg_next 178 - * handles with it properly */ 179 - scatterwalk_start(&walk, sg_next(walk.sg)); 180 - n = scatterwalk_clamp(&walk, len); 181 - } 182 - dst = scatterwalk_map(&walk); 163 + dst = scatterwalk_next(&walk, len, &n); 183 164 184 165 nx_sg = nx_build_sg_list(nx_sg, dst, &n, sglen - (nx_sg - nx_dst)); 185 - len -= n; 186 166 187 - scatterwalk_unmap(dst); 188 - scatterwalk_advance(&walk, n); 189 - scatterwalk_done(&walk, SCATTERWALK_FROM_SG, len); 167 + scatterwalk_done_src(&walk, dst, n); 168 + len -= n; 190 169 } 191 170 /* update to_process */ 192 171 *src_len -= len;
-3
drivers/crypto/nx/nx.h
··· 189 189 190 190 extern struct nx_crypto_driver nx_driver; 191 191 192 - #define SCATTERWALK_TO_SG 1 193 - #define SCATTERWALK_FROM_SG 0 194 - 195 192 #endif