Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: atmel-aes - Allocate aes dev at tfm init time

Allocate the atmel_aes_dev data at tfm init time, and not for
each crypt request.
There's a single AES IP per SoC; clarify that in the code.

Signed-off-by: Tudor Ambarus <tudor.ambarus@microchip.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Tudor Ambarus and committed by Herbert Xu
ec2088b6 bf2db8e7

+43 -33
drivers/crypto/atmel-aes.c
··· 420 420 return len ? block_size - len : 0; 421 421 } 422 422 423 - static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_base_ctx *ctx) 423 + static struct atmel_aes_dev *atmel_aes_dev_alloc(struct atmel_aes_base_ctx *ctx) 424 424 { 425 - struct atmel_aes_dev *aes_dd = NULL; 426 - struct atmel_aes_dev *tmp; 425 + struct atmel_aes_dev *aes_dd; 427 426 428 427 spin_lock_bh(&atmel_aes.lock); 429 - if (!ctx->dd) { 430 - list_for_each_entry(tmp, &atmel_aes.dev_list, list) { 431 - aes_dd = tmp; 432 - break; 433 - } 434 - ctx->dd = aes_dd; 435 - } else { 436 - aes_dd = ctx->dd; 437 - } 438 - 428 + /* One AES IP per SoC. */ 429 + aes_dd = list_first_entry_or_null(&atmel_aes.dev_list, 430 + struct atmel_aes_dev, list); 439 431 spin_unlock_bh(&atmel_aes.lock); 440 - 441 432 return aes_dd; 442 433 } 443 434 ··· 960 969 ctx = crypto_tfm_ctx(areq->tfm); 961 970 962 971 dd->areq = areq; 963 - dd->ctx = ctx; 964 972 start_async = (areq != new_areq); 965 973 dd->is_async = start_async; 966 974 ··· 1096 1106 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req); 1097 1107 struct atmel_aes_base_ctx *ctx = crypto_skcipher_ctx(skcipher); 1098 1108 struct atmel_aes_reqctx *rctx; 1099 - struct atmel_aes_dev *dd; 1100 1109 u32 opmode = mode & AES_FLAGS_OPMODE_MASK; 1101 1110 1102 1111 if (opmode == AES_FLAGS_XTS) { ··· 1141 1152 } 1142 1153 ctx->is_aead = false; 1143 1154 1144 - dd = atmel_aes_find_dev(ctx); 1145 - if (!dd) 1146 - return -ENODEV; 1147 - 1148 1155 rctx = skcipher_request_ctx(req); 1149 1156 rctx->mode = mode; 1150 1157 ··· 1154 1169 ivsize, 0); 1155 1170 } 1156 1171 1157 - return atmel_aes_handle_queue(dd, &req->base); 1172 + return atmel_aes_handle_queue(ctx->dd, &req->base); 1158 1173 } 1159 1174 1160 1175 static int atmel_aes_setkey(struct crypto_skcipher *tfm, const u8 *key, ··· 1266 1281 static int atmel_aes_init_tfm(struct crypto_skcipher *tfm) 1267 1282 { 1268 1283 struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm); 1284 + struct 
atmel_aes_dev *dd; 1285 + 1286 + dd = atmel_aes_dev_alloc(&ctx->base); 1287 + if (!dd) 1288 + return -ENODEV; 1269 1289 1270 1290 crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx)); 1291 + ctx->base.dd = dd; 1292 + ctx->base.dd->ctx = &ctx->base; 1271 1293 ctx->base.start = atmel_aes_start; 1272 1294 1273 1295 return 0; ··· 1283 1291 static int atmel_aes_ctr_init_tfm(struct crypto_skcipher *tfm) 1284 1292 { 1285 1293 struct atmel_aes_ctx *ctx = crypto_skcipher_ctx(tfm); 1294 + struct atmel_aes_dev *dd; 1295 + 1296 + dd = atmel_aes_dev_alloc(&ctx->base); 1297 + if (!dd) 1298 + return -ENODEV; 1286 1299 1287 1300 crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx)); 1301 + ctx->base.dd = dd; 1302 + ctx->base.dd->ctx = &ctx->base; 1288 1303 ctx->base.start = atmel_aes_ctr_start; 1289 1304 1290 1305 return 0; ··· 1729 1730 { 1730 1731 struct atmel_aes_base_ctx *ctx; 1731 1732 struct atmel_aes_reqctx *rctx; 1732 - struct atmel_aes_dev *dd; 1733 1733 1734 1734 ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 1735 1735 ctx->block_size = AES_BLOCK_SIZE; 1736 1736 ctx->is_aead = true; 1737 1737 1738 - dd = atmel_aes_find_dev(ctx); 1739 - if (!dd) 1740 - return -ENODEV; 1741 - 1742 1738 rctx = aead_request_ctx(req); 1743 1739 rctx->mode = AES_FLAGS_GCM | mode; 1744 1740 1745 - return atmel_aes_handle_queue(dd, &req->base); 1741 + return atmel_aes_handle_queue(ctx->dd, &req->base); 1746 1742 } 1747 1743 1748 1744 static int atmel_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key, ··· 1775 1781 static int atmel_aes_gcm_init(struct crypto_aead *tfm) 1776 1782 { 1777 1783 struct atmel_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm); 1784 + struct atmel_aes_dev *dd; 1785 + 1786 + dd = atmel_aes_dev_alloc(&ctx->base); 1787 + if (!dd) 1788 + return -ENODEV; 1778 1789 1779 1790 crypto_aead_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx)); 1791 + ctx->base.dd = dd; 1792 + ctx->base.dd->ctx = &ctx->base; 1780 1793 ctx->base.start = atmel_aes_gcm_start; 1781 
1794 1782 1795 return 0; ··· 1916 1915 static int atmel_aes_xts_init_tfm(struct crypto_skcipher *tfm) 1917 1916 { 1918 1917 struct atmel_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm); 1918 + struct atmel_aes_dev *dd; 1919 1919 const char *tfm_name = crypto_tfm_alg_name(&tfm->base); 1920 + 1921 + dd = atmel_aes_dev_alloc(&ctx->base); 1922 + if (!dd) 1923 + return -ENODEV; 1920 1924 1921 1925 ctx->fallback_tfm = crypto_alloc_skcipher(tfm_name, 0, 1922 1926 CRYPTO_ALG_NEED_FALLBACK); ··· 1930 1924 1931 1925 crypto_skcipher_set_reqsize(tfm, sizeof(struct atmel_aes_reqctx) + 1932 1926 crypto_skcipher_reqsize(ctx->fallback_tfm)); 1927 + ctx->base.dd = dd; 1928 + ctx->base.dd->ctx = &ctx->base; 1933 1929 ctx->base.start = atmel_aes_xts_start; 1934 1930 1935 1931 return 0; ··· 2145 2137 { 2146 2138 struct atmel_aes_authenc_ctx *ctx = crypto_aead_ctx(tfm); 2147 2139 unsigned int auth_reqsize = atmel_sha_authenc_get_reqsize(); 2140 + struct atmel_aes_dev *dd; 2141 + 2142 + dd = atmel_aes_dev_alloc(&ctx->base); 2143 + if (!dd) 2144 + return -ENODEV; 2148 2145 2149 2146 ctx->auth = atmel_sha_authenc_spawn(auth_mode); 2150 2147 if (IS_ERR(ctx->auth)) ··· 2157 2144 2158 2145 crypto_aead_set_reqsize(tfm, (sizeof(struct atmel_aes_authenc_reqctx) + 2159 2146 auth_reqsize)); 2147 + ctx->base.dd = dd; 2148 + ctx->base.dd->ctx = &ctx->base; 2160 2149 ctx->base.start = atmel_aes_authenc_start; 2161 2150 2162 2151 return 0; ··· 2204 2189 struct atmel_aes_base_ctx *ctx = crypto_aead_ctx(tfm); 2205 2190 u32 authsize = crypto_aead_authsize(tfm); 2206 2191 bool enc = (mode & AES_FLAGS_ENCRYPT); 2207 - struct atmel_aes_dev *dd; 2208 2192 2209 2193 /* Compute text length. 
*/ 2210 2194 if (!enc && req->cryptlen < authsize) ··· 2222 2208 ctx->block_size = AES_BLOCK_SIZE; 2223 2209 ctx->is_aead = true; 2224 2210 2225 - dd = atmel_aes_find_dev(ctx); 2226 - if (!dd) 2227 - return -ENODEV; 2228 - 2229 - return atmel_aes_handle_queue(dd, &req->base); 2211 + return atmel_aes_handle_queue(ctx->dd, &req->base); 2230 2212 } 2231 2213 2232 2214 static int atmel_aes_authenc_cbc_aes_encrypt(struct aead_request *req)