Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: atmel-aes - add support to the XTS mode

This patch adds the xts(aes) algorithm, which is supported from
hardware version 0x500 and above (sama5d2x).

Signed-off-by: Cyrille Pitchen <cyrille.pitchen@atmel.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Cyrille Pitchen and committed by Herbert Xu
d52db518 f709dc86

+183 -6
+4
drivers/crypto/atmel-aes-regs.h
··· 28 28 #define AES_MR_OPMOD_CFB (0x3 << 12) 29 29 #define AES_MR_OPMOD_CTR (0x4 << 12) 30 30 #define AES_MR_OPMOD_GCM (0x5 << 12) 31 + #define AES_MR_OPMOD_XTS (0x6 << 12) 31 32 #define AES_MR_LOD (0x1 << 15) 32 33 #define AES_MR_CFBS_MASK (0x7 << 16) 33 34 #define AES_MR_CFBS_128b (0x0 << 16) ··· 67 66 #define AES_TAGR(x) (0x88 + ((x) * 0x04)) 68 67 #define AES_CTRR 0x98 69 68 #define AES_GCMHR(x) (0x9c + ((x) * 0x04)) 69 + 70 + #define AES_TWR(x) (0xc0 + ((x) * 0x04)) 71 + #define AES_ALPHAR(x) (0xd0 + ((x) * 0x04)) 70 72 71 73 #define AES_HW_VERSION 0xFC 72 74
+179 -6
drivers/crypto/atmel-aes.c
··· 36 36 #include <crypto/scatterwalk.h> 37 37 #include <crypto/algapi.h> 38 38 #include <crypto/aes.h> 39 + #include <crypto/xts.h> 39 40 #include <crypto/internal/aead.h> 40 41 #include <linux/platform_data/crypto-atmel.h> 41 42 #include <dt-bindings/dma/at91.h> ··· 69 68 #define AES_FLAGS_CFB8 (AES_MR_OPMOD_CFB | AES_MR_CFBS_8b) 70 69 #define AES_FLAGS_CTR AES_MR_OPMOD_CTR 71 70 #define AES_FLAGS_GCM AES_MR_OPMOD_GCM 71 + #define AES_FLAGS_XTS AES_MR_OPMOD_XTS 72 72 73 73 #define AES_FLAGS_MODE_MASK (AES_FLAGS_OPMODE_MASK | \ 74 74 AES_FLAGS_ENCRYPT | \ ··· 91 89 bool has_cfb64; 92 90 bool has_ctr32; 93 91 bool has_gcm; 92 + bool has_xts; 94 93 u32 max_burst_size; 95 94 }; 96 95 ··· 136 133 const u32 *ghash_in; 137 134 u32 *ghash_out; 138 135 atmel_aes_fn_t ghash_resume; 136 + }; 137 + 138 + struct atmel_aes_xts_ctx { 139 + struct atmel_aes_base_ctx base; 140 + 141 + u32 key2[AES_KEYSIZE_256 / sizeof(u32)]; 139 142 }; 140 143 141 144 struct atmel_aes_reqctx { ··· 289 280 case AES_GCMHR(2): 290 281 case AES_GCMHR(3): 291 282 snprintf(tmp, sz, "GCMHR[%u]", (offset - AES_GCMHR(0)) >> 2); 283 + break; 284 + 285 + case AES_TWR(0): 286 + case AES_TWR(1): 287 + case AES_TWR(2): 288 + case AES_TWR(3): 289 + snprintf(tmp, sz, "TWR[%u]", (offset - AES_TWR(0)) >> 2); 290 + break; 291 + 292 + case AES_ALPHAR(0): 293 + case AES_ALPHAR(1): 294 + case AES_ALPHAR(2): 295 + case AES_ALPHAR(3): 296 + snprintf(tmp, sz, "ALPHAR[%u]", (offset - AES_ALPHAR(0)) >> 2); 292 297 break; 293 298 294 299 default: ··· 476 453 return err; 477 454 } 478 455 479 - static void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma, 480 - const u32 *iv) 456 + static void atmel_aes_write_ctrl_key(struct atmel_aes_dev *dd, bool use_dma, 457 + const u32 *iv, const u32 *key, int keylen) 481 458 { 482 459 u32 valmr = 0; 483 460 484 461 /* MR register must be set before IV registers */ 485 - if (dd->ctx->keylen == AES_KEYSIZE_128) 462 + if (keylen == AES_KEYSIZE_128) 486 463 valmr |= 
AES_MR_KEYSIZE_128; 487 - else if (dd->ctx->keylen == AES_KEYSIZE_192) 464 + else if (keylen == AES_KEYSIZE_192) 488 465 valmr |= AES_MR_KEYSIZE_192; 489 466 else 490 467 valmr |= AES_MR_KEYSIZE_256; ··· 501 478 502 479 atmel_aes_write(dd, AES_MR, valmr); 503 480 504 - atmel_aes_write_n(dd, AES_KEYWR(0), dd->ctx->key, 505 - SIZE_IN_WORDS(dd->ctx->keylen)); 481 + atmel_aes_write_n(dd, AES_KEYWR(0), key, SIZE_IN_WORDS(keylen)); 506 482 507 483 if (iv && (valmr & AES_MR_OPMOD_MASK) != AES_MR_OPMOD_ECB) 508 484 atmel_aes_write_block(dd, AES_IVR(0), iv); 509 485 } 510 486 487 + static inline void atmel_aes_write_ctrl(struct atmel_aes_dev *dd, bool use_dma, 488 + const u32 *iv) 489 + 490 + { 491 + atmel_aes_write_ctrl_key(dd, use_dma, iv, 492 + dd->ctx->key, dd->ctx->keylen); 493 + } 511 494 512 495 /* CPU transfer */ 513 496 ··· 1798 1769 }; 1799 1770 1800 1771 1772 + /* xts functions */ 1773 + 1774 + static inline struct atmel_aes_xts_ctx * 1775 + atmel_aes_xts_ctx_cast(struct atmel_aes_base_ctx *ctx) 1776 + { 1777 + return container_of(ctx, struct atmel_aes_xts_ctx, base); 1778 + } 1779 + 1780 + static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd); 1781 + 1782 + static int atmel_aes_xts_start(struct atmel_aes_dev *dd) 1783 + { 1784 + struct atmel_aes_xts_ctx *ctx = atmel_aes_xts_ctx_cast(dd->ctx); 1785 + struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); 1786 + struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req); 1787 + unsigned long flags; 1788 + int err; 1789 + 1790 + atmel_aes_set_mode(dd, rctx); 1791 + 1792 + err = atmel_aes_hw_init(dd); 1793 + if (err) 1794 + return atmel_aes_complete(dd, err); 1795 + 1796 + /* Compute the tweak value from req->info with ecb(aes). 
*/ 1797 + flags = dd->flags; 1798 + dd->flags &= ~AES_FLAGS_MODE_MASK; 1799 + dd->flags |= (AES_FLAGS_ECB | AES_FLAGS_ENCRYPT); 1800 + atmel_aes_write_ctrl_key(dd, false, NULL, 1801 + ctx->key2, ctx->base.keylen); 1802 + dd->flags = flags; 1803 + 1804 + atmel_aes_write_block(dd, AES_IDATAR(0), req->info); 1805 + return atmel_aes_wait_for_data_ready(dd, atmel_aes_xts_process_data); 1806 + } 1807 + 1808 + static int atmel_aes_xts_process_data(struct atmel_aes_dev *dd) 1809 + { 1810 + struct ablkcipher_request *req = ablkcipher_request_cast(dd->areq); 1811 + bool use_dma = (req->nbytes >= ATMEL_AES_DMA_THRESHOLD); 1812 + u32 tweak[AES_BLOCK_SIZE / sizeof(u32)]; 1813 + static const u32 one[AES_BLOCK_SIZE / sizeof(u32)] = {cpu_to_le32(1), }; 1814 + u8 *tweak_bytes = (u8 *)tweak; 1815 + int i; 1816 + 1817 + /* Read the computed ciphered tweak value. */ 1818 + atmel_aes_read_block(dd, AES_ODATAR(0), tweak); 1819 + /* 1820 + * Hardware quirk: 1821 + * the order of the ciphered tweak bytes need to be reversed before 1822 + * writing them into the ODATARx registers. 1823 + */ 1824 + for (i = 0; i < AES_BLOCK_SIZE/2; ++i) { 1825 + u8 tmp = tweak_bytes[AES_BLOCK_SIZE - 1 - i]; 1826 + 1827 + tweak_bytes[AES_BLOCK_SIZE - 1 - i] = tweak_bytes[i]; 1828 + tweak_bytes[i] = tmp; 1829 + } 1830 + 1831 + /* Process the data. 
*/ 1832 + atmel_aes_write_ctrl(dd, use_dma, NULL); 1833 + atmel_aes_write_block(dd, AES_TWR(0), tweak); 1834 + atmel_aes_write_block(dd, AES_ALPHAR(0), one); 1835 + if (use_dma) 1836 + return atmel_aes_dma_start(dd, req->src, req->dst, req->nbytes, 1837 + atmel_aes_transfer_complete); 1838 + 1839 + return atmel_aes_cpu_start(dd, req->src, req->dst, req->nbytes, 1840 + atmel_aes_transfer_complete); 1841 + } 1842 + 1843 + static int atmel_aes_xts_setkey(struct crypto_ablkcipher *tfm, const u8 *key, 1844 + unsigned int keylen) 1845 + { 1846 + struct atmel_aes_xts_ctx *ctx = crypto_ablkcipher_ctx(tfm); 1847 + int err; 1848 + 1849 + err = xts_check_key(crypto_ablkcipher_tfm(tfm), key, keylen); 1850 + if (err) 1851 + return err; 1852 + 1853 + memcpy(ctx->base.key, key, keylen/2); 1854 + memcpy(ctx->key2, key + keylen/2, keylen/2); 1855 + ctx->base.keylen = keylen/2; 1856 + 1857 + return 0; 1858 + } 1859 + 1860 + static int atmel_aes_xts_encrypt(struct ablkcipher_request *req) 1861 + { 1862 + return atmel_aes_crypt(req, AES_FLAGS_XTS | AES_FLAGS_ENCRYPT); 1863 + } 1864 + 1865 + static int atmel_aes_xts_decrypt(struct ablkcipher_request *req) 1866 + { 1867 + return atmel_aes_crypt(req, AES_FLAGS_XTS); 1868 + } 1869 + 1870 + static int atmel_aes_xts_cra_init(struct crypto_tfm *tfm) 1871 + { 1872 + struct atmel_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm); 1873 + 1874 + tfm->crt_ablkcipher.reqsize = sizeof(struct atmel_aes_reqctx); 1875 + ctx->base.start = atmel_aes_xts_start; 1876 + 1877 + return 0; 1878 + } 1879 + 1880 + static struct crypto_alg aes_xts_alg = { 1881 + .cra_name = "xts(aes)", 1882 + .cra_driver_name = "atmel-xts-aes", 1883 + .cra_priority = ATMEL_AES_PRIORITY, 1884 + .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC, 1885 + .cra_blocksize = AES_BLOCK_SIZE, 1886 + .cra_ctxsize = sizeof(struct atmel_aes_xts_ctx), 1887 + .cra_alignmask = 0xf, 1888 + .cra_type = &crypto_ablkcipher_type, 1889 + .cra_module = THIS_MODULE, 1890 + .cra_init = 
atmel_aes_xts_cra_init, 1891 + .cra_exit = atmel_aes_cra_exit, 1892 + .cra_u.ablkcipher = { 1893 + .min_keysize = 2 * AES_MIN_KEY_SIZE, 1894 + .max_keysize = 2 * AES_MAX_KEY_SIZE, 1895 + .ivsize = AES_BLOCK_SIZE, 1896 + .setkey = atmel_aes_xts_setkey, 1897 + .encrypt = atmel_aes_xts_encrypt, 1898 + .decrypt = atmel_aes_xts_decrypt, 1899 + } 1900 + }; 1901 + 1902 + 1801 1903 /* Probe functions */ 1802 1904 1803 1905 static int atmel_aes_buff_init(struct atmel_aes_dev *dd) ··· 2037 1877 { 2038 1878 int i; 2039 1879 1880 + if (dd->caps.has_xts) 1881 + crypto_unregister_alg(&aes_xts_alg); 1882 + 2040 1883 if (dd->caps.has_gcm) 2041 1884 crypto_unregister_aead(&aes_gcm_alg); 2042 1885 ··· 2072 1909 goto err_aes_gcm_alg; 2073 1910 } 2074 1911 1912 + if (dd->caps.has_xts) { 1913 + err = crypto_register_alg(&aes_xts_alg); 1914 + if (err) 1915 + goto err_aes_xts_alg; 1916 + } 1917 + 2075 1918 return 0; 2076 1919 1920 + err_aes_xts_alg: 1921 + crypto_unregister_aead(&aes_gcm_alg); 2077 1922 err_aes_gcm_alg: 2078 1923 crypto_unregister_alg(&aes_cfb64_alg); 2079 1924 err_aes_cfb64_alg: ··· 2099 1928 dd->caps.has_cfb64 = 0; 2100 1929 dd->caps.has_ctr32 = 0; 2101 1930 dd->caps.has_gcm = 0; 1931 + dd->caps.has_xts = 0; 2102 1932 dd->caps.max_burst_size = 1; 2103 1933 2104 1934 /* keep only major version number */ ··· 2109 1937 dd->caps.has_cfb64 = 1; 2110 1938 dd->caps.has_ctr32 = 1; 2111 1939 dd->caps.has_gcm = 1; 1940 + dd->caps.has_xts = 1; 2112 1941 dd->caps.max_burst_size = 4; 2113 1942 break; 2114 1943 case 0x200: