Linux kernel mirror (for testing)
git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel
os
linux
1/*
2 * Shared glue code for 128bit block ciphers
3 */
4
5#ifndef _CRYPTO_GLUE_HELPER_H
6#define _CRYPTO_GLUE_HELPER_H
7
8#include <crypto/internal/skcipher.h>
9#include <linux/kernel.h>
10#include <asm/fpu/api.h>
11#include <crypto/b128ops.h>
12
/*
 * Prototypes of the mode-specific cipher helpers the glue code
 * dispatches to.  All of them process 128-bit (16-byte) blocks; the
 * ctr/xts variants additionally carry the IV/tweak in @iv.
 */
typedef void (*common_glue_func_t)(void *ctx, u8 *dst, const u8 *src);
typedef void (*common_glue_cbc_func_t)(void *ctx, u128 *dst, const u128 *src);
typedef void (*common_glue_ctr_func_t)(void *ctx, u128 *dst, const u128 *src,
				       le128 *iv);
typedef void (*common_glue_xts_func_t)(void *ctx, u128 *dst, const u128 *src,
				       le128 *iv);

/*
 * Cast cipher-specific implementations to the common prototypes above.
 * NOTE(review): calling through a function pointer cast to an
 * incompatible type is formally undefined behaviour in ISO C; this
 * relies on the platform calling convention making it work in practice.
 */
#define GLUE_FUNC_CAST(fn) ((common_glue_func_t)(fn))
#define GLUE_CBC_FUNC_CAST(fn) ((common_glue_cbc_func_t)(fn))
#define GLUE_CTR_FUNC_CAST(fn) ((common_glue_ctr_func_t)(fn))
#define GLUE_XTS_FUNC_CAST(fn) ((common_glue_xts_func_t)(fn))
24
/*
 * One dispatch-table entry: a helper function and the number of blocks
 * it consumes per invocation.
 */
struct common_glue_func_entry {
	unsigned int num_blocks; /* number of blocks that @fn will process */
	union {
		/* Exactly one member is valid, chosen by the mode in use. */
		common_glue_func_t ecb;
		common_glue_cbc_func_t cbc;
		common_glue_ctr_func_t ctr;
		common_glue_xts_func_t xts;
	} fn_u;
};
34
/*
 * Per-cipher dispatch table: @num_funcs entries ordered from the widest
 * (multi-block, typically SIMD) helper down to a one-block fallback.
 */
struct common_glue_ctx {
	unsigned int num_funcs;
	int fpu_blocks_limit; /* -1 means fpu not needed at all */

	/*
	 * First funcs entry must have largest num_blocks and last funcs entry
	 * must have num_blocks == 1!
	 */
	struct common_glue_func_entry funcs[];
};
45
46static inline bool glue_fpu_begin(unsigned int bsize, int fpu_blocks_limit,
47 struct blkcipher_desc *desc,
48 bool fpu_enabled, unsigned int nbytes)
49{
50 if (likely(fpu_blocks_limit < 0))
51 return false;
52
53 if (fpu_enabled)
54 return true;
55
56 /*
57 * Vector-registers are only used when chunk to be processed is large
58 * enough, so do not enable FPU until it is necessary.
59 */
60 if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
61 return false;
62
63 if (desc) {
64 /* prevent sleeping if FPU is in use */
65 desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
66 }
67
68 kernel_fpu_begin();
69 return true;
70}
71
72static inline bool glue_skwalk_fpu_begin(unsigned int bsize,
73 int fpu_blocks_limit,
74 struct skcipher_walk *walk,
75 bool fpu_enabled, unsigned int nbytes)
76{
77 if (likely(fpu_blocks_limit < 0))
78 return false;
79
80 if (fpu_enabled)
81 return true;
82
83 /*
84 * Vector-registers are only used when chunk to be processed is large
85 * enough, so do not enable FPU until it is necessary.
86 */
87 if (nbytes < bsize * (unsigned int)fpu_blocks_limit)
88 return false;
89
90 /* prevent sleeping if FPU is in use */
91 skcipher_walk_atomise(walk);
92
93 kernel_fpu_begin();
94 return true;
95}
96
97static inline void glue_fpu_end(bool fpu_enabled)
98{
99 if (fpu_enabled)
100 kernel_fpu_end();
101}
102
103static inline void le128_to_be128(be128 *dst, const le128 *src)
104{
105 dst->a = cpu_to_be64(le64_to_cpu(src->a));
106 dst->b = cpu_to_be64(le64_to_cpu(src->b));
107}
108
109static inline void be128_to_le128(le128 *dst, const be128 *src)
110{
111 dst->a = cpu_to_le64(be64_to_cpu(src->a));
112 dst->b = cpu_to_le64(be64_to_cpu(src->b));
113}
114
115static inline void le128_inc(le128 *i)
116{
117 u64 a = le64_to_cpu(i->a);
118 u64 b = le64_to_cpu(i->b);
119
120 b++;
121 if (!b)
122 a++;
123
124 i->a = cpu_to_le64(a);
125 i->b = cpu_to_le64(b);
126}
127
/*
 * ECB: walk @src/@dst and process @nbytes with the widest suitable
 * fn_u.ecb helper from @gctx.  Returns 0 or a negative error code.
 */
extern int glue_ecb_crypt_128bit(const struct common_glue_ctx *gctx,
				 struct blkcipher_desc *desc,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes);

/*
 * CBC encryption chains each block into the next, so it takes a single
 * one-block function rather than a multi-width dispatch table.
 */
extern int glue_cbc_encrypt_128bit(const common_glue_func_t fn,
				   struct blkcipher_desc *desc,
				   struct scatterlist *dst,
				   struct scatterlist *src,
				   unsigned int nbytes);

/* CBC decryption can process blocks in parallel via @gctx's fn_u.cbc. */
extern int glue_cbc_decrypt_128bit(const struct common_glue_ctx *gctx,
				   struct blkcipher_desc *desc,
				   struct scatterlist *dst,
				   struct scatterlist *src,
				   unsigned int nbytes);

/* CTR mode via @gctx's fn_u.ctr helpers. */
extern int glue_ctr_crypt_128bit(const struct common_glue_ctx *gctx,
				 struct blkcipher_desc *desc,
				 struct scatterlist *dst,
				 struct scatterlist *src, unsigned int nbytes);
149
150extern int glue_xts_crypt_128bit(const struct common_glue_ctx *gctx,
151 struct blkcipher_desc *desc,
152 struct scatterlist *dst,
153 struct scatterlist *src, unsigned int nbytes,
154 common_glue_func_t tweak_fn, void *tweak_ctx,
155 void *crypt_ctx);
156
157extern int glue_xts_crypt_128bit(const struct common_glue_ctx *gctx,
158 struct blkcipher_desc *desc,
159 struct scatterlist *dst,
160 struct scatterlist *src, unsigned int nbytes,
161 common_glue_func_t tweak_fn, void *tweak_ctx,
162 void *crypt_ctx);
163
164extern int glue_xts_req_128bit(const struct common_glue_ctx *gctx,
165 struct skcipher_request *req,
166 common_glue_func_t tweak_fn, void *tweak_ctx,
167 void *crypt_ctx);
168
169extern void glue_xts_crypt_128bit_one(void *ctx, u128 *dst, const u128 *src,
170 le128 *iv, common_glue_func_t fn);
171
172#endif /* _CRYPTO_GLUE_HELPER_H */