/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>

struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
	u32 mask;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;

	int flags;
	unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}
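
/*
 * Usage sketch (editorial addition, not part of the original header):
 * a template module pairs crypto_register_template() with an ->alloc()
 * hook that resolves its underlying algorithm from the rtattr list and
 * wraps it in a new instance.  This mirrors the simple in-tree
 * templates (e.g. crypto/ecb.c) in Linux v2.6.34; the "example_*"
 * names are hypothetical.
 *
 *	static struct crypto_instance *example_alloc(struct rtattr **tb)
 *	{
 *		struct crypto_instance *inst;
 *		struct crypto_alg *alg;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
 *		if (err)
 *			return ERR_PTR(err);
 *
 *		alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
 *					  CRYPTO_ALG_TYPE_MASK);
 *		if (IS_ERR(alg))
 *			return ERR_CAST(alg);
 *
 *		inst = crypto_alloc_instance("example", alg);
 *		crypto_mod_put(alg);
 *		return inst;
 *	}
 *
 *	static void example_free(struct crypto_instance *inst)
 *	{
 *		crypto_drop_spawn(crypto_instance_ctx(inst));
 *		kfree(inst);
 *	}
 *
 *	static struct crypto_template example_tmpl = {
 *		.name = "example",
 *		.alloc = example_alloc,
 *		.free = example_free,
 *		.module = THIS_MODULE,
 *	};
 *
 * crypto_alloc_instance() names the instance "example(<alg>)", embeds
 * a crypto_spawn in the instance context and links it with
 * crypto_init_spawn(), which is why example_free() drops that spawn
 * before freeing the instance.
 */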
struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
						 u32 type, u32 mask)
{
	return crypto_attr_alg2(rta, NULL, type, mask);
}

int crypto_attr_u32(struct rtattr *rta, u32 *num);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);

/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
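
/*
 * Usage sketch (editorial addition): these walkers are consumed from a
 * blkcipher ->encrypt()/->decrypt() handler, which maps one contiguous
 * span of the scatterlists per pass and reports the unprocessed
 * remainder back to blkcipher_walk_done().  The pattern below follows
 * the v2.6.34 ecb template; "example_crypt" and the one_block() step
 * are hypothetical.
 *
 *	static int example_crypt(struct blkcipher_desc *desc,
 *				 struct scatterlist *dst,
 *				 struct scatterlist *src,
 *				 unsigned int nbytes)
 *	{
 *		unsigned int bsize = crypto_blkcipher_blocksize(desc->tfm);
 *		struct blkcipher_walk walk;
 *		int err;
 *
 *		blkcipher_walk_init(&walk, dst, src, nbytes);
 *		err = blkcipher_walk_virt(desc, &walk);
 *
 *		while ((nbytes = walk.nbytes)) {
 *			u8 *wsrc = walk.src.virt.addr;
 *			u8 *wdst = walk.dst.virt.addr;
 *
 *			do {
 *				one_block(wdst, wsrc);
 *				wsrc += bsize;
 *				wdst += bsize;
 *			} while ((nbytes -= bsize) >= bsize);
 *
 *			err = blkcipher_walk_done(desc, &walk, nbytes);
 *		}
 *
 *		return err;
 *	}
 *
 * blkcipher_walk_init() (defined later in this header) only seeds the
 * walk; blkcipher_walk_virt() maps the first span, and each
 * blkcipher_walk_done() call flushes it and advances to the next.
 */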
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *crypto_aead_alg_instance(
	struct crypto_aead *aead)
{
	return crypto_tfm_alg_instance(&aead->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_HASH;
	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}

#endif	/* _CRYPTO_ALGAPI_H */
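
/*
 * Usage sketch (editorial addition): crypto_queue backs the async
 * drivers' request queues.  crypto_enqueue_request() returns -EBUSY
 * once qlen reaches max_qlen, still accepting requests flagged
 * CRYPTO_TFM_REQ_MAY_BACKLOG on the backlog list, and a worker drains
 * the queue one request per pass, e.g. as cryptd does in v2.6.34.
 * "example_queue_worker" and example_process() are hypothetical, and
 * the locking around the queue is elided.
 *
 *	static void example_queue_worker(struct crypto_queue *queue)
 *	{
 *		struct crypto_async_request *req, *backlog;
 *
 *		backlog = crypto_get_backlog(queue);
 *		req = crypto_dequeue_request(queue);
 *		if (!req)
 *			return;
 *
 *		if (backlog)
 *			backlog->complete(backlog, -EINPROGRESS);
 *		req->complete(req, example_process(req));
 *	}
 *
 * The backlogged request is completed with -EINPROGRESS first so its
 * submitter learns the request has moved off the backlog and into the
 * queue proper.
 */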