/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>

struct module;
struct rtattr;
struct seq_file;

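/*
 * struct crypto_type - per-type frontend operations.
 *
 * Each transform type (e.g. the ablkcipher, aead and blkcipher frontends
 * declared below) provides one of these so that the core can size, allocate
 * and initialise tfm objects and report the algorithm via /proc/crypto.
 */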
struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};

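/*
 * struct crypto_instance - an algorithm spawned from a template.
 *
 * The embedded crypto_alg is what gets registered with the core; @tmpl and
 * @list link the instance back to the template that created it.  __ctx holds
 * instance-private data, laid out with the minimum crypto alignment.
 */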
struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

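/*
 * struct crypto_template - recipe for building instances.
 *
 * A template (such as "cbc" or "hmac") turns existing algorithms into new
 * ones.  Older templates implement ->alloc()/->free(); newer ones implement
 * ->create(), which constructs and registers the instance itself.
 */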
struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};

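/*
 * struct crypto_spawn - an instance's reference to an underlying algorithm.
 *
 * A spawn pins @alg for as long as @inst uses it and records the type mask
 * (and optional frontend) with which tfms should later be created from it.
 */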
struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
	u32 mask;
};

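/*
 * struct crypto_queue - simple request queue with backlog support.
 *
 * Requests beyond @max_qlen are still accepted when the caller sets
 * CRYPTO_TFM_REQ_MAY_BACKLOG, but crypto_enqueue_request() then returns
 * -EBUSY so the caller knows to stop submitting.
 */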
struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};

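/* Current position within a scatterlist: the entry and the offset into it. */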
struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

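/*
 * struct blkcipher_walk - per-request state for walking the source and
 * destination scatterlists of a synchronous block cipher.  The walk hands
 * back contiguous chunks (as pages or mapped addresses) so the cipher can
 * process data without caring about scatterlist boundaries.
 */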
struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;

	int flags;
	unsigned int blocksize;
};

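/*
 * As above, but for asynchronous block ciphers; extra buffers needed while
 * walking are tracked on @buffers and released by ablkcipher_walk_complete().
 */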
struct ablkcipher_walk {
	struct {
		struct page *page;
		unsigned int offset;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;
	struct scatter_walk out;
	unsigned int total;
	struct list_head buffers;
	u8 *iv_buffer;
	u8 *iv;
	int flags;
	unsigned int blocksize;
};

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

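/*
 * Template registration.  A module typically registers its template from its
 * init function; illustrative sketch only (the "xyz" names below are made up
 * and not part of this header):
 *
 *	static struct crypto_template xyz_tmpl = {
 *		.name	= "xyz",
 *		.create	= xyz_create,
 *		.module	= THIS_MODULE,
 *	};
 *
 *	static int __init xyz_module_init(void)
 *	{
 *		return crypto_register_template(&xyz_tmpl);
 *	}
 *
 *	static void __exit xyz_module_exit(void)
 *	{
 *		crypto_unregister_template(&xyz_tmpl);
 *	}
 */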
int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);

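/*
 * Spawn management: crypto_init_spawn() takes a reference on @alg on behalf
 * of @inst, crypto_spawn_tfm() later creates a tfm from it, and
 * crypto_drop_spawn() releases the reference when the instance goes away.
 */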
int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}

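/*
 * Helpers for parsing the rtattr-encoded parameters that the core passes to
 * a template's ->alloc()/->create() callback (algorithm names, type/mask
 * constraints and u32 arguments).
 */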
struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
						 u32 type, u32 mask)
{
	return crypto_attr_alg2(rta, NULL, type, mask);
}

int crypto_attr_u32(struct rtattr *rta, u32 *num);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

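/*
 * Request queue helpers, typically used by drivers that service requests
 * from a dedicated thread or completion handler.
 */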
void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);

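/*
 * crypto_inc() increments a big-endian counter of @size bytes (as used for
 * CTR mode); crypto_xor() XORs @size bytes of @src into @dst.
 */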
/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);

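/*
 * Scatterlist walk API for blkcipher implementations.  The usual pattern
 * (illustrative sketch only; process() is a made-up name, not part of this
 * header) is:
 *
 *	struct blkcipher_walk walk;
 *	unsigned int nbytes;
 *	int err;
 *
 *	blkcipher_walk_init(&walk, dst, src, total);
 *	err = blkcipher_walk_virt(desc, &walk);
 *	while ((nbytes = walk.nbytes)) {
 *		nbytes = process(walk.src.virt.addr, walk.dst.virt.addr,
 *				 nbytes);	// returns bytes left over
 *		err = blkcipher_walk_done(desc, &walk, nbytes);
 *	}
 *	return err;
 *
 * The ablkcipher_walk_*() variants below play the same role for asynchronous
 * block ciphers.
 */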
int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);

int ablkcipher_walk_done(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);

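/*
 * Context accessors.  crypto_tfm_ctx_aligned() rounds the context pointer up
 * to the algorithm's alignment mask, for ciphers that require their state to
 * be suitably aligned.
 */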
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *crypto_aead_alg_instance(
	struct crypto_aead *aead)
{
	return crypto_tfm_alg_instance(&aead->base);
}

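/*
 * Typed wrappers around crypto_spawn_tfm(): instantiate the spawned
 * algorithm as a blkcipher, cipher or hash transform respectively.
 */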
static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_HASH;
	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
	INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
	if (unlikely(!list_empty(&walk->buffers)))
		__ablkcipher_walk_complete(walk);
}

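/*
 * Returns the oldest backlogged request on @queue, or NULL if nothing has
 * been backlogged.  Drivers typically complete it with -EINPROGRESS to tell
 * the submitter that it has moved off the backlog and is now being handled.
 */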
static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC (non-zero) if the type/mask pair requests a
 * synchronous algorithm, i.e. the caller cleared CRYPTO_ALG_ASYNC in @type
 * while keeping it in @mask.  Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}

#endif /* _CRYPTO_ALGAPI_H */