/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
#ifndef _CRYPTO_ALGAPI_H
#define _CRYPTO_ALGAPI_H

#include <linux/crypto.h>
#include <linux/list.h>
#include <linux/kernel.h>
#include <linux/skbuff.h>

struct module;
struct rtattr;
struct seq_file;

struct crypto_type {
	unsigned int (*ctxsize)(struct crypto_alg *alg, u32 type, u32 mask);
	unsigned int (*extsize)(struct crypto_alg *alg);
	int (*init)(struct crypto_tfm *tfm, u32 type, u32 mask);
	int (*init_tfm)(struct crypto_tfm *tfm);
	void (*show)(struct seq_file *m, struct crypto_alg *alg);
	int (*report)(struct sk_buff *skb, struct crypto_alg *alg);
	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask);

	unsigned int type;
	unsigned int maskclear;
	unsigned int maskset;
	unsigned int tfmsize;
};
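/*
 * A &struct crypto_type is the "frontend" glue for one transform class:
 * ctxsize()/extsize() size the per-tfm context, init()/init_tfm() set up a
 * fresh transform, and show()/report() export algorithm details via procfs
 * and netlink.  A sketch of a frontend definition (field values are
 * illustrative, not copied from any real frontend; "my_*" names are
 * hypothetical):
 *
 *	static const struct crypto_type my_type = {
 *		.extsize   = my_extsize,
 *		.init_tfm  = my_init_tfm,
 *		.maskclear = ~CRYPTO_ALG_TYPE_MASK,
 *		.maskset   = CRYPTO_ALG_TYPE_MASK,
 *		.tfmsize   = offsetof(struct my_tfm, base),
 *	};
 */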

struct crypto_instance {
	struct crypto_alg alg;

	struct crypto_template *tmpl;
	struct hlist_node list;

	void *__ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_template {
	struct list_head list;
	struct hlist_head instances;
	struct module *module;

	struct crypto_instance *(*alloc)(struct rtattr **tb);
	void (*free)(struct crypto_instance *inst);
	int (*create)(struct crypto_template *tmpl, struct rtattr **tb);

	char name[CRYPTO_MAX_ALG_NAME];
};
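/*
 * A template ("cbc", "hmac", ...) builds new algorithm instances on top of
 * existing ones.  Older templates implement alloc()/free(); newer ones
 * implement create(), which registers the instance itself.  Skeleton of an
 * alloc()-style template (a hedged sketch; "my_*" names are hypothetical
 * and error handling is trimmed to the essentials):
 *
 *	static struct crypto_instance *my_alloc(struct rtattr **tb)
 *	{
 *		struct crypto_instance *inst;
 *		struct crypto_alg *alg;
 *
 *		alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
 *					  CRYPTO_ALG_TYPE_MASK);
 *		if (IS_ERR(alg))
 *			return ERR_CAST(alg);
 *
 *		inst = crypto_alloc_instance("my", alg);
 *		crypto_mod_put(alg);
 *		return inst;
 *	}
 *
 *	static struct crypto_template my_tmpl = {
 *		.name   = "my",
 *		.alloc  = my_alloc,
 *		.free   = my_free,
 *		.module = THIS_MODULE,
 *	};
 */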

struct crypto_spawn {
	struct list_head list;
	struct crypto_alg *alg;
	struct crypto_instance *inst;
	const struct crypto_type *frontend;
	u32 mask;
};

struct crypto_queue {
	struct list_head list;
	struct list_head *backlog;

	unsigned int qlen;
	unsigned int max_qlen;
};
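/*
 * A simple FIFO for asynchronous requests.  Once qlen reaches max_qlen,
 * further requests are accepted only if the caller set
 * CRYPTO_TFM_REQ_MAY_BACKLOG; backlog points at the first such backlogged
 * entry, or at &list itself when there is none (see crypto_get_backlog()
 * below).
 */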

struct scatter_walk {
	struct scatterlist *sg;
	unsigned int offset;
};

struct blkcipher_walk {
	union {
		struct {
			struct page *page;
			unsigned long offset;
		} phys;

		struct {
			u8 *page;
			u8 *addr;
		} virt;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;

	struct scatter_walk out;
	unsigned int total;

	void *page;
	u8 *buffer;
	u8 *iv;

	int flags;
	unsigned int blocksize;
};

struct ablkcipher_walk {
	struct {
		struct page *page;
		unsigned int offset;
	} src, dst;

	struct scatter_walk in;
	unsigned int nbytes;
	struct scatter_walk out;
	unsigned int total;
	struct list_head buffers;
	u8 *iv_buffer;
	u8 *iv;
	int flags;
	unsigned int blocksize;
};
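/*
 * Like blkcipher_walk, but for asynchronous requests: bounce buffers
 * allocated while walking misaligned data are kept on the buffers list and
 * are copied back and freed by ablkcipher_walk_complete() once the request
 * finishes.
 */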

extern const struct crypto_type crypto_ablkcipher_type;
extern const struct crypto_type crypto_aead_type;
extern const struct crypto_type crypto_blkcipher_type;

void crypto_mod_put(struct crypto_alg *alg);

int crypto_register_template(struct crypto_template *tmpl);
void crypto_unregister_template(struct crypto_template *tmpl);
struct crypto_template *crypto_lookup_template(const char *name);
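/*
 * Templates are typically registered from module init and removed on exit.
 * A minimal sketch, assuming the hypothetical my_tmpl definition above:
 *
 *	static int __init my_module_init(void)
 *	{
 *		return crypto_register_template(&my_tmpl);
 *	}
 *
 *	static void __exit my_module_exit(void)
 *	{
 *		crypto_unregister_template(&my_tmpl);
 *	}
 */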

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst);
int crypto_unregister_instance(struct crypto_alg *alg);

int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
		      struct crypto_instance *inst, u32 mask);
int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
		       struct crypto_instance *inst,
		       const struct crypto_type *frontend);

void crypto_drop_spawn(struct crypto_spawn *spawn);
struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask);
void *crypto_spawn_tfm2(struct crypto_spawn *spawn);

static inline void crypto_set_spawn(struct crypto_spawn *spawn,
				    struct crypto_instance *inst)
{
	spawn->inst = inst;
}

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb);
int crypto_check_attr_type(struct rtattr **tb, u32 type);
const char *crypto_attr_alg_name(struct rtattr *rta);
struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
				    const struct crypto_type *frontend,
				    u32 type, u32 mask);

static inline struct crypto_alg *crypto_attr_alg(struct rtattr *rta,
						 u32 type, u32 mask)
{
	return crypto_attr_alg2(rta, NULL, type, mask);
}
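/*
 * Attribute parsing in a template callback (illustrative sketch): tb[0]
 * carries the requested type/mask (see crypto_get_attr_type() and
 * crypto_check_attr_type()), while tb[1] usually names the underlying
 * algorithm:
 *
 *	const char *name = crypto_attr_alg_name(tb[1]);
 *	if (IS_ERR(name))
 *		return ERR_CAST(name);
 */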

int crypto_attr_u32(struct rtattr *rta, u32 *num);
void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
			     unsigned int head);
struct crypto_instance *crypto_alloc_instance(const char *name,
					      struct crypto_alg *alg);

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen);
int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request);
void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset);
struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue);
int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm);
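/*
 * Queue usage in a driver (illustrative sketch; locking omitted and "myq"
 * is hypothetical).  crypto_enqueue_request() returns -EINPROGRESS when the
 * request was queued, or -EBUSY when it was backlogged or rejected;
 * crypto_dequeue_request() returns NULL when the queue is empty:
 *
 *	crypto_init_queue(&myq, 50);
 *	err = crypto_enqueue_request(&myq, &req->base);
 *	async_req = crypto_dequeue_request(&myq);
 */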

/* These functions require the input/output to be aligned as u32. */
void crypto_inc(u8 *a, unsigned int size);
void crypto_xor(u8 *dst, const u8 *src, unsigned int size);
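/*
 * Example (illustrative): crypto_inc() steps a big-endian counter block, as
 * CTR-style modes do, and crypto_xor() computes dst ^= src in place:
 *
 *	u8 ctrblk[16] __aligned(4);
 *
 *	crypto_inc(ctrblk, 16);
 *	crypto_xor(dst, src, 16);
 */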

int blkcipher_walk_done(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk, int err);
int blkcipher_walk_virt(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_phys(struct blkcipher_desc *desc,
			struct blkcipher_walk *walk);
int blkcipher_walk_virt_block(struct blkcipher_desc *desc,
			      struct blkcipher_walk *walk,
			      unsigned int blocksize);
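/*
 * The canonical walk loop of a blkcipher implementation (a sketch; the
 * hypothetical process() stands in for whatever the cipher does per chunk,
 * and bsize is the cipher block size):
 *
 *	struct blkcipher_walk walk;
 *	int err;
 *
 *	blkcipher_walk_init(&walk, dst, src, nbytes);
 *	err = blkcipher_walk_virt(desc, &walk);
 *	while (walk.nbytes) {
 *		process(walk.dst.virt.addr, walk.src.virt.addr,
 *			walk.nbytes - walk.nbytes % bsize);
 *		err = blkcipher_walk_done(desc, &walk,
 *					  walk.nbytes % bsize);
 *	}
 *	return err;
 */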

int ablkcipher_walk_done(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk, int err);
int ablkcipher_walk_phys(struct ablkcipher_request *req,
			 struct ablkcipher_walk *walk);
void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);

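/*
 * Round the tfm context pointer up to the algorithm's alignment mask plus
 * one, e.g. an alignmask of 15 yields a 16-byte-aligned context pointer;
 * the context allocation is expected to include that much slack.
 */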
static inline void *crypto_tfm_ctx_aligned(struct crypto_tfm *tfm)
{
	return PTR_ALIGN(crypto_tfm_ctx(tfm),
			 crypto_tfm_alg_alignmask(tfm) + 1);
}

static inline struct crypto_instance *crypto_tfm_alg_instance(
	struct crypto_tfm *tfm)
{
	return container_of(tfm->__crt_alg, struct crypto_instance, alg);
}

static inline void *crypto_instance_ctx(struct crypto_instance *inst)
{
	return inst->__ctx;
}

static inline struct ablkcipher_alg *crypto_ablkcipher_alg(
	struct crypto_ablkcipher *tfm)
{
	return &crypto_ablkcipher_tfm(tfm)->__crt_alg->cra_ablkcipher;
}

static inline void *crypto_ablkcipher_ctx(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_ablkcipher_ctx_aligned(struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct aead_alg *crypto_aead_alg(struct crypto_aead *tfm)
{
	return &crypto_aead_tfm(tfm)->__crt_alg->cra_aead;
}

static inline void *crypto_aead_ctx(struct crypto_aead *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline struct crypto_instance *crypto_aead_alg_instance(
	struct crypto_aead *aead)
{
	return crypto_tfm_alg_instance(&aead->base);
}

static inline struct crypto_blkcipher *crypto_spawn_blkcipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_BLKCIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_blkcipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_blkcipher_ctx(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_blkcipher_ctx_aligned(struct crypto_blkcipher *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline struct crypto_cipher *crypto_spawn_cipher(
	struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_CIPHER;
	u32 mask = CRYPTO_ALG_TYPE_MASK;

	return __crypto_cipher_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline struct cipher_alg *crypto_cipher_alg(struct crypto_cipher *tfm)
{
	return &crypto_cipher_tfm(tfm)->__crt_alg->cra_cipher;
}

static inline struct crypto_hash *crypto_spawn_hash(struct crypto_spawn *spawn)
{
	u32 type = CRYPTO_ALG_TYPE_HASH;
	u32 mask = CRYPTO_ALG_TYPE_HASH_MASK;

	return __crypto_hash_cast(crypto_spawn_tfm(spawn, type, mask));
}

static inline void *crypto_hash_ctx(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx(&tfm->base);
}

static inline void *crypto_hash_ctx_aligned(struct crypto_hash *tfm)
{
	return crypto_tfm_ctx_aligned(&tfm->base);
}

static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
				       struct scatterlist *dst,
				       struct scatterlist *src,
				       unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
}

static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
					struct scatterlist *dst,
					struct scatterlist *src,
					unsigned int nbytes)
{
	walk->in.sg = src;
	walk->out.sg = dst;
	walk->total = nbytes;
	INIT_LIST_HEAD(&walk->buffers);
}

static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
{
	if (unlikely(!list_empty(&walk->buffers)))
		__ablkcipher_walk_complete(walk);
}

static inline struct crypto_async_request *crypto_get_backlog(
	struct crypto_queue *queue)
{
	return queue->backlog == &queue->list ? NULL :
	       container_of(queue->backlog, struct crypto_async_request, list);
}
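/*
 * Typical driver completion pattern: when one request finishes, pull the
 * next off the queue and, if a backlogged request just moved into the
 * active part of the queue, notify its originator that it is now in
 * progress (illustrative sketch; locking omitted, start_processing() is
 * hypothetical):
 *
 *	backlog = crypto_get_backlog(&myq);
 *	async_req = crypto_dequeue_request(&myq);
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 *	if (async_req)
 *		start_processing(async_req);
 */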

static inline int ablkcipher_enqueue_request(struct crypto_queue *queue,
					     struct ablkcipher_request *request)
{
	return crypto_enqueue_request(queue, &request->base);
}

static inline struct ablkcipher_request *ablkcipher_dequeue_request(
	struct crypto_queue *queue)
{
	return ablkcipher_request_cast(crypto_dequeue_request(queue));
}

static inline void *ablkcipher_request_ctx(struct ablkcipher_request *req)
{
	return req->__ctx;
}

static inline int ablkcipher_tfm_in_queue(struct crypto_queue *queue,
					  struct crypto_ablkcipher *tfm)
{
	return crypto_tfm_in_queue(queue, crypto_ablkcipher_tfm(tfm));
}

static inline void *aead_request_ctx(struct aead_request *req)
{
	return req->__ctx;
}

static inline void aead_request_complete(struct aead_request *req, int err)
{
	req->base.complete(&req->base, err);
}

static inline u32 aead_request_flags(struct aead_request *req)
{
	return req->base.flags;
}

static inline struct crypto_alg *crypto_get_attr_alg(struct rtattr **tb,
						     u32 type, u32 mask)
{
	return crypto_attr_alg(tb[1], type, mask);
}

/*
 * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms.
 * Otherwise returns zero.
 */
static inline int crypto_requires_sync(u32 type, u32 mask)
{
	return (type ^ CRYPTO_ALG_ASYNC) & mask & CRYPTO_ALG_ASYNC;
}
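/*
 * Worked example: a caller passing type = 0, mask = CRYPTO_ALG_ASYNC is
 * asking for a synchronous implementation, so (type ^ CRYPTO_ALG_ASYNC)
 * has the ASYNC bit set and crypto_requires_sync() returns CRYPTO_ALG_ASYNC
 * (non-zero).  With mask = 0 the caller does not care either way and the
 * result is zero.
 */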

#endif /* _CRYPTO_ALGAPI_H */