2 * Scatterlist Cryptographic API.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
6 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
8 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
9 * and Nettle, by Niels Möller.
11 * This program is free software; you can redistribute it and/or modify it
12 * under the terms of the GNU General Public License as published by the Free
13 * Software Foundation; either version 2 of the License, or (at your option)
18 #include <linux/compiler.h>
19 #include <linux/init.h>
20 #include <linux/crypto.h>
21 #include <linux/errno.h>
22 #include <linux/kernel.h>
23 #include <linux/kmod.h>
24 #include <linux/rwsem.h>
25 #include <linux/slab.h>
26 #include <linux/string.h>
/* Global registry of algorithm implementations, guarded by crypto_alg_sem. */
LIST_HEAD(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
32 static inline int crypto_alg_get(struct crypto_alg *alg)
34 return try_module_get(alg->cra_module);
37 static inline void crypto_alg_put(struct crypto_alg *alg)
39 module_put(alg->cra_module);
42 static struct crypto_alg *crypto_alg_lookup(const char *name)
44 struct crypto_alg *q, *alg = NULL;
50 down_read(&crypto_alg_sem);
52 list_for_each_entry(q, &crypto_alg_list, cra_list) {
55 exact = !strcmp(q->cra_driver_name, name);
56 fuzzy = !strcmp(q->cra_name, name);
57 if (!exact && !(fuzzy && q->cra_priority > best))
60 if (unlikely(!crypto_alg_get(q)))
63 best = q->cra_priority;
72 up_read(&crypto_alg_sem);
/* A far more intelligent version of this is planned.  For now, just
 * try an exact match on the name of the algorithm.  If the lookup
 * fails, request_module() is tried once with the same name before
 * retrying the lookup (see try_then_request_module). */
static inline struct crypto_alg *crypto_alg_mod_lookup(const char *name)
{
	return try_then_request_module(crypto_alg_lookup(name), name);
}
83 static int crypto_init_flags(struct crypto_tfm *tfm, u32 flags)
85 tfm->crt_flags = flags & CRYPTO_TFM_REQ_MASK;
86 flags &= ~CRYPTO_TFM_REQ_MASK;
88 switch (crypto_tfm_alg_type(tfm)) {
89 case CRYPTO_ALG_TYPE_CIPHER:
90 return crypto_init_cipher_flags(tfm, flags);
92 case CRYPTO_ALG_TYPE_DIGEST:
93 return crypto_init_digest_flags(tfm, flags);
95 case CRYPTO_ALG_TYPE_COMPRESS:
96 return crypto_init_compress_flags(tfm, flags);
106 static int crypto_init_ops(struct crypto_tfm *tfm)
108 switch (crypto_tfm_alg_type(tfm)) {
109 case CRYPTO_ALG_TYPE_CIPHER:
110 return crypto_init_cipher_ops(tfm);
112 case CRYPTO_ALG_TYPE_DIGEST:
113 return crypto_init_digest_ops(tfm);
115 case CRYPTO_ALG_TYPE_COMPRESS:
116 return crypto_init_compress_ops(tfm);
126 static void crypto_exit_ops(struct crypto_tfm *tfm)
128 switch (crypto_tfm_alg_type(tfm)) {
129 case CRYPTO_ALG_TYPE_CIPHER:
130 crypto_exit_cipher_ops(tfm);
133 case CRYPTO_ALG_TYPE_DIGEST:
134 crypto_exit_digest_ops(tfm);
137 case CRYPTO_ALG_TYPE_COMPRESS:
138 crypto_exit_compress_ops(tfm);
147 static unsigned int crypto_ctxsize(struct crypto_alg *alg, int flags)
151 switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
155 case CRYPTO_ALG_TYPE_CIPHER:
156 len = crypto_cipher_ctxsize(alg, flags);
159 case CRYPTO_ALG_TYPE_DIGEST:
160 len = crypto_digest_ctxsize(alg, flags);
163 case CRYPTO_ALG_TYPE_COMPRESS:
164 len = crypto_compress_ctxsize(alg, flags);
168 return len + (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
171 struct crypto_tfm *crypto_alloc_tfm2(const char *name, u32 flags,
174 struct crypto_tfm *tfm = NULL;
175 struct crypto_alg *alg;
176 unsigned int tfm_size;
179 alg = crypto_alg_mod_lookup(name);
181 alg = crypto_alg_lookup(name);
186 tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, flags);
187 tfm = kzalloc(tfm_size, GFP_KERNEL);
191 tfm->__crt_alg = alg;
193 if (crypto_init_flags(tfm, flags))
196 if (crypto_init_ops(tfm))
199 if (alg->cra_init && alg->cra_init(tfm))
200 goto cra_init_failed;
205 crypto_exit_ops(tfm);
215 struct crypto_tfm *crypto_alloc_tfm(const char *name, u32 flags)
217 return crypto_alloc_tfm2(name, flags, 0);
220 void crypto_free_tfm(struct crypto_tfm *tfm)
222 struct crypto_alg *alg;
228 alg = tfm->__crt_alg;
229 size = sizeof(*tfm) + alg->cra_ctxsize;
233 crypto_exit_ops(tfm);
235 memset(tfm, 0, size);
239 static inline int crypto_set_driver_name(struct crypto_alg *alg)
241 static const char suffix[] = "-generic";
242 char *driver_name = alg->cra_driver_name;
248 len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
249 if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
250 return -ENAMETOOLONG;
252 memcpy(driver_name + len, suffix, sizeof(suffix));
256 int crypto_register_alg(struct crypto_alg *alg)
259 struct crypto_alg *q;
261 if (alg->cra_alignmask & (alg->cra_alignmask + 1))
264 if (alg->cra_alignmask & alg->cra_blocksize)
267 if (alg->cra_blocksize > PAGE_SIZE / 8)
270 if (alg->cra_priority < 0)
273 ret = crypto_set_driver_name(alg);
277 down_write(&crypto_alg_sem);
279 list_for_each_entry(q, &crypto_alg_list, cra_list) {
286 list_add(&alg->cra_list, &crypto_alg_list);
288 up_write(&crypto_alg_sem);
292 int crypto_unregister_alg(struct crypto_alg *alg)
295 struct crypto_alg *q;
297 BUG_ON(!alg->cra_module);
299 down_write(&crypto_alg_sem);
300 list_for_each_entry(q, &crypto_alg_list, cra_list) {
302 list_del(&alg->cra_list);
308 up_write(&crypto_alg_sem);
312 int crypto_alg_available(const char *name, u32 flags)
315 struct crypto_alg *alg = crypto_alg_mod_lookup(name);
325 static int __init init_crypto(void)
327 printk(KERN_INFO "Initializing Cryptographic API\n");
332 __initcall(init_crypto);
/* Public interface for algorithm providers and users (GPL-only). */
EXPORT_SYMBOL_GPL(crypto_register_alg);
EXPORT_SYMBOL_GPL(crypto_unregister_alg);
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
EXPORT_SYMBOL_GPL(crypto_free_tfm);
EXPORT_SYMBOL_GPL(crypto_alg_available);