/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */

#ifndef _LINUX_CRYPTO_H
#define _LINUX_CRYPTO_H

#include <asm/atomic.h>
#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/uaccess.h>

/*
 * Algorithm masks and types.
 */
#define CRYPTO_ALG_TYPE_MASK            0x0000000f
#define CRYPTO_ALG_TYPE_CIPHER          0x00000001
#define CRYPTO_ALG_TYPE_DIGEST          0x00000002
#define CRYPTO_ALG_TYPE_COMPRESS        0x00000004

#define CRYPTO_ALG_LARVAL               0x00000010

/*
 * Transform masks and values (for crt_flags).
 */
#define CRYPTO_TFM_MODE_MASK            0x000000ff
#define CRYPTO_TFM_REQ_MASK             0x000fff00
#define CRYPTO_TFM_RES_MASK             0xfff00000

#define CRYPTO_TFM_MODE_ECB             0x00000001
#define CRYPTO_TFM_MODE_CBC             0x00000002
#define CRYPTO_TFM_MODE_CFB             0x00000004
#define CRYPTO_TFM_MODE_CTR             0x00000008

#define CRYPTO_TFM_REQ_WEAK_KEY         0x00000100
#define CRYPTO_TFM_REQ_MAY_SLEEP        0x00000200
#define CRYPTO_TFM_RES_WEAK_KEY         0x00100000
#define CRYPTO_TFM_RES_BAD_KEY_LEN      0x00200000
#define CRYPTO_TFM_RES_BAD_KEY_SCHED    0x00400000
#define CRYPTO_TFM_RES_BAD_BLOCK_LEN    0x00800000
#define CRYPTO_TFM_RES_BAD_FLAGS        0x01000000

/*
 * Miscellaneous stuff.
 */
#define CRYPTO_UNSPEC                   0
#define CRYPTO_MAX_ALG_NAME             64

#define CRYPTO_DIR_ENCRYPT              1
#define CRYPTO_DIR_DECRYPT              0

/*
 * The macro CRYPTO_MINALIGN_ATTR (along with the void * type in the actual
 * declaration) is used to ensure that the crypto_tfm context structure is
 * aligned correctly for the given architecture so that there are no alignment
 * faults for C data types.  In particular, this is required on platforms such
 * as arm where pointers are 32-bit aligned but there are data types such as
 * u64 which require 64-bit alignment.
 */
#if defined(ARCH_KMALLOC_MINALIGN)
#define CRYPTO_MINALIGN ARCH_KMALLOC_MINALIGN
#elif defined(ARCH_SLAB_MINALIGN)
#define CRYPTO_MINALIGN ARCH_SLAB_MINALIGN
#endif

#ifdef CRYPTO_MINALIGN
#define CRYPTO_MINALIGN_ATTR __attribute__ ((__aligned__(CRYPTO_MINALIGN)))
#else
#define CRYPTO_MINALIGN_ATTR
#endif
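
/*
 * Illustrative sketch (not part of this header): because __crt_ctx below
 * carries CRYPTO_MINALIGN_ATTR, the per-transform context returned by
 * crypto_tfm_ctx() may safely hold types whose alignment is stricter than
 * that of a pointer (u64 on 32-bit arm, for example).  All my_* names here
 * are hypothetical:
 *
 *	struct my_cipher_ctx {
 *		u64 subkeys[16];
 *		u32 nrounds;
 *	};
 *
 *	static int my_setkey(struct crypto_tfm *tfm, const u8 *key,
 *	                     unsigned int keylen, u32 *flags)
 *	{
 *		struct my_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 *
 *		return my_expand_key(ctx, key, keylen);
 *	}
 */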

struct scatterlist;
struct crypto_tfm;

struct cipher_desc {
        struct crypto_tfm *tfm;
        void (*crfn)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        unsigned int (*prfn)(const struct cipher_desc *desc, u8 *dst,
                             const u8 *src, unsigned int nbytes);
};

/*
 * Algorithms: modular crypto algorithm implementations, managed
 * via crypto_register_alg() and crypto_unregister_alg().
 */
struct cipher_alg {
        unsigned int cia_min_keysize;
        unsigned int cia_max_keysize;
        int (*cia_setkey)(struct crypto_tfm *tfm, const u8 *key,
                          unsigned int keylen, u32 *flags);
        void (*cia_encrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);
        void (*cia_decrypt)(struct crypto_tfm *tfm, u8 *dst, const u8 *src);

        unsigned int (*cia_encrypt_ecb)(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes);
        unsigned int (*cia_decrypt_ecb)(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes);
        unsigned int (*cia_encrypt_cbc)(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes);
        unsigned int (*cia_decrypt_cbc)(const struct cipher_desc *desc,
                                        u8 *dst, const u8 *src,
                                        unsigned int nbytes);
};

struct digest_alg {
        unsigned int dia_digestsize;
        void (*dia_init)(struct crypto_tfm *tfm);
        void (*dia_update)(struct crypto_tfm *tfm, const u8 *data,
                           unsigned int len);
        void (*dia_final)(struct crypto_tfm *tfm, u8 *out);
        int (*dia_setkey)(struct crypto_tfm *tfm, const u8 *key,
                          unsigned int keylen, u32 *flags);
};

struct compress_alg {
        int (*coa_compress)(struct crypto_tfm *tfm, const u8 *src,
                            unsigned int slen, u8 *dst, unsigned int *dlen);
        int (*coa_decompress)(struct crypto_tfm *tfm, const u8 *src,
                              unsigned int slen, u8 *dst, unsigned int *dlen);
};

#define cra_cipher      cra_u.cipher
#define cra_digest      cra_u.digest
#define cra_compress    cra_u.compress

struct crypto_alg {
        struct list_head cra_list;

        u32 cra_flags;
        unsigned int cra_blocksize;
        unsigned int cra_ctxsize;
        unsigned int cra_alignmask;

        char cra_name[CRYPTO_MAX_ALG_NAME];
        char cra_driver_name[CRYPTO_MAX_ALG_NAME];

        union {
                struct cipher_alg cipher;
                struct digest_alg digest;
                struct compress_alg compress;
        } cra_u;

        int (*cra_init)(struct crypto_tfm *tfm);
        void (*cra_exit)(struct crypto_tfm *tfm);
        void (*cra_destroy)(struct crypto_alg *alg);

        struct module *cra_module;
};

/*
 * Algorithm registration interface.
 */
int crypto_register_alg(struct crypto_alg *alg);
int crypto_unregister_alg(struct crypto_alg *alg);
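
/*
 * Registration sketch (illustrative only; the "my_cipher" algorithm and all
 * my_* names are hypothetical).  A provider fills in a struct crypto_alg and
 * registers it from its module init hook, unregistering again on exit:
 *
 *	static struct crypto_alg my_alg = {
 *		.cra_name	= "my_cipher",
 *		.cra_flags	= CRYPTO_ALG_TYPE_CIPHER,
 *		.cra_blocksize	= 16,
 *		.cra_ctxsize	= sizeof(struct my_cipher_ctx),
 *		.cra_alignmask	= 3,
 *		.cra_module	= THIS_MODULE,
 *		.cra_list	= LIST_HEAD_INIT(my_alg.cra_list),
 *		.cra_u		= { .cipher = {
 *			.cia_min_keysize = 16,
 *			.cia_max_keysize = 32,
 *			.cia_setkey	 = my_setkey,
 *			.cia_encrypt	 = my_encrypt,
 *			.cia_decrypt	 = my_decrypt } }
 *	};
 *
 *	static int __init my_module_init(void)
 *	{
 *		return crypto_register_alg(&my_alg);
 *	}
 *
 *	static void __exit my_module_exit(void)
 *	{
 *		crypto_unregister_alg(&my_alg);
 *	}
 */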

/*
 * Algorithm query interface.
 */
#ifdef CONFIG_CRYPTO
int crypto_alg_available(const char *name, u32 flags);
#else
static inline int crypto_alg_available(const char *name, u32 flags)
{
        return 0;
}
#endif
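
/*
 * Example (sketch): callers can probe for an algorithm before relying on it,
 * e.g.
 *
 *	if (!crypto_alg_available("aes", 0))
 *		return -ENOENT;
 */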

/*
 * Transforms: user-instantiated objects which encapsulate algorithms
 * and core processing logic.  Managed via crypto_alloc_tfm() and
 * crypto_free_tfm(), as well as the various helpers below.
 */
struct cipher_tfm {
        void *cit_iv;
        unsigned int cit_ivsize;
        u32 cit_mode;
        int (*cit_setkey)(struct crypto_tfm *tfm,
                          const u8 *key, unsigned int keylen);
        int (*cit_encrypt)(struct crypto_tfm *tfm,
                           struct scatterlist *dst,
                           struct scatterlist *src,
                           unsigned int nbytes);
        int (*cit_encrypt_iv)(struct crypto_tfm *tfm,
                              struct scatterlist *dst,
                              struct scatterlist *src,
                              unsigned int nbytes, u8 *iv);
        int (*cit_decrypt)(struct crypto_tfm *tfm,
                           struct scatterlist *dst,
                           struct scatterlist *src,
                           unsigned int nbytes);
        int (*cit_decrypt_iv)(struct crypto_tfm *tfm,
                              struct scatterlist *dst,
                              struct scatterlist *src,
                              unsigned int nbytes, u8 *iv);
        void (*cit_xor_block)(u8 *dst, const u8 *src);
};

struct digest_tfm {
        void (*dit_init)(struct crypto_tfm *tfm);
        void (*dit_update)(struct crypto_tfm *tfm,
                           struct scatterlist *sg, unsigned int nsg);
        void (*dit_final)(struct crypto_tfm *tfm, u8 *out);
        void (*dit_digest)(struct crypto_tfm *tfm, struct scatterlist *sg,
                           unsigned int nsg, u8 *out);
        int (*dit_setkey)(struct crypto_tfm *tfm,
                          const u8 *key, unsigned int keylen);
#ifdef CONFIG_CRYPTO_HMAC
        void *dit_hmac_block;
#endif
};

struct compress_tfm {
        int (*cot_compress)(struct crypto_tfm *tfm,
                            const u8 *src, unsigned int slen,
                            u8 *dst, unsigned int *dlen);
        int (*cot_decompress)(struct crypto_tfm *tfm,
                              const u8 *src, unsigned int slen,
                              u8 *dst, unsigned int *dlen);
};

#define crt_cipher      crt_u.cipher
#define crt_digest      crt_u.digest
#define crt_compress    crt_u.compress

struct crypto_tfm {
        u32 crt_flags;

        union {
                struct cipher_tfm cipher;
                struct digest_tfm digest;
                struct compress_tfm compress;
        } crt_u;

        struct crypto_alg *__crt_alg;

        void *__crt_ctx[] CRYPTO_MINALIGN_ATTR;
};

struct crypto_attr_alg {
        char name[CRYPTO_MAX_ALG_NAME];
};

/*
 * Transform user interface.
 */

/*
 * crypto_alloc_tfm() will first attempt to locate an already loaded algorithm.
 * If that fails and the kernel supports dynamically loadable modules, it
 * will then attempt to load a module of the same name or alias.  A refcount
 * is grabbed on the algorithm which is then associated with the new transform.
 *
 * crypto_free_tfm() frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
struct crypto_tfm *crypto_alloc_tfm(const char *alg_name, u32 tfm_flags);
void crypto_free_tfm(struct crypto_tfm *tfm);
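
/*
 * Typical lifecycle (illustrative sketch; error handling trimmed).  The
 * algorithm is selected by name and the mode of operation by the
 * CRYPTO_TFM_MODE_* flags at allocation time:
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_CBC);
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *
 *	...use crypto_cipher_setkey(), crypto_cipher_encrypt_iv(), etc...
 *
 *	crypto_free_tfm(tfm);
 */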

/*
 * Transform helpers which query the underlying algorithm.
 */
static inline const char *crypto_tfm_alg_name(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_name;
}

static inline const char *crypto_tfm_alg_modname(struct crypto_tfm *tfm)
{
        return module_name(tfm->__crt_alg->cra_module);
}

static inline u32 crypto_tfm_alg_type(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_flags & CRYPTO_ALG_TYPE_MASK;
}

static inline unsigned int crypto_tfm_alg_min_keysize(struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return tfm->__crt_alg->cra_cipher.cia_min_keysize;
}

static inline unsigned int crypto_tfm_alg_max_keysize(struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return tfm->__crt_alg->cra_cipher.cia_max_keysize;
}

static inline unsigned int crypto_tfm_alg_ivsize(struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return tfm->crt_cipher.cit_ivsize;
}

static inline unsigned int crypto_tfm_alg_blocksize(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_blocksize;
}

static inline unsigned int crypto_tfm_alg_digestsize(struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
        return tfm->__crt_alg->cra_digest.dia_digestsize;
}

static inline unsigned int crypto_tfm_alg_alignmask(struct crypto_tfm *tfm)
{
        return tfm->__crt_alg->cra_alignmask;
}

static inline void *crypto_tfm_ctx(struct crypto_tfm *tfm)
{
        return tfm->__crt_ctx;
}

static inline unsigned int crypto_tfm_ctx_alignment(void)
{
        struct crypto_tfm *tfm;
        return __alignof__(tfm->__crt_ctx);
}

/*
 * API wrappers.
 */
static inline void crypto_digest_init(struct crypto_tfm *tfm)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
        tfm->crt_digest.dit_init(tfm);
}

static inline void crypto_digest_update(struct crypto_tfm *tfm,
                                        struct scatterlist *sg,
                                        unsigned int nsg)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
        tfm->crt_digest.dit_update(tfm, sg, nsg);
}

static inline void crypto_digest_final(struct crypto_tfm *tfm, u8 *out)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
        tfm->crt_digest.dit_final(tfm, out);
}

static inline void crypto_digest_digest(struct crypto_tfm *tfm,
                                        struct scatterlist *sg,
                                        unsigned int nsg, u8 *out)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
        tfm->crt_digest.dit_digest(tfm, sg, nsg, out);
}

static inline int crypto_digest_setkey(struct crypto_tfm *tfm,
                                       const u8 *key, unsigned int keylen)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_DIGEST);
        if (tfm->crt_digest.dit_setkey == NULL)
                return -ENOSYS;
        return tfm->crt_digest.dit_setkey(tfm, key, keylen);
}
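
/*
 * Digest usage sketch (illustrative; assumes a "sha1" provider, data in a
 * single linear buffer, sg_init_one() from <linux/scatterlist.h>, and md
 * sized to SHA-1's 20-byte digest):
 *
 *	struct scatterlist sg;
 *	u8 md[20];
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("sha1", 0);
 *
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *	sg_init_one(&sg, data, datalen);
 *	crypto_digest_digest(tfm, &sg, 1, md);
 *	crypto_free_tfm(tfm);
 */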

static inline int crypto_cipher_setkey(struct crypto_tfm *tfm,
                                       const u8 *key, unsigned int keylen)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return tfm->crt_cipher.cit_setkey(tfm, key, keylen);
}

static inline int crypto_cipher_encrypt(struct crypto_tfm *tfm,
                                        struct scatterlist *dst,
                                        struct scatterlist *src,
                                        unsigned int nbytes)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return tfm->crt_cipher.cit_encrypt(tfm, dst, src, nbytes);
}

static inline int crypto_cipher_encrypt_iv(struct crypto_tfm *tfm,
                                           struct scatterlist *dst,
                                           struct scatterlist *src,
                                           unsigned int nbytes, u8 *iv)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
        return tfm->crt_cipher.cit_encrypt_iv(tfm, dst, src, nbytes, iv);
}

static inline int crypto_cipher_decrypt(struct crypto_tfm *tfm,
                                        struct scatterlist *dst,
                                        struct scatterlist *src,
                                        unsigned int nbytes)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        return tfm->crt_cipher.cit_decrypt(tfm, dst, src, nbytes);
}

static inline int crypto_cipher_decrypt_iv(struct crypto_tfm *tfm,
                                           struct scatterlist *dst,
                                           struct scatterlist *src,
                                           unsigned int nbytes, u8 *iv)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        BUG_ON(tfm->crt_cipher.cit_mode == CRYPTO_TFM_MODE_ECB);
        return tfm->crt_cipher.cit_decrypt_iv(tfm, dst, src, nbytes, iv);
}

static inline void crypto_cipher_set_iv(struct crypto_tfm *tfm,
                                        const u8 *src, unsigned int len)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        memcpy(tfm->crt_cipher.cit_iv, src, len);
}

static inline void crypto_cipher_get_iv(struct crypto_tfm *tfm,
                                        u8 *dst, unsigned int len)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_CIPHER);
        memcpy(dst, tfm->crt_cipher.cit_iv, len);
}
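
/*
 * CBC encryption sketch (illustrative; the "aes" name, the key/iv/buf
 * variables and sg_init_one() are assumptions, the key here is 16 bytes,
 * and buflen must be a multiple of the cipher block size):
 *
 *	struct scatterlist sg;
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("aes", CRYPTO_TFM_MODE_CBC);
 *	int err;
 *
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *	err = crypto_cipher_setkey(tfm, key, 16);
 *	if (!err) {
 *		sg_init_one(&sg, buf, buflen);
 *		err = crypto_cipher_encrypt_iv(tfm, &sg, &sg, buflen, iv);
 *	}
 *	crypto_free_tfm(tfm);
 *	return err;
 */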

static inline int crypto_comp_compress(struct crypto_tfm *tfm,
                                       const u8 *src, unsigned int slen,
                                       u8 *dst, unsigned int *dlen)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
        return tfm->crt_compress.cot_compress(tfm, src, slen, dst, dlen);
}

static inline int crypto_comp_decompress(struct crypto_tfm *tfm,
                                         const u8 *src, unsigned int slen,
                                         u8 *dst, unsigned int *dlen)
{
        BUG_ON(crypto_tfm_alg_type(tfm) != CRYPTO_ALG_TYPE_COMPRESS);
        return tfm->crt_compress.cot_decompress(tfm, src, slen, dst, dlen);
}
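
/*
 * Compression usage sketch (illustrative; assumes a "deflate" provider and
 * caller-supplied src/dst buffers; dlen carries the destination capacity in
 * and the produced length out):
 *
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("deflate", 0);
 *	unsigned int dlen = dst_capacity;
 *	int err;
 *
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *	err = crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *	crypto_free_tfm(tfm);
 *	return err;
 */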

/*
 * HMAC support.
 */
#ifdef CONFIG_CRYPTO_HMAC
void crypto_hmac_init(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen);
void crypto_hmac_update(struct crypto_tfm *tfm,
                        struct scatterlist *sg, unsigned int nsg);
void crypto_hmac_final(struct crypto_tfm *tfm, u8 *key,
                       unsigned int *keylen, u8 *out);
void crypto_hmac(struct crypto_tfm *tfm, u8 *key, unsigned int *keylen,
                 struct scatterlist *sg, unsigned int nsg, u8 *out);
#endif  /* CONFIG_CRYPTO_HMAC */
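
/*
 * HMAC usage sketch (illustrative; assumes CONFIG_CRYPTO_HMAC, a "sha1"
 * provider, sg_init_one(), and a 20-byte key and output; note that the key
 * length is passed by reference):
 *
 *	struct scatterlist sg;
 *	unsigned int keylen = 20;
 *	u8 mac[20];
 *	struct crypto_tfm *tfm = crypto_alloc_tfm("sha1", 0);
 *
 *	if (tfm == NULL)
 *		return -ENOMEM;
 *	sg_init_one(&sg, data, datalen);
 *	crypto_hmac(tfm, key, &keylen, &sg, 1, mac);
 *	crypto_free_tfm(tfm);
 */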

#endif  /* _LINUX_CRYPTO_H */