/*
 * Cryptographic API.
 *
 * Cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */
15
16#include <linux/kernel.h>
17#include <linux/crypto.h>
18#include <linux/errno.h>
19#include <linux/slab.h>
20#include <linux/string.h>
21#include "internal.h"
22
/*
 * setkey_unaligned - setkey() helper for a misaligned key pointer.
 *
 * Copies @key into a freshly allocated buffer whose payload is aligned to
 * the algorithm's alignmask, invokes ->cia_setkey() on the aligned copy,
 * then clears and frees the copy.  Returns whatever ->cia_setkey() returns,
 * or -ENOMEM if the bounce buffer cannot be allocated.
 */
static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
	int ret;
	u8 *buffer, *alignbuffer;
	unsigned long absize;

	/* Over-allocate by alignmask so an aligned payload always fits. */
	absize = keylen + alignmask;
	buffer = kmalloc(absize, GFP_ATOMIC);
	if (!buffer)
		return -ENOMEM;

	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = cia->cia_setkey(tfm, alignbuffer, keylen);
	/* NOTE(review): this memset is a dead store the compiler is allowed
	 * to elide, so the key copy may not actually be scrubbed; a wiping
	 * free (kzfree/kfree_sensitive) would guarantee it — confirm which
	 * helper this kernel version provides. */
	memset(alignbuffer, 0, keylen);
	kfree(buffer);
	return ret;

}
45
46static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
47{
48 struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
49 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
50
51 tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
52 if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
53 tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
54 return -EINVAL;
55 }
56
57 if ((unsigned long)key & alignmask)
58 return setkey_unaligned(tfm, key, keylen);
59
60 return cia->cia_setkey(tfm, key, keylen);
61}
62
/*
 * cipher_crypt_unaligned - run one block through @fn via an aligned
 * on-stack bounce buffer, for callers whose src/dst violate the alignmask.
 */
static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *,
					      const u8 *),
				   struct crypto_tfm *tfm,
				   u8 *dst, const u8 *src)
{
	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
	unsigned int size = crypto_tfm_alg_blocksize(tfm);
	/* NOTE(review): variable-length array on the kernel stack, sized by
	 * the algorithm's blocksize + alignmask; later kernels replaced this
	 * with a fixed MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK buffer —
	 * consider backporting if those constants are available. */
	u8 buffer[size + alignmask];
	u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);

	/* Copy the block into the aligned scratch area, transform it in
	 * place, then copy the result out to the caller's destination. */
	memcpy(tmp, src, size);
	fn(tfm, tmp, tmp);
	memcpy(dst, tmp, size);
}
77
78static void cipher_encrypt_unaligned(struct crypto_tfm *tfm,
79 u8 *dst, const u8 *src)
80{
81 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
82 struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
83
84 if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
85 cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src);
86 return;
87 }
88
89 cipher->cia_encrypt(tfm, dst, src);
90}
91
92static void cipher_decrypt_unaligned(struct crypto_tfm *tfm,
93 u8 *dst, const u8 *src)
94{
95 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
96 struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
97
98 if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
99 cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src);
100 return;
101 }
102
103 cipher->cia_decrypt(tfm, dst, src);
104}
105
106int crypto_init_cipher_ops(struct crypto_tfm *tfm)
107{
108 struct cipher_tfm *ops = &tfm->crt_cipher;
109 struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
110
111 ops->cit_setkey = setkey;
112 ops->cit_encrypt_one = crypto_tfm_alg_alignmask(tfm) ?
113 cipher_encrypt_unaligned : cipher->cia_encrypt;
114 ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ?
115 cipher_decrypt_unaligned : cipher->cia_decrypt;
116
117 return 0;
118}
119
/*
 * crypto_exit_cipher_ops - teardown counterpart of crypto_init_cipher_ops().
 *
 * crypto_init_cipher_ops() allocates no per-transform resources, so there
 * is nothing to release here; intentionally a no-op.
 */
void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
{
}
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API.
 *
 * Single-block cipher operations.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 */
10
11#include <crypto/algapi.h>
12#include <crypto/internal/cipher.h>
13#include <linux/kernel.h>
14#include <linux/crypto.h>
15#include <linux/errno.h>
16#include <linux/slab.h>
17#include <linux/string.h>
18#include "internal.h"
19
20static int setkey_unaligned(struct crypto_cipher *tfm, const u8 *key,
21 unsigned int keylen)
22{
23 struct cipher_alg *cia = crypto_cipher_alg(tfm);
24 unsigned long alignmask = crypto_cipher_alignmask(tfm);
25 int ret;
26 u8 *buffer, *alignbuffer;
27 unsigned long absize;
28
29 absize = keylen + alignmask;
30 buffer = kmalloc(absize, GFP_ATOMIC);
31 if (!buffer)
32 return -ENOMEM;
33
34 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
35 memcpy(alignbuffer, key, keylen);
36 ret = cia->cia_setkey(crypto_cipher_tfm(tfm), alignbuffer, keylen);
37 kfree_sensitive(buffer);
38 return ret;
39
40}
41
42int crypto_cipher_setkey(struct crypto_cipher *tfm,
43 const u8 *key, unsigned int keylen)
44{
45 struct cipher_alg *cia = crypto_cipher_alg(tfm);
46 unsigned long alignmask = crypto_cipher_alignmask(tfm);
47
48 if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize)
49 return -EINVAL;
50
51 if ((unsigned long)key & alignmask)
52 return setkey_unaligned(tfm, key, keylen);
53
54 return cia->cia_setkey(crypto_cipher_tfm(tfm), key, keylen);
55}
56EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, "CRYPTO_INTERNAL");
57
58static inline void cipher_crypt_one(struct crypto_cipher *tfm,
59 u8 *dst, const u8 *src, bool enc)
60{
61 unsigned long alignmask = crypto_cipher_alignmask(tfm);
62 struct cipher_alg *cia = crypto_cipher_alg(tfm);
63 void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
64 enc ? cia->cia_encrypt : cia->cia_decrypt;
65
66 if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
67 unsigned int bs = crypto_cipher_blocksize(tfm);
68 u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
69 u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
70
71 memcpy(tmp, src, bs);
72 fn(crypto_cipher_tfm(tfm), tmp, tmp);
73 memcpy(dst, tmp, bs);
74 } else {
75 fn(crypto_cipher_tfm(tfm), dst, src);
76 }
77}
78
/**
 * crypto_cipher_encrypt_one() - encrypt one block of plaintext
 * @tfm: cipher handle
 * @dst: destination buffer for the ciphertext block
 * @src: plaintext block to encrypt
 *
 * Thin wrapper: dispatches to cipher_crypt_one() with enc == true.
 */
void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src)
{
	cipher_crypt_one(tfm, dst, src, true);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_encrypt_one, "CRYPTO_INTERNAL");
85
/**
 * crypto_cipher_decrypt_one() - decrypt one block of ciphertext
 * @tfm: cipher handle
 * @dst: destination buffer for the plaintext block
 * @src: ciphertext block to decrypt
 *
 * Thin wrapper: dispatches to cipher_crypt_one() with enc == false.
 */
void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
			       u8 *dst, const u8 *src)
{
	cipher_crypt_one(tfm, dst, src, false);
}
EXPORT_SYMBOL_NS_GPL(crypto_cipher_decrypt_one, "CRYPTO_INTERNAL");
92
/*
 * crypto_clone_cipher - allocate a new transform backed by the same
 * algorithm as @cipher, carrying over the original's crt_flags.
 *
 * Returns the new handle, or an ERR_PTR: -ENOSYS if the algorithm cannot
 * be cloned, -ESTALE if its module reference cannot be taken, or the
 * allocation error from __crypto_alloc_tfmgfp().
 */
struct crypto_cipher *crypto_clone_cipher(struct crypto_cipher *cipher)
{
	struct crypto_tfm *tfm = crypto_cipher_tfm(cipher);
	struct crypto_alg *alg = tfm->__crt_alg;
	struct crypto_cipher *ncipher;
	struct crypto_tfm *ntfm;

	/* An algorithm with a cra_init hook may set up per-tfm state that a
	 * flag copy cannot reproduce, so refuse to clone it. */
	if (alg->cra_init)
		return ERR_PTR(-ENOSYS);

	/* Pin the algorithm (and its module) for the clone's lifetime;
	 * fails if the algorithm is going away. */
	if (unlikely(!crypto_mod_get(alg)))
		return ERR_PTR(-ESTALE);

	/* GFP_ATOMIC — presumably callers may be in non-sleeping context;
	 * TODO(review) confirm against callers. */
	ntfm = __crypto_alloc_tfmgfp(alg, CRYPTO_ALG_TYPE_CIPHER,
				     CRYPTO_ALG_TYPE_MASK, GFP_ATOMIC);
	if (IS_ERR(ntfm)) {
		crypto_mod_put(alg);	/* undo the reference taken above */
		return ERR_CAST(ntfm);
	}

	/* Carry the original transform's flags over to the clone. */
	ntfm->crt_flags = tfm->crt_flags;

	ncipher = __crypto_cipher_cast(ntfm);

	return ncipher;
}
EXPORT_SYMBOL_GPL(crypto_clone_cipher);