/*
 * CBC: Cipher Block Chaining mode
 *
 * Copyright (c) 2006-2016 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <crypto/cbc.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/log2.h>
#include <linux/module.h>
#include <linux/slab.h>

struct crypto_cbc_ctx {
	struct crypto_cipher *child;
};

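/*
 * CBC has no key material of its own: the key is simply forwarded to the
 * underlying single-block cipher, with the request flags propagated down
 * and the result flags propagated back up to the wrapping skcipher.
 */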
static int crypto_cbc_setkey(struct crypto_skcipher *parent, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_cbc_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);
	return err;
}

static inline void crypto_cbc_encrypt_one(struct crypto_skcipher *tfm,
					  const u8 *src, u8 *dst)
{
	struct crypto_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_cipher_encrypt_one(ctx->child, dst, src);
}

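/*
 * Encryption is delegated to the generic helper in <crypto/cbc.h>:
 * crypto_cbc_encrypt_walk() walks the request and, for every block, XORs
 * the plaintext with the previous ciphertext block (the IV for the first
 * block) before calling the single-block encrypt callback above.
 */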
static int crypto_cbc_encrypt(struct skcipher_request *req)
{
	return crypto_cbc_encrypt_walk(req, crypto_cbc_encrypt_one);
}

static inline void crypto_cbc_decrypt_one(struct crypto_skcipher *tfm,
					  const u8 *src, u8 *dst)
{
	struct crypto_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_cipher_decrypt_one(ctx->child, dst, src);
}

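/*
 * Decryption walks the request explicitly and hands each chunk to the
 * crypto_cbc_decrypt_blocks() helper from <crypto/cbc.h>, which decrypts
 * every block and XORs it with the preceding ciphertext block (the IV for
 * the first block).
 */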
static int crypto_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		err = crypto_cbc_decrypt_blocks(&walk, tfm,
						crypto_cbc_decrypt_one);
		err = skcipher_walk_done(&walk, err);
	}

	return err;
}

static int crypto_cbc_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void crypto_cbc_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static void crypto_cbc_free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}

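/*
 * Build a "cbc(<cipher>)" instance around any registered single-block
 * cipher.  The block size must be a power of two because the CBC helpers
 * locate partial blocks by masking the byte count with (bsize - 1).
 */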
static int crypto_cbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_attr_type *algt;
	struct crypto_spawn *spawn;
	struct crypto_alg *alg;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	algt = crypto_get_attr_type(tb);
	err = PTR_ERR(algt);
	if (IS_ERR(algt))
		goto err_free_inst;

	mask = CRYPTO_ALG_TYPE_MASK |
		crypto_requires_off(algt->type, algt->mask,
				    CRYPTO_ALG_NEED_FALLBACK);

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER, mask);
	err = PTR_ERR(alg);
	if (IS_ERR(alg))
		goto err_free_inst;

	spawn = skcipher_instance_ctx(inst);
	err = crypto_init_spawn(spawn, alg, skcipher_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	crypto_mod_put(alg);
	if (err)
		goto err_free_inst;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "cbc", alg);
	if (err)
		goto err_drop_spawn;

	err = -EINVAL;
	if (!is_power_of_2(alg->cra_blocksize))
		goto err_drop_spawn;

	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;
	inst->alg.base.cra_alignmask = alg->cra_alignmask;

	inst->alg.ivsize = alg->cra_blocksize;
	inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_cbc_ctx);

	inst->alg.init = crypto_cbc_init_tfm;
	inst->alg.exit = crypto_cbc_exit_tfm;

	inst->alg.setkey = crypto_cbc_setkey;
	inst->alg.encrypt = crypto_cbc_encrypt;
	inst->alg.decrypt = crypto_cbc_decrypt;

	inst->free = crypto_cbc_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_spawn(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_cbc_tmpl = {
	.name = "cbc",
	.create = crypto_cbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_cbc_module_init(void)
{
	return crypto_register_template(&crypto_cbc_tmpl);
}

static void __exit crypto_cbc_module_exit(void)
{
	crypto_unregister_template(&crypto_cbc_tmpl);
}

module_init(crypto_cbc_module_init);
module_exit(crypto_cbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CBC block cipher algorithm");
MODULE_ALIAS_CRYPTO("cbc");
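
/*
 * The implementation below is the later form of this template: the generic
 * <crypto/cbc.h> helpers and the private child-cipher context are gone, the
 * CBC walks are open-coded, and the spawn/setkey/init/exit boilerplate is
 * handled by the "simple" skcipher instance helpers.
 */
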
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CBC: Cipher Block Chaining mode
 *
 * Copyright (c) 2006-2016 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/cipher.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/log2.h>
#include <linux/module.h>

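/*
 * Encrypt to a separate destination buffer: C[i] = E(K, P[i] ^ C[i-1]),
 * with the IV standing in for C[-1].  walk->iv always holds the current
 * chaining value and ends up containing the last ciphertext block.
 */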
static int crypto_cbc_encrypt_segment(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;
	u8 *iv = walk->iv;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_encrypt;

	do {
		crypto_xor(iv, src, bsize);
		fn(tfm, dst, iv);
		memcpy(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

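/*
 * In-place variant: each plaintext block is XORed with the chaining value
 * and encrypted on top of itself, so the freshly written ciphertext block
 * can serve directly as the IV for the next block.
 */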
static int crypto_cbc_encrypt_inplace(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;
	u8 *iv = walk->iv;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_encrypt;

	do {
		crypto_xor(src, iv, bsize);
		fn(tfm, src, src);
		iv = src;

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

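/*
 * Top-level encrypt: walk the request and pick the in-place or
 * separate-destination loop for each chunk the walker hands back.
 */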
static int crypto_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			err = crypto_cbc_encrypt_inplace(&walk, skcipher);
		else
			err = crypto_cbc_encrypt_segment(&walk, skcipher);
		err = skcipher_walk_done(&walk, err);
	}

	return err;
}

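/*
 * Decrypt to a separate destination buffer: P[i] = D(K, C[i]) ^ C[i-1],
 * with the IV standing in for C[-1].  The last ciphertext block is copied
 * back into walk->iv as the chaining value for the next chunk.
 */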
static int crypto_cbc_decrypt_segment(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;
	u8 *iv = walk->iv;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_decrypt;

	do {
		fn(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		iv = src;

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

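/*
 * In-place decryption has to run from the last block back to the first:
 * decrypting a block in place destroys the ciphertext that the following
 * block needs as its chaining value, so each block is XORed with its
 * still-intact predecessor before stepping backwards.  The final ciphertext
 * block is saved up front so it can become the IV for the next chunk.
 */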
static int crypto_cbc_decrypt_inplace(struct skcipher_walk *walk,
				      struct crypto_skcipher *skcipher)
{
	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 last_iv[MAX_CIPHER_BLOCKSIZE];
	struct crypto_cipher *cipher;
	struct crypto_tfm *tfm;

	cipher = skcipher_cipher_simple(skcipher);
	tfm = crypto_cipher_tfm(cipher);
	fn = crypto_cipher_alg(cipher)->cia_decrypt;

	/* Start of the last block. */
	src += nbytes - (nbytes & (bsize - 1)) - bsize;
	memcpy(last_iv, src, bsize);

	for (;;) {
		fn(tfm, src, src);
		if ((nbytes -= bsize) < bsize)
			break;
		crypto_xor(src, src - bsize, bsize);
		src -= bsize;
	}

	crypto_xor(src, walk->iv, bsize);
	memcpy(walk->iv, last_iv, bsize);

	return nbytes;
}

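/*
 * Top-level decrypt: same walk and dispatch structure as crypto_cbc_encrypt().
 */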
static int crypto_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			err = crypto_cbc_decrypt_inplace(&walk, skcipher);
		else
			err = crypto_cbc_decrypt_segment(&walk, skcipher);
		err = skcipher_walk_done(&walk, err);
	}

	return err;
}

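/*
 * skcipher_alloc_instance_simple() does the heavy lifting: it grabs the
 * underlying cipher and fills in key handling, block size, IV size and the
 * default init/exit/free callbacks, leaving only the encrypt/decrypt hooks
 * and the power-of-two block size check to this template.
 */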
static int crypto_cbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	err = -EINVAL;
	if (!is_power_of_2(alg->cra_blocksize))
		goto out_free_inst;

	inst->alg.encrypt = crypto_cbc_encrypt;
	inst->alg.decrypt = crypto_cbc_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

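/*
 * Registered as the "cbc" template; users instantiate it around a block
 * cipher by name, e.g.:
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 */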
static struct crypto_template crypto_cbc_tmpl = {
	.name = "cbc",
	.create = crypto_cbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_cbc_module_init(void)
{
	return crypto_register_template(&crypto_cbc_tmpl);
}

static void __exit crypto_cbc_module_exit(void)
{
	crypto_unregister_template(&crypto_cbc_tmpl);
}

subsys_initcall(crypto_cbc_module_init);
module_exit(crypto_cbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CBC block cipher mode of operation");
MODULE_ALIAS_CRYPTO("cbc");
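
/* The low-level cipher API behind <crypto/internal/cipher.h> is exported in
 * the CRYPTO_INTERNAL symbol namespace, hence the explicit import here. */
MODULE_IMPORT_NS(CRYPTO_INTERNAL);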