v4.6
  1/*
  2 * CBC: Cipher Block Chaining mode
  3 *
  4 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
  5 *
  6 * This program is free software; you can redistribute it and/or modify it
  7 * under the terms of the GNU General Public License as published by the Free
  8 * Software Foundation; either version 2 of the License, or (at your option)
  9 * any later version.
 10 *
 11 */
 12
  13#include <crypto/algapi.h>
 14#include <linux/err.h>
 15#include <linux/init.h>
 16#include <linux/kernel.h>
 17#include <linux/log2.h>
 18#include <linux/module.h>
 19#include <linux/scatterlist.h>
 20#include <linux/slab.h>
 21
 22struct crypto_cbc_ctx {
 23	struct crypto_cipher *child;
 24};
 25
 26static int crypto_cbc_setkey(struct crypto_tfm *parent, const u8 *key,
 27			     unsigned int keylen)
 28{
 29	struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(parent);
 30	struct crypto_cipher *child = ctx->child;
 31	int err;
 32
 33	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
 34	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
 35				       CRYPTO_TFM_REQ_MASK);
 36	err = crypto_cipher_setkey(child, key, keylen);
 37	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
 38				     CRYPTO_TFM_RES_MASK);
 39	return err;
 40}
 41
 42static int crypto_cbc_encrypt_segment(struct blkcipher_desc *desc,
 43				      struct blkcipher_walk *walk,
 44				      struct crypto_cipher *tfm)
 45{
 46	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
 47		crypto_cipher_alg(tfm)->cia_encrypt;
 48	int bsize = crypto_cipher_blocksize(tfm);
 49	unsigned int nbytes = walk->nbytes;
 50	u8 *src = walk->src.virt.addr;
 51	u8 *dst = walk->dst.virt.addr;
 
 
 52	u8 *iv = walk->iv;
  53
 54	do {
 55		crypto_xor(iv, src, bsize);
 56		fn(crypto_cipher_tfm(tfm), dst, iv);
 57		memcpy(iv, dst, bsize);
 58
 59		src += bsize;
 60		dst += bsize;
 61	} while ((nbytes -= bsize) >= bsize);
 62
 63	return nbytes;
 64}
 65
 66static int crypto_cbc_encrypt_inplace(struct blkcipher_desc *desc,
 67				      struct blkcipher_walk *walk,
 68				      struct crypto_cipher *tfm)
 69{
 70	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
 71		crypto_cipher_alg(tfm)->cia_encrypt;
 72	int bsize = crypto_cipher_blocksize(tfm);
 73	unsigned int nbytes = walk->nbytes;
  74	u8 *src = walk->src.virt.addr;
 75	u8 *iv = walk->iv;
  76
 77	do {
 78		crypto_xor(src, iv, bsize);
 79		fn(crypto_cipher_tfm(tfm), src, src);
 80		iv = src;
 81
 82		src += bsize;
 83	} while ((nbytes -= bsize) >= bsize);
 84
 85	memcpy(walk->iv, iv, bsize);
 86
 87	return nbytes;
 88}
 89
 90static int crypto_cbc_encrypt(struct blkcipher_desc *desc,
 91			      struct scatterlist *dst, struct scatterlist *src,
 92			      unsigned int nbytes)
 93{
 94	struct blkcipher_walk walk;
 95	struct crypto_blkcipher *tfm = desc->tfm;
 96	struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
 97	struct crypto_cipher *child = ctx->child;
 98	int err;
 99
100	blkcipher_walk_init(&walk, dst, src, nbytes);
101	err = blkcipher_walk_virt(desc, &walk);
102
103	while ((nbytes = walk.nbytes)) {
104		if (walk.src.virt.addr == walk.dst.virt.addr)
105			nbytes = crypto_cbc_encrypt_inplace(desc, &walk, child);
106		else
107			nbytes = crypto_cbc_encrypt_segment(desc, &walk, child);
108		err = blkcipher_walk_done(desc, &walk, nbytes);
109	}
110
111	return err;
112}
113
114static int crypto_cbc_decrypt_segment(struct blkcipher_desc *desc,
115				      struct blkcipher_walk *walk,
116				      struct crypto_cipher *tfm)
117{
118	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
119		crypto_cipher_alg(tfm)->cia_decrypt;
120	int bsize = crypto_cipher_blocksize(tfm);
121	unsigned int nbytes = walk->nbytes;
122	u8 *src = walk->src.virt.addr;
 123	u8 *dst = walk->dst.virt.addr;
124	u8 *iv = walk->iv;
 125
126	do {
127		fn(crypto_cipher_tfm(tfm), dst, src);
128		crypto_xor(dst, iv, bsize);
129		iv = src;
130
131		src += bsize;
132		dst += bsize;
133	} while ((nbytes -= bsize) >= bsize);
134
135	memcpy(walk->iv, iv, bsize);
136
137	return nbytes;
138}
139
140static int crypto_cbc_decrypt_inplace(struct blkcipher_desc *desc,
141				      struct blkcipher_walk *walk,
142				      struct crypto_cipher *tfm)
143{
144	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
145		crypto_cipher_alg(tfm)->cia_decrypt;
146	int bsize = crypto_cipher_blocksize(tfm);
147	unsigned int nbytes = walk->nbytes;
148	u8 *src = walk->src.virt.addr;
 149	u8 last_iv[bsize];
150
151	/* Start of the last block. */
152	src += nbytes - (nbytes & (bsize - 1)) - bsize;
153	memcpy(last_iv, src, bsize);
154
155	for (;;) {
156		fn(crypto_cipher_tfm(tfm), src, src);
157		if ((nbytes -= bsize) < bsize)
158			break;
159		crypto_xor(src, src - bsize, bsize);
160		src -= bsize;
161	}
162
163	crypto_xor(src, walk->iv, bsize);
164	memcpy(walk->iv, last_iv, bsize);
165
166	return nbytes;
167}
168
169static int crypto_cbc_decrypt(struct blkcipher_desc *desc,
170			      struct scatterlist *dst, struct scatterlist *src,
171			      unsigned int nbytes)
172{
173	struct blkcipher_walk walk;
174	struct crypto_blkcipher *tfm = desc->tfm;
175	struct crypto_cbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
176	struct crypto_cipher *child = ctx->child;
177	int err;
178
179	blkcipher_walk_init(&walk, dst, src, nbytes);
180	err = blkcipher_walk_virt(desc, &walk);
181
182	while ((nbytes = walk.nbytes)) {
183		if (walk.src.virt.addr == walk.dst.virt.addr)
184			nbytes = crypto_cbc_decrypt_inplace(desc, &walk, child);
185		else
186			nbytes = crypto_cbc_decrypt_segment(desc, &walk, child);
187		err = blkcipher_walk_done(desc, &walk, nbytes);
188	}
189
190	return err;
191}
192
193static int crypto_cbc_init_tfm(struct crypto_tfm *tfm)
194{
195	struct crypto_instance *inst = (void *)tfm->__crt_alg;
196	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
197	struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
198	struct crypto_cipher *cipher;
199
200	cipher = crypto_spawn_cipher(spawn);
201	if (IS_ERR(cipher))
202		return PTR_ERR(cipher);
203
204	ctx->child = cipher;
205	return 0;
206}
207
208static void crypto_cbc_exit_tfm(struct crypto_tfm *tfm)
209{
210	struct crypto_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
211	crypto_free_cipher(ctx->child);
212}
213
214static struct crypto_instance *crypto_cbc_alloc(struct rtattr **tb)
215{
216	struct crypto_instance *inst;
217	struct crypto_alg *alg;
218	int err;
219
220	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
221	if (err)
222		return ERR_PTR(err);
223
224	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
225				  CRYPTO_ALG_TYPE_MASK);
226	if (IS_ERR(alg))
227		return ERR_CAST(alg);
228
229	inst = ERR_PTR(-EINVAL);
230	if (!is_power_of_2(alg->cra_blocksize))
231		goto out_put_alg;
232
233	inst = crypto_alloc_instance("cbc", alg);
234	if (IS_ERR(inst))
235		goto out_put_alg;
236
237	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
238	inst->alg.cra_priority = alg->cra_priority;
239	inst->alg.cra_blocksize = alg->cra_blocksize;
240	inst->alg.cra_alignmask = alg->cra_alignmask;
241	inst->alg.cra_type = &crypto_blkcipher_type;
242
243	/* We access the data as u32s when xoring. */
244	inst->alg.cra_alignmask |= __alignof__(u32) - 1;
245
246	inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
247	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
248	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;
249
250	inst->alg.cra_ctxsize = sizeof(struct crypto_cbc_ctx);
251
252	inst->alg.cra_init = crypto_cbc_init_tfm;
253	inst->alg.cra_exit = crypto_cbc_exit_tfm;
254
255	inst->alg.cra_blkcipher.setkey = crypto_cbc_setkey;
256	inst->alg.cra_blkcipher.encrypt = crypto_cbc_encrypt;
257	inst->alg.cra_blkcipher.decrypt = crypto_cbc_decrypt;
258
259out_put_alg:
260	crypto_mod_put(alg);
261	return inst;
 262}
263
264static void crypto_cbc_free(struct crypto_instance *inst)
265{
266	crypto_drop_spawn(crypto_instance_ctx(inst));
267	kfree(inst);
268}
269
270static struct crypto_template crypto_cbc_tmpl = {
271	.name = "cbc",
272	.alloc = crypto_cbc_alloc,
273	.free = crypto_cbc_free,
274	.module = THIS_MODULE,
275};
276
277static int __init crypto_cbc_module_init(void)
278{
279	return crypto_register_template(&crypto_cbc_tmpl);
280}
281
282static void __exit crypto_cbc_module_exit(void)
283{
284	crypto_unregister_template(&crypto_cbc_tmpl);
285}
286
287module_init(crypto_cbc_module_init);
288module_exit(crypto_cbc_module_exit);
289
290MODULE_LICENSE("GPL");
291MODULE_DESCRIPTION("CBC block cipher algorithm");
292MODULE_ALIAS_CRYPTO("cbc");
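
Both listings implement the standard CBC recurrences: encryption computes C_i = E_K(P_i XOR C_{i-1}) with C_0 = IV, and decryption computes P_i = D_K(C_i) XOR C_{i-1}. For readers unfamiliar with the kernel's blkcipher walk machinery, here is a minimal, standalone C sketch of that chaining. The names block_fn, cbc_encrypt and cbc_decrypt are illustrative, not kernel APIs; the block callback stands in for the cia_encrypt/cia_decrypt hooks used by crypto_cbc_encrypt_segment() and crypto_cbc_decrypt_segment() above.

/*
 * Standalone sketch of CBC chaining (illustrative only, not kernel code).
 * "block_fn" is a single-block encrypt or decrypt primitive supplied by
 * the caller, analogous to cia_encrypt/cia_decrypt in the listings above.
 */
#include <stddef.h>
#include <string.h>

typedef void (*block_fn)(void *key, unsigned char *dst, const unsigned char *src);

/* C_i = E_K(P_i XOR C_{i-1}), C_0 = IV; iv is updated so calls can be chained. */
static void cbc_encrypt(block_fn enc, void *key, size_t bsize,
			unsigned char *dst, const unsigned char *src,
			size_t nbytes, unsigned char *iv)
{
	while (nbytes >= bsize) {
		for (size_t i = 0; i < bsize; i++)
			iv[i] ^= src[i];	/* P_i XOR C_{i-1} */
		enc(key, dst, iv);		/* C_i = E_K(...)  */
		memcpy(iv, dst, bsize);		/* carry C_i forward */
		src += bsize;
		dst += bsize;
		nbytes -= bsize;
	}
}

/* P_i = D_K(C_i) XOR C_{i-1}; handles src == dst by saving C_i first. */
static void cbc_decrypt(block_fn dec, void *key, size_t bsize,
			unsigned char *dst, const unsigned char *src,
			size_t nbytes, unsigned char *iv)
{
	unsigned char next_iv[64];		/* assumes bsize <= 64 */

	while (nbytes >= bsize) {
		memcpy(next_iv, src, bsize);	/* save C_i before it may be overwritten */
		dec(key, dst, src);		/* D_K(C_i) */
		for (size_t i = 0; i < bsize; i++)
			dst[i] ^= iv[i];	/* ... XOR C_{i-1} */
		memcpy(iv, next_iv, bsize);
		src += bsize;
		dst += bsize;
		nbytes -= bsize;
	}
}

The in-place kernel decryption path (crypto_cbc_decrypt_inplace) avoids the per-block save by walking the buffer backwards, decrypting the last block first and XORing each block with the ciphertext block that precedes it.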
v6.2
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * CBC: Cipher Block Chaining mode
  4 *
   5 * Copyright (c) 2006-2016 Herbert Xu <herbert@gondor.apana.org.au>
  6 */
  7
  8#include <crypto/algapi.h>
  9#include <crypto/internal/cipher.h>
 10#include <crypto/internal/skcipher.h>
 11#include <linux/err.h>
 12#include <linux/init.h>
 13#include <linux/kernel.h>
 14#include <linux/log2.h>
  15#include <linux/module.h>
 16
 17static int crypto_cbc_encrypt_segment(struct skcipher_walk *walk,
  18				      struct crypto_skcipher *skcipher)
 19{
 20	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
  21	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
 22	unsigned int nbytes = walk->nbytes;
 23	u8 *src = walk->src.virt.addr;
 24	u8 *dst = walk->dst.virt.addr;
 25	struct crypto_cipher *cipher;
 26	struct crypto_tfm *tfm;
 27	u8 *iv = walk->iv;
 28
 29	cipher = skcipher_cipher_simple(skcipher);
 30	tfm = crypto_cipher_tfm(cipher);
 31	fn = crypto_cipher_alg(cipher)->cia_encrypt;
 32
 33	do {
 34		crypto_xor(iv, src, bsize);
 35		fn(tfm, dst, iv);
 36		memcpy(iv, dst, bsize);
 37
 38		src += bsize;
 39		dst += bsize;
 40	} while ((nbytes -= bsize) >= bsize);
 41
 42	return nbytes;
 43}
 44
 45static int crypto_cbc_encrypt_inplace(struct skcipher_walk *walk,
  46				      struct crypto_skcipher *skcipher)
 47{
 48	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
  49	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
 50	unsigned int nbytes = walk->nbytes;
 51	u8 *src = walk->src.virt.addr;
 52	struct crypto_cipher *cipher;
 53	struct crypto_tfm *tfm;
 54	u8 *iv = walk->iv;
 55
 56	cipher = skcipher_cipher_simple(skcipher);
 57	tfm = crypto_cipher_tfm(cipher);
 58	fn = crypto_cipher_alg(cipher)->cia_encrypt;
 59
 60	do {
 61		crypto_xor(src, iv, bsize);
 62		fn(tfm, src, src);
 63		iv = src;
 64
 65		src += bsize;
 66	} while ((nbytes -= bsize) >= bsize);
 67
 68	memcpy(walk->iv, iv, bsize);
 69
 70	return nbytes;
 71}
 72
  73static int crypto_cbc_encrypt(struct skcipher_request *req)
 74{
 75	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
  76	struct skcipher_walk walk;
 77	int err;
 78
  79	err = skcipher_walk_virt(&walk, req, false);
 80
 81	while (walk.nbytes) {
 82		if (walk.src.virt.addr == walk.dst.virt.addr)
 83			err = crypto_cbc_encrypt_inplace(&walk, skcipher);
 84		else
 85			err = crypto_cbc_encrypt_segment(&walk, skcipher);
 86		err = skcipher_walk_done(&walk, err);
 87	}
 88
 89	return err;
 90}
 91
 92static int crypto_cbc_decrypt_segment(struct skcipher_walk *walk,
  93				      struct crypto_skcipher *skcipher)
 94{
 95	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
  96	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
 97	unsigned int nbytes = walk->nbytes;
 98	u8 *src = walk->src.virt.addr;
 99	u8 *dst = walk->dst.virt.addr;
100	struct crypto_cipher *cipher;
101	struct crypto_tfm *tfm;
102	u8 *iv = walk->iv;
103
104	cipher = skcipher_cipher_simple(skcipher);
105	tfm = crypto_cipher_tfm(cipher);
106	fn = crypto_cipher_alg(cipher)->cia_decrypt;
107
108	do {
109		fn(tfm, dst, src);
110		crypto_xor(dst, iv, bsize);
111		iv = src;
112
113		src += bsize;
114		dst += bsize;
115	} while ((nbytes -= bsize) >= bsize);
116
117	memcpy(walk->iv, iv, bsize);
118
119	return nbytes;
120}
121
122static int crypto_cbc_decrypt_inplace(struct skcipher_walk *walk,
 123				      struct crypto_skcipher *skcipher)
124{
125	unsigned int bsize = crypto_skcipher_blocksize(skcipher);
 126	void (*fn)(struct crypto_tfm *, u8 *, const u8 *);
127	unsigned int nbytes = walk->nbytes;
128	u8 *src = walk->src.virt.addr;
129	u8 last_iv[MAX_CIPHER_BLOCKSIZE];
130	struct crypto_cipher *cipher;
131	struct crypto_tfm *tfm;
132
133	cipher = skcipher_cipher_simple(skcipher);
134	tfm = crypto_cipher_tfm(cipher);
135	fn = crypto_cipher_alg(cipher)->cia_decrypt;
136
137	/* Start of the last block. */
138	src += nbytes - (nbytes & (bsize - 1)) - bsize;
139	memcpy(last_iv, src, bsize);
140
141	for (;;) {
142		fn(tfm, src, src);
143		if ((nbytes -= bsize) < bsize)
144			break;
145		crypto_xor(src, src - bsize, bsize);
146		src -= bsize;
147	}
148
149	crypto_xor(src, walk->iv, bsize);
150	memcpy(walk->iv, last_iv, bsize);
151
152	return nbytes;
153}
154
 155static int crypto_cbc_decrypt(struct skcipher_request *req)
156{
157	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
 158	struct skcipher_walk walk;
159	int err;
160
 161	err = skcipher_walk_virt(&walk, req, false);
162
163	while (walk.nbytes) {
164		if (walk.src.virt.addr == walk.dst.virt.addr)
165			err = crypto_cbc_decrypt_inplace(&walk, skcipher);
166		else
167			err = crypto_cbc_decrypt_segment(&walk, skcipher);
168		err = skcipher_walk_done(&walk, err);
169	}
170
171	return err;
172}
173
 174static int crypto_cbc_create(struct crypto_template *tmpl, struct rtattr **tb)
175{
 176	struct skcipher_instance *inst;
177	struct crypto_alg *alg;
178	int err;
179
 180	inst = skcipher_alloc_instance_simple(tmpl, tb);
181	if (IS_ERR(inst))
 182		return PTR_ERR(inst);
183
 184	alg = skcipher_ialg_simple(inst);
185
186	err = -EINVAL;
187	if (!is_power_of_2(alg->cra_blocksize))
 188		goto out_free_inst;
189
190	inst->alg.encrypt = crypto_cbc_encrypt;
 191	inst->alg.decrypt = crypto_cbc_decrypt;
192
193	err = skcipher_register_instance(tmpl, inst);
194	if (err) {
195out_free_inst:
196		inst->free(inst);
197	}
198
 199	return err;
200}
201
202static struct crypto_template crypto_cbc_tmpl = {
203	.name = "cbc",
 204	.create = crypto_cbc_create,
205	.module = THIS_MODULE,
206};
207
208static int __init crypto_cbc_module_init(void)
209{
210	return crypto_register_template(&crypto_cbc_tmpl);
211}
212
213static void __exit crypto_cbc_module_exit(void)
214{
215	crypto_unregister_template(&crypto_cbc_tmpl);
216}
217
218subsys_initcall(crypto_cbc_module_init);
219module_exit(crypto_cbc_module_exit);
220
221MODULE_LICENSE("GPL");
222MODULE_DESCRIPTION("CBC block cipher mode of operation");
223MODULE_ALIAS_CRYPTO("cbc");
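
The v6.2 version registers the same "cbc" template through the skcipher API, built on skcipher_alloc_instance_simple(), so key handling and instance bookkeeping come from the generic simple-template helpers. A kernel caller would normally reach this code by asking the crypto API for an instantiation such as "cbc(aes)". The following is a minimal usage sketch against the standard synchronous skcipher request interface; cbc_aes_encrypt_example and its parameters are illustrative names, and error handling is abbreviated.

/*
 * Sketch: encrypt a buffer in place with "cbc(aes)" via the skcipher API.
 * Illustrative only; buf must not be on the stack (it goes into a scatterlist)
 * and len must be a multiple of the AES block size (16 bytes).
 */
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

static int cbc_aes_encrypt_example(const u8 *key, unsigned int keylen,
				   u8 *iv, u8 *buf, unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
					   CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* Wait for completion even if the request is handled asynchronously. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}

Note that plain CBC provides no padding, so the length passed to skcipher_request_set_crypt() must be a multiple of the underlying cipher's block size.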