crypto/pcbc.c (Linux v4.6)
/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct crypto_pcbc_ctx {
	struct crypto_cipher *child;
};

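/*
 * Key setup simply forwards the key to the underlying single-block
 * cipher.  The CRYPTO_TFM_REQ_* flags requested on the pcbc tfm are
 * propagated down to the child, and any CRYPTO_TFM_RES_* result flags
 * the child raises (e.g. a bad key length) are copied back up to the
 * parent so the caller can see why setkey failed.
 */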
static int crypto_pcbc_setkey(struct crypto_tfm *parent, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
			     CRYPTO_TFM_RES_MASK);
	return err;
}

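/*
 * PCBC encryption of one contiguous chunk with distinct source and
 * destination buffers:
 *
 *	C[i] = E_K(P[i] ^ P[i-1] ^ C[i-1])
 *
 * where the running value P[i-1] ^ C[i-1] is kept in 'iv' (seeded with
 * the caller's IV for the first block).  Returns the number of trailing
 * bytes (less than one block) that were not processed.
 */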
static int crypto_pcbc_encrypt_segment(struct blkcipher_desc *desc,
				       struct blkcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		crypto_xor(iv, src, bsize);
		fn(crypto_cipher_tfm(tfm), dst, iv);
		memcpy(iv, dst, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

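/*
 * In-place variant: the plaintext block is overwritten by the
 * ciphertext, so it is saved in tmpbuf first and XORed into the
 * chaining value from there after encryption.
 */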
static int crypto_pcbc_encrypt_inplace(struct blkcipher_desc *desc,
				       struct blkcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize];

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_xor(iv, src, bsize);
		fn(crypto_cipher_tfm(tfm), src, iv);
		memcpy(iv, tmpbuf, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

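/*
 * Top-level encrypt: walk the scatterlists in virtually mapped chunks
 * and dispatch each chunk to the in-place or out-of-place helper,
 * depending on whether source and destination overlap.
 */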
static int crypto_pcbc_encrypt(struct blkcipher_desc *desc,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_encrypt_inplace(desc, &walk,
							     child);
		else
			nbytes = crypto_pcbc_encrypt_segment(desc, &walk,
							     child);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

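/*
 * PCBC decryption mirrors encryption:
 *
 *	P[i] = D_K(C[i]) ^ P[i-1] ^ C[i-1]
 *
 * with 'iv' again holding the running P[i-1] ^ C[i-1] value.
 */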
static int crypto_pcbc_decrypt_segment(struct blkcipher_desc *desc,
				       struct blkcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_decrypt;
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		fn(crypto_cipher_tfm(tfm), dst, src);
		crypto_xor(dst, iv, bsize);
		memcpy(iv, src, bsize);
		crypto_xor(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

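/*
 * In-place decryption must stash the ciphertext block in tmpbuf before
 * it is overwritten by the plaintext, since the chaining value needs
 * both.
 */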
static int crypto_pcbc_decrypt_inplace(struct blkcipher_desc *desc,
				       struct blkcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_decrypt;
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize];

	do {
		memcpy(tmpbuf, src, bsize);
		fn(crypto_cipher_tfm(tfm), src, src);
		crypto_xor(src, iv, bsize);
		memcpy(iv, tmpbuf, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

static int crypto_pcbc_decrypt(struct blkcipher_desc *desc,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_decrypt_inplace(desc, &walk,
							     child);
		else
			nbytes = crypto_pcbc_decrypt_segment(desc, &walk,
							     child);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

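/*
 * Per-tfm init/exit: instantiate (and later free) the underlying
 * single-block cipher from the spawn stored in the instance context.
 */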
static int crypto_pcbc_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void crypto_pcbc_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm);
	crypto_free_cipher(ctx->child);
}

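/*
 * Template instantiation ("pcbc(<cipher>)"): wrap the requested
 * single-block cipher in a blkcipher instance.  The IV size equals the
 * cipher's block size, the key size range is inherited from the cipher,
 * and the alignmask is widened so the u32-based crypto_xor() sees
 * suitably aligned data.
 */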
static struct crypto_instance *crypto_pcbc_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = crypto_alloc_instance("pcbc", alg);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_blkcipher_type;

	/* We access the data as u32s when xoring. */
	inst->alg.cra_alignmask |= __alignof__(u32) - 1;

	inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_pcbc_ctx);

	inst->alg.cra_init = crypto_pcbc_init_tfm;
	inst->alg.cra_exit = crypto_pcbc_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_pcbc_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_pcbc_encrypt;
	inst->alg.cra_blkcipher.decrypt = crypto_pcbc_decrypt;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}

static void crypto_pcbc_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.alloc = crypto_pcbc_alloc,
	.free = crypto_pcbc_free,
	.module = THIS_MODULE,
};

static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}

module_init(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher algorithm");
MODULE_ALIAS_CRYPTO("pcbc");
crypto/pcbc.c (Linux v4.17), the same template after conversion to the skcipher API
/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/compiler.h>

struct crypto_pcbc_ctx {
	struct crypto_cipher *child;
};

static int crypto_pcbc_setkey(struct crypto_skcipher *parent, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);
	return err;
}

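/*
 * Same PCBC recurrence as before, but using the crypto_cipher_*_one()
 * helpers instead of calling cia_encrypt/cia_decrypt through a function
 * pointer, and crypto_xor_cpy(a, b, c, n), which writes b ^ c into a in
 * one pass, instead of the separate memcpy() + crypto_xor() pair.
 */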
static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, dst, iv);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize];

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, src, iv);
		crypto_xor_cpy(iv, tmpbuf, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

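/*
 * The skcipher request API replaces the old blkcipher descriptor:
 * skcipher_walk_virt(&walk, req, false) maps the request's scatterlists
 * in virtually addressed chunks (the 'false' argument means the walk is
 * allowed to sleep), and each chunk is again handled in place or out of
 * place depending on whether source and destination overlap.
 */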
static int crypto_pcbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_encrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		crypto_cipher_decrypt_one(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

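/*
 * As in the old in-place path, the ciphertext block is stashed in
 * tmpbuf before it is overwritten; here the buffer is additionally
 * aligned to u32 to suit the word-wise XOR helpers.
 */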
static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize] __aligned(__alignof__(u32));

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_cipher_decrypt_one(tfm, src, src);
		crypto_xor(src, iv, bsize);
		crypto_xor_cpy(iv, src, tmpbuf, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

static int crypto_pcbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_decrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int crypto_pcbc_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void crypto_pcbc_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static void crypto_pcbc_free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}

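/*
 * New-style template: instead of an ->alloc hook that hands an instance
 * back to the core for registration, ->create allocates the skcipher
 * instance, fills it in and registers it itself via
 * skcipher_register_instance(), dropping the spawn and freeing the
 * instance on any failure.
 */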
static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_attr_type *algt;
	struct crypto_spawn *spawn;
	struct crypto_alg *alg;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if (((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) &
	    ~CRYPTO_ALG_INTERNAL)
		return -EINVAL;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER |
				      (algt->type & CRYPTO_ALG_INTERNAL),
				  CRYPTO_ALG_TYPE_MASK |
				  (algt->mask & CRYPTO_ALG_INTERNAL));
	err = PTR_ERR(alg);
	if (IS_ERR(alg))
		goto err_free_inst;

	spawn = skcipher_instance_ctx(inst);
	err = crypto_init_spawn(spawn, alg, skcipher_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	crypto_mod_put(alg);
	if (err)
		goto err_free_inst;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "pcbc", alg);
	if (err)
		goto err_drop_spawn;

	inst->alg.base.cra_flags = alg->cra_flags & CRYPTO_ALG_INTERNAL;
	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;
	inst->alg.base.cra_alignmask = alg->cra_alignmask;

	inst->alg.ivsize = alg->cra_blocksize;
	inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_pcbc_ctx);

	inst->alg.init = crypto_pcbc_init_tfm;
	inst->alg.exit = crypto_pcbc_exit_tfm;

	inst->alg.setkey = crypto_pcbc_setkey;
	inst->alg.encrypt = crypto_pcbc_encrypt;
	inst->alg.decrypt = crypto_pcbc_decrypt;

	inst->free = crypto_pcbc_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_spawn(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.create = crypto_pcbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}

module_init(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher algorithm");
MODULE_ALIAS_CRYPTO("pcbc");
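Usage note: a minimal, hypothetical sketch (not part of the file above) of how kernel code might drive the template through the v4.17-era skcipher API; the function name pcbc_demo() and the choice of "pcbc(aes)" are illustrative assumptions, and error handling is abbreviated.

/*
 * Hypothetical sketch: synchronously encrypt 'len' bytes in place with
 * "pcbc(aes)".  'buf' must be directly mapped memory (not stack/vmalloc)
 * since it is addressed via a scatterlist, and 'len' must be a multiple
 * of the cipher block size (16 bytes for AES).
 */
#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

static int pcbc_demo(const u8 *key, unsigned int keylen,
		     u8 *buf, unsigned int len, u8 *iv)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("pcbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* Wait for completion in case an async implementation is picked. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}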