v4.6
/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct crypto_pcbc_ctx {
	struct crypto_cipher *child;
};

static int crypto_pcbc_setkey(struct crypto_tfm *parent, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
			     CRYPTO_TFM_RES_MASK);
	return err;
}

static int crypto_pcbc_encrypt_segment(struct blkcipher_desc *desc,
				       struct blkcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		crypto_xor(iv, src, bsize);
		fn(crypto_cipher_tfm(tfm), dst, iv);
		memcpy(iv, dst, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_pcbc_encrypt_inplace(struct blkcipher_desc *desc,
				       struct blkcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_encrypt;
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize];

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_xor(iv, src, bsize);
		fn(crypto_cipher_tfm(tfm), src, iv);
		memcpy(iv, tmpbuf, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

static int crypto_pcbc_encrypt(struct blkcipher_desc *desc,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_encrypt_inplace(desc, &walk,
							     child);
		else
			nbytes = crypto_pcbc_encrypt_segment(desc, &walk,
							     child);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

static int crypto_pcbc_decrypt_segment(struct blkcipher_desc *desc,
				       struct blkcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_decrypt;
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		fn(crypto_cipher_tfm(tfm), dst, src);
		crypto_xor(dst, iv, bsize);
		memcpy(iv, src, bsize);
		crypto_xor(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

static int crypto_pcbc_decrypt_inplace(struct blkcipher_desc *desc,
				       struct blkcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		crypto_cipher_alg(tfm)->cia_decrypt;
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize];

	do {
		memcpy(tmpbuf, src, bsize);
		fn(crypto_cipher_tfm(tfm), src, src);
		crypto_xor(src, iv, bsize);
		memcpy(iv, tmpbuf, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

static int crypto_pcbc_decrypt(struct blkcipher_desc *desc,
			       struct scatterlist *dst, struct scatterlist *src,
			       unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_pcbc_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_decrypt_inplace(desc, &walk,
							     child);
		else
			nbytes = crypto_pcbc_decrypt_segment(desc, &walk,
							     child);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

static int crypto_pcbc_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void crypto_pcbc_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_pcbc_ctx *ctx = crypto_tfm_ctx(tfm);
	crypto_free_cipher(ctx->child);
}

static struct crypto_instance *crypto_pcbc_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = crypto_alloc_instance("pcbc", alg);
	if (IS_ERR(inst))
		goto out_put_alg;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = alg->cra_blocksize;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_blkcipher_type;

	/* We access the data as u32s when xoring. */
	inst->alg.cra_alignmask |= __alignof__(u32) - 1;

	inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_pcbc_ctx);

	inst->alg.cra_init = crypto_pcbc_init_tfm;
	inst->alg.cra_exit = crypto_pcbc_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_pcbc_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_pcbc_encrypt;
	inst->alg.cra_blkcipher.decrypt = crypto_pcbc_decrypt;

out_put_alg:
	crypto_mod_put(alg);
	return inst;
}

static void crypto_pcbc_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.alloc = crypto_pcbc_alloc,
	.free = crypto_pcbc_free,
	.module = THIS_MODULE,
};

static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}

module_init(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher algorithm");
MODULE_ALIAS_CRYPTO("pcbc");
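For context, a minimal sketch of how a v4.6-era kernel module might use the template registered above through the synchronous blkcipher interface it implements. The function name, key, IV and buffer contents are made up for illustration; this is not part of pcbc.c.

/*
 * Hypothetical caller of "pcbc(aes)" via the legacy blkcipher API (v4.6).
 */
#include <crypto/algapi.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int pcbc_blkcipher_demo(void)
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg;
	u8 key[16] = { 0 };	/* example 128-bit AES key */
	u8 iv[16] = { 0 };	/* one cipher block of IV */
	u8 *buf;
	int err;

	/* Instantiates the "pcbc" template around the "aes" cipher. */
	tfm = crypto_alloc_blkcipher("pcbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* Data must be linearly mapped (not on the stack) and a multiple
	 * of the block size, since PCBC does no padding. */
	buf = kzalloc(32, GFP_KERNEL);
	if (!buf) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	desc.tfm = tfm;
	desc.flags = 0;

	err = crypto_blkcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out_free_buf;

	crypto_blkcipher_set_iv(tfm, iv, sizeof(iv));

	sg_init_one(&sg, buf, 32);
	err = crypto_blkcipher_encrypt(&desc, &sg, &sg, 32);

out_free_buf:
	kfree(buf);
out_free_tfm:
	crypto_free_blkcipher(tfm);
	return err;
}

Because source and destination scatterlists are the same here, the walk takes the in-place path (crypto_pcbc_encrypt_inplace) rather than the segment path.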
v4.10.11
/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct crypto_pcbc_ctx {
	struct crypto_cipher *child;
};

static int crypto_pcbc_setkey(struct crypto_skcipher *parent, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);
	return err;
}

static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, dst, iv);
		memcpy(iv, dst, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize];

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, src, iv);
		memcpy(iv, tmpbuf, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

static int crypto_pcbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_encrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		crypto_cipher_decrypt_one(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		memcpy(iv, src, bsize);
		crypto_xor(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize] __attribute__ ((aligned(__alignof__(u32))));

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_cipher_decrypt_one(tfm, src, src);
		crypto_xor(src, iv, bsize);
		memcpy(iv, tmpbuf, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

static int crypto_pcbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_decrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int crypto_pcbc_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void crypto_pcbc_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static void crypto_pcbc_free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}

static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_attr_type *algt;
	struct crypto_spawn *spawn;
	struct crypto_alg *alg;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if (((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) &
	    ~CRYPTO_ALG_INTERNAL)
		return -EINVAL;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER |
				      (algt->type & CRYPTO_ALG_INTERNAL),
				  CRYPTO_ALG_TYPE_MASK |
				  (algt->mask & CRYPTO_ALG_INTERNAL));
	err = PTR_ERR(alg);
	if (IS_ERR(alg))
		goto err_free_inst;

	spawn = skcipher_instance_ctx(inst);
	err = crypto_init_spawn(spawn, alg, skcipher_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	crypto_mod_put(alg);
	if (err)
		goto err_free_inst;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "pcbc", alg);
	if (err)
		goto err_drop_spawn;

	inst->alg.base.cra_flags = alg->cra_flags & CRYPTO_ALG_INTERNAL;
	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;
	inst->alg.base.cra_alignmask = alg->cra_alignmask;

	/* We access the data as u32s when xoring. */
	inst->alg.base.cra_alignmask |= __alignof__(u32) - 1;

	inst->alg.ivsize = alg->cra_blocksize;
	inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_pcbc_ctx);

	inst->alg.init = crypto_pcbc_init_tfm;
	inst->alg.exit = crypto_pcbc_exit_tfm;

	inst->alg.setkey = crypto_pcbc_setkey;
	inst->alg.encrypt = crypto_pcbc_encrypt;
	inst->alg.decrypt = crypto_pcbc_decrypt;

	inst->free = crypto_pcbc_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_spawn(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.create = crypto_pcbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}

module_init(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher algorithm");
MODULE_ALIAS_CRYPTO("pcbc");
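For comparison with the v4.6 usage, a minimal sketch of how a kernel module might drive the skcipher-based "pcbc(aes)" template in the v4.10 era. The function name, key, IV and data are illustrative only, and error handling is condensed; this is not part of pcbc.c.

/*
 * Hypothetical caller of "pcbc(aes)" via the skcipher API (v4.10-era).
 */
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

static int pcbc_skcipher_demo(void)
{
	struct crypto_skcipher *tfm;
	struct scatterlist sg;
	u8 key[16] = { 0 };	/* example 128-bit AES key */
	u8 iv[16] = { 0 };	/* one cipher block of IV */
	u8 *buf;
	int err;

	/* Mask out async implementations so the request completes inline,
	 * which is what the template above provides anyway. */
	tfm = crypto_alloc_skcipher("pcbc(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* PCBC does no padding: data must be a whole number of blocks. */
	buf = kzalloc(32, GFP_KERNEL);
	if (!buf) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	err = crypto_skcipher_setkey(tfm, key, sizeof(key));
	if (err)
		goto out_free_buf;

	sg_init_one(&sg, buf, 32);

	{
		SKCIPHER_REQUEST_ON_STACK(req, tfm);

		skcipher_request_set_tfm(req, tfm);
		skcipher_request_set_callback(req, 0, NULL, NULL);
		skcipher_request_set_crypt(req, &sg, &sg, 32, iv);

		err = crypto_skcipher_encrypt(req);
		skcipher_request_zero(req);
	}

out_free_buf:
	kfree(buf);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}

As in the v4.6 example, identical source and destination scatterlists send the walk down the in-place path; separate buffers would use the segment variants instead.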