Linux v5.9: crypto/pcbc.c
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>

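/*
 * PCBC encryption: C[i] = E(P[i] ^ P[i-1] ^ C[i-1]), with the IV taking the
 * place of P[0] ^ C[0] for the first block.  walk->iv carries the running
 * P[i] ^ C[i] chaining value across blocks (and walk steps), which is what
 * makes a change in any plaintext block propagate into every later
 * ciphertext block.  This helper handles distinct source and destination
 * buffers.
 */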
static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 * const iv = walk->iv;

	do {
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, dst, iv);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

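/*
 * In-place variant: src and dst alias, so each plaintext block is saved in
 * tmpbuf before it is overwritten with ciphertext and then used to compute
 * the next chaining value.
 */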
static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 * const iv = walk->iv;
	u8 tmpbuf[MAX_CIPHER_BLOCKSIZE];

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, src, iv);
		crypto_xor_cpy(iv, tmpbuf, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

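/*
 * Top-level encrypt handler: walk the request's scatterlists in virtually
 * mapped chunks and hand each chunk to the in-place or out-of-place helper.
 * The helpers return the unprocessed (sub-block) tail, which is passed on
 * to skcipher_walk_done().
 */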
static int crypto_pcbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
							     cipher);
		else
			nbytes = crypto_pcbc_encrypt_segment(req, &walk,
							     cipher);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

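/*
 * PCBC decryption: P[i] = D(C[i]) ^ P[i-1] ^ C[i-1], the inverse of the
 * recurrence above.  As on the encrypt side, walk->iv holds the running
 * P[i] ^ C[i] chaining value.
 */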
static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 * const iv = walk->iv;

	do {
		crypto_cipher_decrypt_one(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		crypto_xor_cpy(iv, dst, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 * const iv = walk->iv;
	u8 tmpbuf[MAX_CIPHER_BLOCKSIZE] __aligned(__alignof__(u32));

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_cipher_decrypt_one(tfm, src, src);
		crypto_xor(src, iv, bsize);
		crypto_xor_cpy(iv, src, tmpbuf, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_pcbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
							     cipher);
		else
			nbytes = crypto_pcbc_decrypt_segment(req, &walk,
							     cipher);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

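/*
 * Template instantiation for "pcbc(cipher)".  skcipher_alloc_instance_simple()
 * handles the common boilerplate (locating the underlying single-block cipher
 * and setting up the default setkey/init/exit plus block, IV and key sizes),
 * so only the PCBC-specific encrypt/decrypt handlers are filled in here.
 */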
static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	inst->alg.encrypt = crypto_pcbc_encrypt;
	inst->alg.decrypt = crypto_pcbc_decrypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		inst->free(inst);

	return err;
}

static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.create = crypto_pcbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}

subsys_initcall(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher mode of operation");
MODULE_ALIAS_CRYPTO("pcbc");
Linux v4.10.11: crypto/pcbc.c
/*
 * PCBC: Propagating Cipher Block Chaining mode
 *
 * Copyright (C) 2006 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * Derived from cbc.c
 * - Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>

struct crypto_pcbc_ctx {
	struct crypto_cipher *child;
};

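/*
 * Key handling is delegated to the underlying single-block cipher: the
 * request flags are propagated from the wrapping skcipher to the child,
 * the key is set on the child, and any result flags are copied back.
 */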
static int crypto_pcbc_setkey(struct crypto_skcipher *parent, const u8 *key,
			      unsigned int keylen)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_skcipher_get_flags(parent) &
				       CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_skcipher_set_flags(parent, crypto_cipher_get_flags(child) &
					  CRYPTO_TFM_RES_MASK);
	return err;
}

static int crypto_pcbc_encrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, dst, iv);
		memcpy(iv, dst, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_pcbc_encrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize];

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_xor(iv, src, bsize);
		crypto_cipher_encrypt_one(tfm, src, iv);
		memcpy(iv, tmpbuf, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

static int crypto_pcbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_encrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

static int crypto_pcbc_decrypt_segment(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	u8 *iv = walk->iv;

	do {
		crypto_cipher_decrypt_one(tfm, dst, src);
		crypto_xor(dst, iv, bsize);
		memcpy(iv, src, bsize);
		crypto_xor(iv, dst, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

static int crypto_pcbc_decrypt_inplace(struct skcipher_request *req,
				       struct skcipher_walk *walk,
				       struct crypto_cipher *tfm)
{
	int bsize = crypto_cipher_blocksize(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *src = walk->src.virt.addr;
	u8 *iv = walk->iv;
	u8 tmpbuf[bsize] __attribute__ ((aligned(__alignof__(u32))));

	do {
		memcpy(tmpbuf, src, bsize);
		crypto_cipher_decrypt_one(tfm, src, src);
		crypto_xor(src, iv, bsize);
		memcpy(iv, tmpbuf, bsize);
		crypto_xor(iv, src, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	memcpy(walk->iv, iv, bsize);

	return nbytes;
}

static int crypto_pcbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes)) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
							     child);
		else
			nbytes = crypto_pcbc_decrypt_segment(req, &walk,
							     child);
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}

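/*
 * Allocate and release the underlying single-block cipher when a pcbc
 * transform is instantiated and torn down.
 */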
static int crypto_pcbc_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void crypto_pcbc_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_pcbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static void crypto_pcbc_free(struct skcipher_instance *inst)
{
	crypto_drop_skcipher(skcipher_instance_ctx(inst));
	kfree(inst);
}

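/*
 * Build the "pcbc(cipher)" instance by hand: check the requested algorithm
 * type, look up the underlying single-block cipher, initialise the spawn,
 * and fill in the skcipher_alg parameters (block size, IV size, key sizes,
 * alignment) and callbacks from the wrapped cipher.
 */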
static int crypto_pcbc_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_attr_type *algt;
	struct crypto_spawn *spawn;
	struct crypto_alg *alg;
	int err;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if (((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) &
	    ~CRYPTO_ALG_INTERNAL)
		return -EINVAL;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER |
				      (algt->type & CRYPTO_ALG_INTERNAL),
				  CRYPTO_ALG_TYPE_MASK |
				  (algt->mask & CRYPTO_ALG_INTERNAL));
	err = PTR_ERR(alg);
	if (IS_ERR(alg))
		goto err_free_inst;

	spawn = skcipher_instance_ctx(inst);
	err = crypto_init_spawn(spawn, alg, skcipher_crypto_instance(inst),
				CRYPTO_ALG_TYPE_MASK);
	crypto_mod_put(alg);
	if (err)
		goto err_free_inst;

	err = crypto_inst_setname(skcipher_crypto_instance(inst), "pcbc", alg);
	if (err)
		goto err_drop_spawn;

	inst->alg.base.cra_flags = alg->cra_flags & CRYPTO_ALG_INTERNAL;
	inst->alg.base.cra_priority = alg->cra_priority;
	inst->alg.base.cra_blocksize = alg->cra_blocksize;
	inst->alg.base.cra_alignmask = alg->cra_alignmask;

	/* We access the data as u32s when xoring. */
	inst->alg.base.cra_alignmask |= __alignof__(u32) - 1;

	inst->alg.ivsize = alg->cra_blocksize;
	inst->alg.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_pcbc_ctx);

	inst->alg.init = crypto_pcbc_init_tfm;
	inst->alg.exit = crypto_pcbc_exit_tfm;

	inst->alg.setkey = crypto_pcbc_setkey;
	inst->alg.encrypt = crypto_pcbc_encrypt;
	inst->alg.decrypt = crypto_pcbc_decrypt;

	inst->free = crypto_pcbc_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err)
		goto err_drop_spawn;

out:
	return err;

err_drop_spawn:
	crypto_drop_spawn(spawn);
err_free_inst:
	kfree(inst);
	goto out;
}

static struct crypto_template crypto_pcbc_tmpl = {
	.name = "pcbc",
	.create = crypto_pcbc_create,
	.module = THIS_MODULE,
};

static int __init crypto_pcbc_module_init(void)
{
	return crypto_register_template(&crypto_pcbc_tmpl);
}

static void __exit crypto_pcbc_module_exit(void)
{
	crypto_unregister_template(&crypto_pcbc_tmpl);
}

module_init(crypto_pcbc_module_init);
module_exit(crypto_pcbc_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("PCBC block cipher algorithm");
MODULE_ALIAS_CRYPTO("pcbc");