crypto/ctr.c (v5.9)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <crypto/internal/skcipher.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/slab.h>
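
/*
 * rfc3686 wraps an existing CTR instance: the tfm context stores the
 * child skcipher plus the 4-byte nonce taken from the end of the key,
 * and each request carries the full 16-byte counter block together
 * with the subrequest forwarded to the child.
 */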
struct crypto_rfc3686_ctx {
	struct crypto_skcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

struct crypto_rfc3686_req_ctx {
	u8 iv[CTR_RFC3686_BLOCK_SIZE];
	struct skcipher_request subreq CRYPTO_MINALIGN_ATTR;
};
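
/*
 * Handle the trailing, possibly partial block: encrypt the counter into
 * an aligned keystream buffer, then XOR only walk->nbytes of keystream
 * into the output.  This is what lets the "ctr" instance advertise a
 * block size of 1 and behave as a stream cipher.
 */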
static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, bsize);
}

static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_cipher *cipher = skcipher_cipher_simple(tfm);
	const unsigned int bsize = crypto_cipher_blocksize(cipher);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, cipher);

		err = skcipher_walk_done(&walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, cipher);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

static int crypto_ctr_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct crypto_alg *alg;
	int err;

	inst = skcipher_alloc_instance_simple(tmpl, tb);
	if (IS_ERR(inst))
		return PTR_ERR(inst);

	alg = skcipher_ialg_simple(inst);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_free_inst;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_free_inst;

	/* CTR mode is a stream cipher. */
	inst->alg.base.cra_blocksize = 1;

	/*
	 * To simplify the implementation, configure the skcipher walk to only
	 * give a partial block at the very end, never earlier.
	 */
	inst->alg.chunksize = alg->cra_blocksize;

	inst->alg.encrypt = crypto_ctr_crypt;
	inst->alg.decrypt = crypto_ctr_crypt;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
out_free_inst:
		inst->free(inst);
	}

	return err;
}

static int crypto_rfc3686_setkey(struct crypto_skcipher *parent,
				 const u8 *key, unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(parent);
	struct crypto_skcipher *child = ctx->child;

	/* the nonce is stored in bytes at end of key */
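	/*
	 * E.g. for rfc3686(ctr(aes)) with AES-128 the key is 20 bytes:
	 * a 16-byte AES key followed by the 4-byte nonce.
	 */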
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_skcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_skcipher_set_flags(child, crypto_skcipher_get_flags(parent) &
					 CRYPTO_TFM_REQ_MASK);
	return crypto_skcipher_setkey(child, key, keylen);
}

static int crypto_rfc3686_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *child = ctx->child;
	unsigned long align = crypto_skcipher_alignmask(tfm);
	struct crypto_rfc3686_req_ctx *rctx =
		(void *)PTR_ALIGN((u8 *)skcipher_request_ctx(req), align + 1);
	struct skcipher_request *subreq = &rctx->subreq;
	u8 *iv = rctx->iv;
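
	/*
	 * RFC 3686 counter block layout (16 bytes):
	 *   bytes  0..3   fixed nonce (from the end of the key)
	 *   bytes  4..11  per-request IV
	 *   bytes 12..15  big-endian block counter, starting at 1
	 */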
	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, req->iv, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	skcipher_request_set_tfm(subreq, child);
	skcipher_request_set_callback(subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(subreq, req->src, req->dst,
				   req->cryptlen, iv);

	return crypto_skcipher_encrypt(subreq);
}

static int crypto_rfc3686_init_tfm(struct crypto_skcipher *tfm)
{
	struct skcipher_instance *inst = skcipher_alg_instance(tfm);
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_skcipher *cipher;
	unsigned long align;
	unsigned int reqsize;

	cipher = crypto_spawn_skcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	align = crypto_skcipher_alignmask(tfm);
	align &= ~(crypto_tfm_ctx_alignment() - 1);
	reqsize = align + sizeof(struct crypto_rfc3686_req_ctx) +
		  crypto_skcipher_reqsize(cipher);
	crypto_skcipher_set_reqsize(tfm, reqsize);

	return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->child);
}

static void crypto_rfc3686_free(struct skcipher_instance *inst)
{
	struct crypto_skcipher_spawn *spawn = skcipher_instance_ctx(inst);

	crypto_drop_skcipher(spawn);
	kfree(inst);
}

static int crypto_rfc3686_create(struct crypto_template *tmpl,
				 struct rtattr **tb)
{
	struct skcipher_instance *inst;
	struct skcipher_alg *alg;
	struct crypto_skcipher_spawn *spawn;
	u32 mask;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	spawn = skcipher_instance_ctx(inst);

	err = crypto_grab_skcipher(spawn, skcipher_crypto_instance(inst),
				   crypto_attr_alg_name(tb[1]), 0, mask);
	if (err)
		goto err_free_inst;

	alg = crypto_spawn_skcipher_alg(spawn);

	/* We only support 16-byte blocks. */
	err = -EINVAL;
	if (crypto_skcipher_alg_ivsize(alg) != CTR_RFC3686_BLOCK_SIZE)
		goto err_free_inst;

	/* Not a stream cipher? */
	if (alg->base.cra_blocksize != 1)
		goto err_free_inst;

	err = -ENAMETOOLONG;
	if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_name) >= CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;
	if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME,
		     "rfc3686(%s)", alg->base.cra_driver_name) >=
	    CRYPTO_MAX_ALG_NAME)
		goto err_free_inst;

	inst->alg.base.cra_priority = alg->base.cra_priority;
	inst->alg.base.cra_blocksize = 1;
	inst->alg.base.cra_alignmask = alg->base.cra_alignmask;

	inst->alg.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
	inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;
	inst->alg.max_keysize = crypto_skcipher_alg_max_keysize(alg) +
				CTR_RFC3686_NONCE_SIZE;

	inst->alg.setkey = crypto_rfc3686_setkey;
	inst->alg.encrypt = crypto_rfc3686_crypt;
	inst->alg.decrypt = crypto_rfc3686_crypt;

	inst->alg.base.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.init = crypto_rfc3686_init_tfm;
	inst->alg.exit = crypto_rfc3686_exit_tfm;

	inst->free = crypto_rfc3686_free;

	err = skcipher_register_instance(tmpl, inst);
	if (err) {
err_free_inst:
		crypto_rfc3686_free(inst);
	}
	return err;
}

static struct crypto_template crypto_ctr_tmpls[] = {
	{
		.name = "ctr",
		.create = crypto_ctr_create,
		.module = THIS_MODULE,
	}, {
		.name = "rfc3686",
		.create = crypto_rfc3686_create,
		.module = THIS_MODULE,
	},
};

static int __init crypto_ctr_module_init(void)
{
	return crypto_register_templates(crypto_ctr_tmpls,
					 ARRAY_SIZE(crypto_ctr_tmpls));
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_templates(crypto_ctr_tmpls,
				    ARRAY_SIZE(crypto_ctr_tmpls));
}

subsys_initcall(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR block cipher mode of operation");
MODULE_ALIAS_CRYPTO("rfc3686");
MODULE_ALIAS_CRYPTO("ctr");
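
A minimal usage sketch against the v5.9 code above, not part of ctr.c: it allocates the "ctr(aes)" instance that crypto_ctr_create() builds and encrypts one linear buffer synchronously. The function name ctr_aes_crypt_example() and the caller-supplied key/IV/buffer are illustrative assumptions; the calls themselves are the standard skcipher API.

#include <crypto/skcipher.h>
#include <linux/scatterlist.h>

/* Encrypt (or, identically, decrypt) @len bytes of @buf in place. */
static int ctr_aes_crypt_example(u8 *buf, unsigned int len,
				 const u8 *key, unsigned int keylen,
				 u8 *iv /* 16-byte initial counter block */)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("ctr(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	/* @buf must be virtually contiguous (e.g. kmalloc'ed, not stack). */
	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
					   CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* CTR encryption and decryption are the same keystream XOR. */
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}

Calling crypto_skcipher_decrypt() on the same request would run the identical crypto_ctr_crypt() path, since the template wires both callbacks to one function.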
crypto/ctr.c (v3.1)
 
/*
 * CTR: Counter mode
 *
 * (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <crypto/algapi.h>
#include <crypto/ctr.h>
#include <linux/err.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>

struct crypto_ctr_ctx {
	struct crypto_cipher *child;
};

struct crypto_rfc3686_ctx {
	struct crypto_blkcipher *child;
	u8 nonce[CTR_RFC3686_NONCE_SIZE];
};

static int crypto_ctr_setkey(struct crypto_tfm *parent, const u8 *key,
			     unsigned int keylen)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_cipher *child = ctx->child;
	int err;

	crypto_cipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(child, crypto_tfm_get_flags(parent) &
				CRYPTO_TFM_REQ_MASK);
	err = crypto_cipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_cipher_get_flags(child) &
			     CRYPTO_TFM_RES_MASK);

	return err;
}

static void crypto_ctr_crypt_final(struct blkcipher_walk *walk,
				   struct crypto_cipher *tfm)
{
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	u8 *ctrblk = walk->iv;
	u8 tmp[bsize + alignmask];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	crypto_cipher_encrypt_one(tfm, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);

	crypto_inc(ctrblk, bsize);
}

static int crypto_ctr_crypt_segment(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), dst, ctrblk);
		crypto_xor(dst, src, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
		dst += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_ctr_crypt_inplace(struct blkcipher_walk *walk,
				    struct crypto_cipher *tfm)
{
	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
		   crypto_cipher_alg(tfm)->cia_encrypt;
	unsigned int bsize = crypto_cipher_blocksize(tfm);
	unsigned long alignmask = crypto_cipher_alignmask(tfm);
	unsigned int nbytes = walk->nbytes;
	u8 *ctrblk = walk->iv;
	u8 *src = walk->src.virt.addr;
	u8 tmp[bsize + alignmask];
	u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1);

	do {
		/* create keystream */
		fn(crypto_cipher_tfm(tfm), keystream, ctrblk);
		crypto_xor(src, keystream, bsize);

		/* increment counter in counterblock */
		crypto_inc(ctrblk, bsize);

		src += bsize;
	} while ((nbytes -= bsize) >= bsize);

	return nbytes;
}

static int crypto_ctr_crypt(struct blkcipher_desc *desc,
			      struct scatterlist *dst, struct scatterlist *src,
			      unsigned int nbytes)
{
	struct blkcipher_walk walk;
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_ctr_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_cipher *child = ctx->child;
	unsigned int bsize = crypto_cipher_blocksize(child);
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, bsize);

	while (walk.nbytes >= bsize) {
		if (walk.src.virt.addr == walk.dst.virt.addr)
			nbytes = crypto_ctr_crypt_inplace(&walk, child);
		else
			nbytes = crypto_ctr_crypt_segment(&walk, child);

		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	if (walk.nbytes) {
		crypto_ctr_crypt_final(&walk, child);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}

static int crypto_ctr_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;

	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void crypto_ctr_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_ctr_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

static struct crypto_instance *crypto_ctr_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	/* Block size must be >= 4 bytes. */
	err = -EINVAL;
	if (alg->cra_blocksize < 4)
		goto out_put_alg;

	/* If this is false we'd fail the alignment of crypto_inc. */
	if (alg->cra_blocksize % 4)
		goto out_put_alg;

	inst = crypto_alloc_instance("ctr", alg);
	if (IS_ERR(inst))
		goto out;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = alg->cra_alignmask | (__alignof__(u32) - 1);
	inst->alg.cra_type = &crypto_blkcipher_type;

	inst->alg.cra_blkcipher.ivsize = alg->cra_blocksize;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_cipher.cia_min_keysize;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_cipher.cia_max_keysize;

	inst->alg.cra_ctxsize = sizeof(struct crypto_ctr_ctx);

	inst->alg.cra_init = crypto_ctr_init_tfm;
	inst->alg.cra_exit = crypto_ctr_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_ctr_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_ctr_crypt;
	inst->alg.cra_blkcipher.decrypt = crypto_ctr_crypt;

	inst->alg.cra_blkcipher.geniv = "chainiv";

out:
	crypto_mod_put(alg);
	return inst;

out_put_alg:
	inst = ERR_PTR(err);
	goto out;
}

static void crypto_ctr_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}

static struct crypto_template crypto_ctr_tmpl = {
	.name = "ctr",
	.alloc = crypto_ctr_alloc,
	.free = crypto_ctr_free,
	.module = THIS_MODULE,
};

static int crypto_rfc3686_setkey(struct crypto_tfm *parent, const u8 *key,
				 unsigned int keylen)
{
	struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(parent);
	struct crypto_blkcipher *child = ctx->child;
	int err;

	/* the nonce is stored in bytes at end of key */
	if (keylen < CTR_RFC3686_NONCE_SIZE)
		return -EINVAL;

	memcpy(ctx->nonce, key + (keylen - CTR_RFC3686_NONCE_SIZE),
	       CTR_RFC3686_NONCE_SIZE);

	keylen -= CTR_RFC3686_NONCE_SIZE;

	crypto_blkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_blkcipher_set_flags(child, crypto_tfm_get_flags(parent) &
					  CRYPTO_TFM_REQ_MASK);
	err = crypto_blkcipher_setkey(child, key, keylen);
	crypto_tfm_set_flags(parent, crypto_blkcipher_get_flags(child) &
				     CRYPTO_TFM_RES_MASK);

	return err;
}

static int crypto_rfc3686_crypt(struct blkcipher_desc *desc,
				struct scatterlist *dst,
				struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_blkcipher *tfm = desc->tfm;
	struct crypto_rfc3686_ctx *ctx = crypto_blkcipher_ctx(tfm);
	struct crypto_blkcipher *child = ctx->child;
	unsigned long alignmask = crypto_blkcipher_alignmask(tfm);
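	/*
	 * Note: the counter block below lives in an on-stack
	 * variable-length array; the v5.9 version above keeps it in the
	 * request context instead.
	 */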
	u8 ivblk[CTR_RFC3686_BLOCK_SIZE + alignmask];
	u8 *iv = PTR_ALIGN(ivblk + 0, alignmask + 1);
	u8 *info = desc->info;
	int err;

	/* set up counter block */
	memcpy(iv, ctx->nonce, CTR_RFC3686_NONCE_SIZE);
	memcpy(iv + CTR_RFC3686_NONCE_SIZE, info, CTR_RFC3686_IV_SIZE);

	/* initialize counter portion of counter block */
	*(__be32 *)(iv + CTR_RFC3686_NONCE_SIZE + CTR_RFC3686_IV_SIZE) =
		cpu_to_be32(1);

	desc->tfm = child;
	desc->info = iv;
	err = crypto_blkcipher_encrypt_iv(desc, dst, src, nbytes);
	desc->tfm = tfm;
	desc->info = info;

	return err;
}

static int crypto_rfc3686_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = (void *)tfm->__crt_alg;
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_blkcipher *cipher;

	cipher = crypto_spawn_blkcipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;

	return 0;
}

static void crypto_rfc3686_exit_tfm(struct crypto_tfm *tfm)
{
	struct crypto_rfc3686_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_blkcipher(ctx->child);
}

static struct crypto_instance *crypto_rfc3686_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	alg = crypto_attr_alg(tb[1], CRYPTO_ALG_TYPE_BLKCIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	err = PTR_ERR(alg);
	if (IS_ERR(alg))
		return ERR_PTR(err);

	/* We only support 16-byte blocks. */
	err = -EINVAL;
	if (alg->cra_blkcipher.ivsize != CTR_RFC3686_BLOCK_SIZE)
		goto out_put_alg;

	/* Not a stream cipher? */
	if (alg->cra_blocksize != 1)
		goto out_put_alg;

	inst = crypto_alloc_instance("rfc3686", alg);
	if (IS_ERR(inst))
		goto out;

	inst->alg.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER;
	inst->alg.cra_priority = alg->cra_priority;
	inst->alg.cra_blocksize = 1;
	inst->alg.cra_alignmask = alg->cra_alignmask;
	inst->alg.cra_type = &crypto_blkcipher_type;

	inst->alg.cra_blkcipher.ivsize = CTR_RFC3686_IV_SIZE;
	inst->alg.cra_blkcipher.min_keysize = alg->cra_blkcipher.min_keysize
					      + CTR_RFC3686_NONCE_SIZE;
	inst->alg.cra_blkcipher.max_keysize = alg->cra_blkcipher.max_keysize
					      + CTR_RFC3686_NONCE_SIZE;

	inst->alg.cra_blkcipher.geniv = "seqiv";

	inst->alg.cra_ctxsize = sizeof(struct crypto_rfc3686_ctx);

	inst->alg.cra_init = crypto_rfc3686_init_tfm;
	inst->alg.cra_exit = crypto_rfc3686_exit_tfm;

	inst->alg.cra_blkcipher.setkey = crypto_rfc3686_setkey;
	inst->alg.cra_blkcipher.encrypt = crypto_rfc3686_crypt;
	inst->alg.cra_blkcipher.decrypt = crypto_rfc3686_crypt;

out:
	crypto_mod_put(alg);
	return inst;

out_put_alg:
	inst = ERR_PTR(err);
	goto out;
}

static struct crypto_template crypto_rfc3686_tmpl = {
	.name = "rfc3686",
	.alloc = crypto_rfc3686_alloc,
	.free = crypto_ctr_free,
	.module = THIS_MODULE,
};

static int __init crypto_ctr_module_init(void)
{
	int err;

	err = crypto_register_template(&crypto_ctr_tmpl);
	if (err)
		goto out;

	err = crypto_register_template(&crypto_rfc3686_tmpl);
	if (err)
		goto out_drop_ctr;

out:
	return err;

out_drop_ctr:
	crypto_unregister_template(&crypto_ctr_tmpl);
	goto out;
}

static void __exit crypto_ctr_module_exit(void)
{
	crypto_unregister_template(&crypto_rfc3686_tmpl);
	crypto_unregister_template(&crypto_ctr_tmpl);
}

module_init(crypto_ctr_module_init);
module_exit(crypto_ctr_module_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("CTR Counter block mode");
MODULE_ALIAS("rfc3686");
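
For comparison, a minimal sketch against this v3.1 blkcipher interface, again not part of ctr.c: the function name and the key/IV/buffer handling are illustrative placeholders, and the mask argument requests a synchronous transform.

#include <linux/crypto.h>
#include <linux/scatterlist.h>

/* Encrypt (or, identically, decrypt) @len bytes of @buf in place. */
static int ctr_aes_crypt_v31_example(u8 *buf, unsigned int len,
				     const u8 *key, unsigned int keylen,
				     u8 *iv /* 16-byte initial counter block */)
{
	struct crypto_blkcipher *tfm;
	struct blkcipher_desc desc;
	struct scatterlist sg;
	int err;

	/* Mask out async implementations to get a synchronous tfm. */
	tfm = crypto_alloc_blkcipher("ctr(aes)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_blkcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	desc.tfm = tfm;
	desc.flags = 0;
	desc.info = iv;

	/* @buf must be virtually contiguous (e.g. kmalloc'ed, not stack). */
	sg_init_one(&sg, buf, len);
	err = crypto_blkcipher_encrypt_iv(&desc, &sg, &sg, len);

out_free_tfm:
	crypto_free_blkcipher(tfm);
	return err;
}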