Linux Audio

Check our new training course

Loading...
v4.6
 
  1/*
  2 * Cryptographic API.
  3 *
  4 * Cipher operations.
  5 *
  6 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
  7 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
  8 *
  9 * This program is free software; you can redistribute it and/or modify it
 10 * under the terms of the GNU General Public License as published by the Free
 11 * Software Foundation; either version 2 of the License, or (at your option)
 12 * any later version.
 13 *
 14 */
 15
 
 
#include <crypto/algapi.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"
 22
 23static int setkey_unaligned(struct crypto_tfm *tfm, const u8 *key,
 24			    unsigned int keylen)
 25{
 26	struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
 27	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
 28	int ret;
 29	u8 *buffer, *alignbuffer;
 30	unsigned long absize;
 31
 32	absize = keylen + alignmask;
 33	buffer = kmalloc(absize, GFP_ATOMIC);
 34	if (!buffer)
 35		return -ENOMEM;
 36
 37	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 38	memcpy(alignbuffer, key, keylen);
 39	ret = cia->cia_setkey(tfm, alignbuffer, keylen);
 40	memset(alignbuffer, 0, keylen);
 41	kfree(buffer);
 42	return ret;
 43
 44}
 45
 46static int setkey(struct crypto_tfm *tfm, const u8 *key, unsigned int keylen)
 
 47{
 48	struct cipher_alg *cia = &tfm->__crt_alg->cra_cipher;
 49	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
 50
 51	tfm->crt_flags &= ~CRYPTO_TFM_RES_MASK;
 52	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize) {
 53		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
 54		return -EINVAL;
 55	}
 56
 57	if ((unsigned long)key & alignmask)
 58		return setkey_unaligned(tfm, key, keylen);
 59
 60	return cia->cia_setkey(tfm, key, keylen);
 61}
 
 62
 63static void cipher_crypt_unaligned(void (*fn)(struct crypto_tfm *, u8 *,
 64					      const u8 *),
 65				   struct crypto_tfm *tfm,
 66				   u8 *dst, const u8 *src)
 67{
 68	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
 69	unsigned int size = crypto_tfm_alg_blocksize(tfm);
 70	u8 buffer[size + alignmask];
 71	u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 72
 73	memcpy(tmp, src, size);
 74	fn(tfm, tmp, tmp);
 75	memcpy(dst, tmp, size);
 
 
 
 
 
 
 
 
 76}
 77
 78static void cipher_encrypt_unaligned(struct crypto_tfm *tfm,
 79				     u8 *dst, const u8 *src)
 80{
 81	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
 82	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
 83
 84	if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
 85		cipher_crypt_unaligned(cipher->cia_encrypt, tfm, dst, src);
 86		return;
 87	}
 88
 89	cipher->cia_encrypt(tfm, dst, src);
 
 
 
 90}
 
 91
 92static void cipher_decrypt_unaligned(struct crypto_tfm *tfm,
 93				     u8 *dst, const u8 *src)
 94{
 95	unsigned long alignmask = crypto_tfm_alg_alignmask(tfm);
 96	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
 
 
 97
 98	if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
 99		cipher_crypt_unaligned(cipher->cia_decrypt, tfm, dst, src);
100		return;
101	}
102
103	cipher->cia_decrypt(tfm, dst, src);
104}
105
106int crypto_init_cipher_ops(struct crypto_tfm *tfm)
107{
108	struct cipher_tfm *ops = &tfm->crt_cipher;
109	struct cipher_alg *cipher = &tfm->__crt_alg->cra_cipher;
 
 
110
111	ops->cit_setkey = setkey;
112	ops->cit_encrypt_one = crypto_tfm_alg_alignmask(tfm) ?
113		cipher_encrypt_unaligned : cipher->cia_encrypt;
114	ops->cit_decrypt_one = crypto_tfm_alg_alignmask(tfm) ?
115		cipher_decrypt_unaligned : cipher->cia_decrypt;
116
117	return 0;
118}
119
/*
 * Teardown counterpart of crypto_init_cipher_ops().  The init path only
 * assigns function pointers and allocates nothing, so there is nothing
 * to release here.
 */
void crypto_exit_cipher_ops(struct crypto_tfm *tfm)
{
}
v6.9.4
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Cryptographic API.
  4 *
  5 * Single-block cipher operations.
  6 *
  7 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
  8 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 
 
 
 
 
 
  9 */
 10
 11#include <crypto/algapi.h>
 12#include <crypto/internal/cipher.h>
 13#include <linux/kernel.h>
 14#include <linux/crypto.h>
 15#include <linux/errno.h>
 16#include <linux/slab.h>
 17#include <linux/string.h>
 18#include "internal.h"
 19
 20static int setkey_unaligned(struct crypto_cipher *tfm, const u8 *key,
 21			    unsigned int keylen)
 22{
 23	struct cipher_alg *cia = crypto_cipher_alg(tfm);
 24	unsigned long alignmask = crypto_cipher_alignmask(tfm);
 25	int ret;
 26	u8 *buffer, *alignbuffer;
 27	unsigned long absize;
 28
 29	absize = keylen + alignmask;
 30	buffer = kmalloc(absize, GFP_ATOMIC);
 31	if (!buffer)
 32		return -ENOMEM;
 33
 34	alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 35	memcpy(alignbuffer, key, keylen);
 36	ret = cia->cia_setkey(crypto_cipher_tfm(tfm), alignbuffer, keylen);
 37	memset(alignbuffer, 0, keylen);
 38	kfree(buffer);
 39	return ret;
 40
 41}
 42
 43int crypto_cipher_setkey(struct crypto_cipher *tfm,
 44			 const u8 *key, unsigned int keylen)
 45{
 46	struct cipher_alg *cia = crypto_cipher_alg(tfm);
 47	unsigned long alignmask = crypto_cipher_alignmask(tfm);
 48
 49	if (keylen < cia->cia_min_keysize || keylen > cia->cia_max_keysize)
 
 
 50		return -EINVAL;
 
 51
 52	if ((unsigned long)key & alignmask)
 53		return setkey_unaligned(tfm, key, keylen);
 54
 55	return cia->cia_setkey(crypto_cipher_tfm(tfm), key, keylen);
 56}
 57EXPORT_SYMBOL_NS_GPL(crypto_cipher_setkey, CRYPTO_INTERNAL);
 58
 59static inline void cipher_crypt_one(struct crypto_cipher *tfm,
 60				    u8 *dst, const u8 *src, bool enc)
 
 
 61{
 62	unsigned long alignmask = crypto_cipher_alignmask(tfm);
 63	struct cipher_alg *cia = crypto_cipher_alg(tfm);
 64	void (*fn)(struct crypto_tfm *, u8 *, const u8 *) =
 65		enc ? cia->cia_encrypt : cia->cia_decrypt;
 66
 67	if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) {
 68		unsigned int bs = crypto_cipher_blocksize(tfm);
 69		u8 buffer[MAX_CIPHER_BLOCKSIZE + MAX_CIPHER_ALIGNMASK];
 70		u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1);
 71
 72		memcpy(tmp, src, bs);
 73		fn(crypto_cipher_tfm(tfm), tmp, tmp);
 74		memcpy(dst, tmp, bs);
 75	} else {
 76		fn(crypto_cipher_tfm(tfm), dst, src);
 77	}
 78}
 79
 80void crypto_cipher_encrypt_one(struct crypto_cipher *tfm,
 81			       u8 *dst, const u8 *src)
 82{
 83	cipher_crypt_one(tfm, dst, src, true);
 84}
 85EXPORT_SYMBOL_NS_GPL(crypto_cipher_encrypt_one, CRYPTO_INTERNAL);
 
 
 
 
 86
 87void crypto_cipher_decrypt_one(struct crypto_cipher *tfm,
 88			       u8 *dst, const u8 *src)
 89{
 90	cipher_crypt_one(tfm, dst, src, false);
 91}
 92EXPORT_SYMBOL_NS_GPL(crypto_cipher_decrypt_one, CRYPTO_INTERNAL);
 93
 94struct crypto_cipher *crypto_clone_cipher(struct crypto_cipher *cipher)
 
 95{
 96	struct crypto_tfm *tfm = crypto_cipher_tfm(cipher);
 97	struct crypto_alg *alg = tfm->__crt_alg;
 98	struct crypto_cipher *ncipher;
 99	struct crypto_tfm *ntfm;
100
101	if (alg->cra_init)
102		return ERR_PTR(-ENOSYS);
 
 
103
104	if (unlikely(!crypto_mod_get(alg)))
105		return ERR_PTR(-ESTALE);
106
107	ntfm = __crypto_alloc_tfmgfp(alg, CRYPTO_ALG_TYPE_CIPHER,
108				     CRYPTO_ALG_TYPE_MASK, GFP_ATOMIC);
109	if (IS_ERR(ntfm)) {
110		crypto_mod_put(alg);
111		return ERR_CAST(ntfm);
112	}
113
114	ntfm->crt_flags = tfm->crt_flags;
 
 
 
 
115
116	ncipher = __crypto_cipher_cast(ntfm);
 
117
118	return ncipher;
 
119}
120EXPORT_SYMBOL_GPL(crypto_clone_cipher);