   1// SPDX-License-Identifier: GPL-2.0+
   2/*
   3 * caam - Freescale FSL CAAM support for crypto API
   4 *
   5 * Copyright 2008-2011 Freescale Semiconductor, Inc.
   6 * Copyright 2016-2019, 2023 NXP
   7 *
   8 * Based on talitos crypto API driver.
   9 *
  10 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
  11 *
  12 * ---------------                     ---------------
  13 * | JobDesc #1  |-------------------->|  ShareDesc  |
  14 * | *(packet 1) |                     |   (PDB)     |
  15 * ---------------      |------------->|  (hashKey)  |
  16 *       .              |              | (cipherKey) |
  17 *       .              |    |-------->| (operation) |
  18 * ---------------      |    |         ---------------
  19 * | JobDesc #2  |------|    |
  20 * | *(packet 2) |           |
  21 * ---------------           |
  22 *       .                   |
  23 *       .                   |
  24 * ---------------           |
  25 * | JobDesc #3  |------------
  26 * | *(packet 3) |
  27 * ---------------
  28 *
  29 * The SharedDesc never changes for a connection unless rekeyed, but
  30 * each packet will likely be in a different place. So all we need
  31 * to know to process the packet is where the input is, where the
  32 * output goes, and what context we want to process with. Context is
  33 * in the SharedDesc, packet references in the JobDesc.
  34 *
  35 * So, a job desc looks like:
  36 *
  37 * ---------------------
  38 * | Header            |
  39 * | ShareDesc Pointer |
  40 * | SEQ_OUT_PTR       |
  41 * | (output buffer)   |
  42 * | (output length)   |
  43 * | SEQ_IN_PTR        |
  44 * | (input buffer)    |
  45 * | (input length)    |
  46 * ---------------------
  47 */
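/*
 * Illustrative sketch (editorial addition, not upstream code): the job
 * descriptor pictured above is emitted with the desc_constr.h helpers, in
 * the order init_aead_job() and init_skcipher_job() below use them: the
 * header plus ShareDesc pointer first, then the two sequence pointers:
 *
 *	len = desc_len(sh_desc);
 *	init_job_desc_shared(desc, sh_desc_dma, len,
 *			     HDR_SHARE_DEFER | HDR_REVERSE);
 *	append_seq_in_ptr(desc, src_dma, in_len, in_options);
 *	append_seq_out_ptr(desc, dst_dma, out_len, out_options);
 */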
  48
  49#include "compat.h"
  50
  51#include "regs.h"
  52#include "intern.h"
  53#include "desc_constr.h"
  54#include "jr.h"
  55#include "error.h"
  56#include "sg_sw_sec4.h"
  57#include "key_gen.h"
  58#include "caamalg_desc.h"
  59#include <asm/unaligned.h>
  60#include <crypto/internal/aead.h>
  61#include <crypto/internal/engine.h>
  62#include <crypto/internal/skcipher.h>
  63#include <crypto/xts.h>
  64#include <linux/dma-mapping.h>
  65#include <linux/device.h>
  66#include <linux/err.h>
  67#include <linux/module.h>
  68#include <linux/kernel.h>
  69#include <linux/slab.h>
  70#include <linux/string.h>
  71
  72/*
  73 * crypto alg
  74 */
  75#define CAAM_CRA_PRIORITY		3000
  76/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
  77#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
  78					 CTR_RFC3686_NONCE_SIZE + \
  79					 SHA512_DIGEST_SIZE * 2)
  80
  81#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
  82#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
  83					 CAAM_CMD_SZ * 4)
  84#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
  85					 CAAM_CMD_SZ * 5)
  86
  87#define CHACHAPOLY_DESC_JOB_IO_LEN	(AEAD_DESC_JOB_IO_LEN + CAAM_CMD_SZ * 6)
  88
  89#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN_MIN)
  90#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
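/*
 * Editorial worked example: with CAAM_CMD_SZ == sizeof(u32) == 4 bytes, the
 * 64-word descriptor buffer holds 64 * 4 == 256 bytes; subtracting what
 * DESC_JOB_IO_LEN_MIN reserves for the job descriptor's I/O commands leaves
 * DESC_MAX_USED_BYTES for a shared descriptor, i.e. DESC_MAX_USED_LEN
 * 32-bit command words.
 */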
  91
  92struct caam_alg_entry {
  93	int class1_alg_type;
  94	int class2_alg_type;
  95	bool rfc3686;
  96	bool geniv;
  97	bool nodkp;
  98};
  99
 100struct caam_aead_alg {
 101	struct aead_engine_alg aead;
 102	struct caam_alg_entry caam;
 103	bool registered;
 104};
 105
 106struct caam_skcipher_alg {
 107	struct skcipher_engine_alg skcipher;
 108	struct caam_alg_entry caam;
 109	bool registered;
 110};
 111
 112/*
 113 * per-session context
 114 */
 115struct caam_ctx {
 116	u32 sh_desc_enc[DESC_MAX_USED_LEN];
 117	u32 sh_desc_dec[DESC_MAX_USED_LEN];
 118	u8 key[CAAM_MAX_KEY_SIZE];
 119	dma_addr_t sh_desc_enc_dma;
 120	dma_addr_t sh_desc_dec_dma;
 121	dma_addr_t key_dma;
 122	enum dma_data_direction dir;
 123	struct device *jrdev;
 124	struct alginfo adata;
 125	struct alginfo cdata;
 126	unsigned int authsize;
 127	bool xts_key_fallback;
 128	struct crypto_skcipher *fallback;
 129};
 130
 131struct caam_skcipher_req_ctx {
 132	struct skcipher_edesc *edesc;
 133	struct skcipher_request fallback_req;
 134};
 135
 136struct caam_aead_req_ctx {
 137	struct aead_edesc *edesc;
 138};
 139
 140static int aead_null_set_sh_desc(struct crypto_aead *aead)
 141{
 142	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 143	struct device *jrdev = ctx->jrdev;
 144	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 145	u32 *desc;
 146	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
 147			ctx->adata.keylen_pad;
 148
 149	/*
 150	 * Job Descriptor and Shared Descriptors
 151	 * must all fit into the 64-word Descriptor h/w Buffer
 152	 */
 153	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
 154		ctx->adata.key_inline = true;
 155		ctx->adata.key_virt = ctx->key;
 156	} else {
 157		ctx->adata.key_inline = false;
 158		ctx->adata.key_dma = ctx->key_dma;
 159	}
 160
 161	/* aead_encrypt shared descriptor */
 162	desc = ctx->sh_desc_enc;
 163	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
 164				    ctrlpriv->era);
 165	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 166				   desc_bytes(desc), ctx->dir);
 167
 168	/*
 169	 * Job Descriptor and Shared Descriptors
 170	 * must all fit into the 64-word Descriptor h/w Buffer
 171	 */
 172	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
 173		ctx->adata.key_inline = true;
 174		ctx->adata.key_virt = ctx->key;
 175	} else {
 176		ctx->adata.key_inline = false;
 177		ctx->adata.key_dma = ctx->key_dma;
 178	}
 179
 180	/* aead_decrypt shared descriptor */
 181	desc = ctx->sh_desc_dec;
 182	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
 183				    ctrlpriv->era);
 184	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 185				   desc_bytes(desc), ctx->dir);
 186
 187	return 0;
 188}
 189
 190static int aead_set_sh_desc(struct crypto_aead *aead)
 191{
 192	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
 193						 struct caam_aead_alg,
 194						 aead.base);
 195	unsigned int ivsize = crypto_aead_ivsize(aead);
 196	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 197	struct device *jrdev = ctx->jrdev;
 198	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 199	u32 ctx1_iv_off = 0;
 200	u32 *desc, *nonce = NULL;
 201	u32 inl_mask;
 202	unsigned int data_len[2];
 203	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
 204			       OP_ALG_AAI_CTR_MOD128);
 205	const bool is_rfc3686 = alg->caam.rfc3686;
 206
 207	if (!ctx->authsize)
 208		return 0;
 209
 210	/* NULL encryption / decryption */
 211	if (!ctx->cdata.keylen)
 212		return aead_null_set_sh_desc(aead);
 213
 214	/*
 215	 * AES-CTR needs to load IV in CONTEXT1 reg
 216	 * at an offset of 128 bits (16 bytes)
 217	 * CONTEXT1[255:128] = IV
 218	 */
 219	if (ctr_mode)
 220		ctx1_iv_off = 16;
 221
 222	/*
 223	 * RFC3686 specific:
 224	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
 225	 */
 226	if (is_rfc3686) {
 227		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
 228		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
 229				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
 230	}
 231
 232	/*
 233	 * In case |user key| > |derived key|, using DKP<imm,imm>
 234	 * would result in invalid opcodes (last bytes of user key) in
 235	 * the resulting descriptor. Use DKP<ptr,imm> instead => both
 236	 * virtual and dma key addresses are needed.
 237	 */
 238	ctx->adata.key_virt = ctx->key;
 239	ctx->adata.key_dma = ctx->key_dma;
 240
 241	ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
 242	ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
 243
 244	data_len[0] = ctx->adata.keylen_pad;
 245	data_len[1] = ctx->cdata.keylen;
 246
 247	if (alg->caam.geniv)
 248		goto skip_enc;
 249
 250	/*
 251	 * Job Descriptor and Shared Descriptors
 252	 * must all fit into the 64-word Descriptor h/w Buffer
 253	 */
 254	if (desc_inline_query(DESC_AEAD_ENC_LEN +
 255			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
 256			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
 257			      ARRAY_SIZE(data_len)) < 0)
 258		return -EINVAL;
 259
 260	ctx->adata.key_inline = !!(inl_mask & 1);
 261	ctx->cdata.key_inline = !!(inl_mask & 2);
 262
 263	/* aead_encrypt shared descriptor */
 264	desc = ctx->sh_desc_enc;
 265	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
 266			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
 267			       false, ctrlpriv->era);
 268	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 269				   desc_bytes(desc), ctx->dir);
 270
 271skip_enc:
 272	/*
 273	 * Job Descriptor and Shared Descriptors
 274	 * must all fit into the 64-word Descriptor h/w Buffer
 275	 */
 276	if (desc_inline_query(DESC_AEAD_DEC_LEN +
 277			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
 278			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
 279			      ARRAY_SIZE(data_len)) < 0)
 280		return -EINVAL;
 281
 282	ctx->adata.key_inline = !!(inl_mask & 1);
 283	ctx->cdata.key_inline = !!(inl_mask & 2);
 284
 285	/* aead_decrypt shared descriptor */
 286	desc = ctx->sh_desc_dec;
 287	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
 288			       ctx->authsize, alg->caam.geniv, is_rfc3686,
 289			       nonce, ctx1_iv_off, false, ctrlpriv->era);
 290	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 291				   desc_bytes(desc), ctx->dir);
 292
 293	if (!alg->caam.geniv)
 294		goto skip_givenc;
 295
 296	/*
 297	 * Job Descriptor and Shared Descriptors
 298	 * must all fit into the 64-word Descriptor h/w Buffer
 299	 */
 300	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
 301			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
 302			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
 303			      ARRAY_SIZE(data_len)) < 0)
 304		return -EINVAL;
 305
 306	ctx->adata.key_inline = !!(inl_mask & 1);
 307	ctx->cdata.key_inline = !!(inl_mask & 2);
 308
 309	/* aead_givencrypt shared descriptor */
 310	desc = ctx->sh_desc_enc;
 311	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
 312				  ctx->authsize, is_rfc3686, nonce,
 313				  ctx1_iv_off, false, ctrlpriv->era);
 314	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 315				   desc_bytes(desc), ctx->dir);
 316
 317skip_givenc:
 318	return 0;
 319}
 320
 321static int aead_setauthsize(struct crypto_aead *authenc,
 322				    unsigned int authsize)
 323{
 324	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
 325
 326	ctx->authsize = authsize;
 327	aead_set_sh_desc(authenc);
 328
 329	return 0;
 330}
 331
 332static int gcm_set_sh_desc(struct crypto_aead *aead)
 333{
 334	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 335	struct device *jrdev = ctx->jrdev;
 336	unsigned int ivsize = crypto_aead_ivsize(aead);
 337	u32 *desc;
 338	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
 339			ctx->cdata.keylen;
 340
 341	if (!ctx->cdata.keylen || !ctx->authsize)
 342		return 0;
 343
 344	/*
 345	 * AES GCM encrypt shared descriptor
 346	 * Job Descriptor and Shared Descriptor
 347	 * must fit into the 64-word Descriptor h/w Buffer
 348	 */
 349	if (rem_bytes >= DESC_GCM_ENC_LEN) {
 350		ctx->cdata.key_inline = true;
 351		ctx->cdata.key_virt = ctx->key;
 352	} else {
 353		ctx->cdata.key_inline = false;
 354		ctx->cdata.key_dma = ctx->key_dma;
 355	}
 356
 357	desc = ctx->sh_desc_enc;
 358	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
 359	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 360				   desc_bytes(desc), ctx->dir);
 361
 362	/*
 363	 * Job Descriptor and Shared Descriptors
 364	 * must all fit into the 64-word Descriptor h/w Buffer
 365	 */
 366	if (rem_bytes >= DESC_GCM_DEC_LEN) {
 367		ctx->cdata.key_inline = true;
 368		ctx->cdata.key_virt = ctx->key;
 369	} else {
 370		ctx->cdata.key_inline = false;
 371		ctx->cdata.key_dma = ctx->key_dma;
 372	}
 373
 374	desc = ctx->sh_desc_dec;
 375	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
 376	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 377				   desc_bytes(desc), ctx->dir);
 378
 379	return 0;
 380}
 381
 382static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
 383{
 384	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
 385	int err;
 386
 387	err = crypto_gcm_check_authsize(authsize);
 388	if (err)
 389		return err;
 390
 391	ctx->authsize = authsize;
 392	gcm_set_sh_desc(authenc);
 393
 394	return 0;
 395}
 396
 397static int rfc4106_set_sh_desc(struct crypto_aead *aead)
 398{
 399	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 400	struct device *jrdev = ctx->jrdev;
 401	unsigned int ivsize = crypto_aead_ivsize(aead);
 402	u32 *desc;
 403	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
 404			ctx->cdata.keylen;
 405
 406	if (!ctx->cdata.keylen || !ctx->authsize)
 407		return 0;
 408
 409	/*
 410	 * RFC4106 encrypt shared descriptor
 411	 * Job Descriptor and Shared Descriptor
 412	 * must fit into the 64-word Descriptor h/w Buffer
 413	 */
 414	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
 415		ctx->cdata.key_inline = true;
 416		ctx->cdata.key_virt = ctx->key;
 417	} else {
 418		ctx->cdata.key_inline = false;
 419		ctx->cdata.key_dma = ctx->key_dma;
 420	}
 421
 422	desc = ctx->sh_desc_enc;
 423	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
 424				  false);
 425	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 426				   desc_bytes(desc), ctx->dir);
 427
 428	/*
 429	 * Job Descriptor and Shared Descriptors
 430	 * must all fit into the 64-word Descriptor h/w Buffer
 431	 */
 432	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
 433		ctx->cdata.key_inline = true;
 434		ctx->cdata.key_virt = ctx->key;
 435	} else {
 436		ctx->cdata.key_inline = false;
 437		ctx->cdata.key_dma = ctx->key_dma;
 438	}
 439
 440	desc = ctx->sh_desc_dec;
 441	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
 442				  false);
 443	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 444				   desc_bytes(desc), ctx->dir);
 445
 446	return 0;
 447}
 448
 449static int rfc4106_setauthsize(struct crypto_aead *authenc,
 450			       unsigned int authsize)
 451{
 452	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
 453	int err;
 454
 455	err = crypto_rfc4106_check_authsize(authsize);
 456	if (err)
 457		return err;
 458
 459	ctx->authsize = authsize;
 460	rfc4106_set_sh_desc(authenc);
 461
 462	return 0;
 463}
 464
 465static int rfc4543_set_sh_desc(struct crypto_aead *aead)
 466{
 467	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 468	struct device *jrdev = ctx->jrdev;
 469	unsigned int ivsize = crypto_aead_ivsize(aead);
 470	u32 *desc;
 471	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
 472			ctx->cdata.keylen;
 473
 474	if (!ctx->cdata.keylen || !ctx->authsize)
 475		return 0;
 476
 477	/*
 478	 * RFC4543 encrypt shared descriptor
 479	 * Job Descriptor and Shared Descriptor
 480	 * must fit into the 64-word Descriptor h/w Buffer
 481	 */
 482	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
 483		ctx->cdata.key_inline = true;
 484		ctx->cdata.key_virt = ctx->key;
 485	} else {
 486		ctx->cdata.key_inline = false;
 487		ctx->cdata.key_dma = ctx->key_dma;
 488	}
 489
 490	desc = ctx->sh_desc_enc;
 491	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
 492				  false);
 493	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 494				   desc_bytes(desc), ctx->dir);
 495
 496	/*
 497	 * Job Descriptor and Shared Descriptors
 498	 * must all fit into the 64-word Descriptor h/w Buffer
 499	 */
 500	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
 501		ctx->cdata.key_inline = true;
 502		ctx->cdata.key_virt = ctx->key;
 503	} else {
 504		ctx->cdata.key_inline = false;
 505		ctx->cdata.key_dma = ctx->key_dma;
 506	}
 507
 508	desc = ctx->sh_desc_dec;
 509	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
 510				  false);
 511	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 512				   desc_bytes(desc), ctx->dir);
 513
 514	return 0;
 515}
 516
 517static int rfc4543_setauthsize(struct crypto_aead *authenc,
 518			       unsigned int authsize)
 519{
 520	struct caam_ctx *ctx = crypto_aead_ctx_dma(authenc);
 521
 522	if (authsize != 16)
 523		return -EINVAL;
 524
 525	ctx->authsize = authsize;
 526	rfc4543_set_sh_desc(authenc);
 527
 528	return 0;
 529}
 530
 531static int chachapoly_set_sh_desc(struct crypto_aead *aead)
 532{
 533	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 534	struct device *jrdev = ctx->jrdev;
 535	unsigned int ivsize = crypto_aead_ivsize(aead);
 536	u32 *desc;
 537
 538	if (!ctx->cdata.keylen || !ctx->authsize)
 539		return 0;
 540
 541	desc = ctx->sh_desc_enc;
 542	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
 543			       ctx->authsize, true, false);
 544	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 545				   desc_bytes(desc), ctx->dir);
 546
 547	desc = ctx->sh_desc_dec;
 548	cnstr_shdsc_chachapoly(desc, &ctx->cdata, &ctx->adata, ivsize,
 549			       ctx->authsize, false, false);
 550	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 551				   desc_bytes(desc), ctx->dir);
 552
 553	return 0;
 554}
 555
 556static int chachapoly_setauthsize(struct crypto_aead *aead,
 557				  unsigned int authsize)
 558{
 559	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 560
 561	if (authsize != POLY1305_DIGEST_SIZE)
 562		return -EINVAL;
 563
 564	ctx->authsize = authsize;
 565	return chachapoly_set_sh_desc(aead);
 566}
 567
 568static int chachapoly_setkey(struct crypto_aead *aead, const u8 *key,
 569			     unsigned int keylen)
 570{
 571	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 572	unsigned int ivsize = crypto_aead_ivsize(aead);
 573	unsigned int saltlen = CHACHAPOLY_IV_SIZE - ivsize;
 574
 575	if (keylen != CHACHA_KEY_SIZE + saltlen)
 576		return -EINVAL;
 577
 578	memcpy(ctx->key, key, keylen);
 579	ctx->cdata.key_virt = ctx->key;
 580	ctx->cdata.keylen = keylen - saltlen;
 581
 582	return chachapoly_set_sh_desc(aead);
 583}
 584
 585static int aead_setkey(struct crypto_aead *aead,
 586			       const u8 *key, unsigned int keylen)
 587{
 588	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 589	struct device *jrdev = ctx->jrdev;
 590	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 591	struct crypto_authenc_keys keys;
 592	int ret = 0;
 593
 594	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
 595		goto badkey;
 596
 597	dev_dbg(jrdev, "keylen %d enckeylen %d authkeylen %d\n",
 598	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
 599	       keys.authkeylen);
 600	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
 601			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
 602
 603	/*
 604	 * If DKP is supported, use it in the shared descriptor to generate
 605	 * the split key.
 606	 */
 607	if (ctrlpriv->era >= 6) {
 608		ctx->adata.keylen = keys.authkeylen;
 609		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
 610						      OP_ALG_ALGSEL_MASK);
 611
 612		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
 613			goto badkey;
 614
 615		memcpy(ctx->key, keys.authkey, keys.authkeylen);
 616		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
 617		       keys.enckeylen);
 618		dma_sync_single_for_device(jrdev, ctx->key_dma,
 619					   ctx->adata.keylen_pad +
 620					   keys.enckeylen, ctx->dir);
 621		goto skip_split_key;
 622	}
 623
 624	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
 625			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
 626			    keys.enckeylen);
 627	if (ret) {
 628		goto badkey;
 629	}
 630
 631	/* append encryption key to auth split key */
 632	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
 633	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
 634				   keys.enckeylen, ctx->dir);
 635
 636	print_hex_dump_debug("ctx.key@"__stringify(__LINE__)": ",
 637			     DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
 638			     ctx->adata.keylen_pad + keys.enckeylen, 1);
 639
 640skip_split_key:
 641	ctx->cdata.keylen = keys.enckeylen;
 642	memzero_explicit(&keys, sizeof(keys));
 643	return aead_set_sh_desc(aead);
 644badkey:
 645	memzero_explicit(&keys, sizeof(keys));
 646	return -EINVAL;
 647}
 648
 649static int des3_aead_setkey(struct crypto_aead *aead, const u8 *key,
 650			    unsigned int keylen)
 651{
 652	struct crypto_authenc_keys keys;
 653	int err;
 654
 655	err = crypto_authenc_extractkeys(&keys, key, keylen);
 656	if (unlikely(err))
 657		return err;
 658
 659	err = verify_aead_des3_key(aead, keys.enckey, keys.enckeylen) ?:
 660	      aead_setkey(aead, key, keylen);
 661
 662	memzero_explicit(&keys, sizeof(keys));
 663	return err;
 664}
 665
 666static int gcm_setkey(struct crypto_aead *aead,
 667		      const u8 *key, unsigned int keylen)
 668{
 669	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 670	struct device *jrdev = ctx->jrdev;
 671	int err;
 672
 673	err = aes_check_keylen(keylen);
 674	if (err)
 675		return err;
 676
 677	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
 678			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
 679
 680	memcpy(ctx->key, key, keylen);
 681	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
 682	ctx->cdata.keylen = keylen;
 683
 684	return gcm_set_sh_desc(aead);
 685}
 686
 687static int rfc4106_setkey(struct crypto_aead *aead,
 688			  const u8 *key, unsigned int keylen)
 689{
 690	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 691	struct device *jrdev = ctx->jrdev;
 692	int err;
 693
 694	err = aes_check_keylen(keylen - 4);
 695	if (err)
 696		return err;
 697
 698	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
 699			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
 700
 701	memcpy(ctx->key, key, keylen);
 702
 703	/*
 704	 * The last four bytes of the key material are used as the salt value
 705	 * in the nonce. Update the AES key length.
 706	 */
 707	ctx->cdata.keylen = keylen - 4;
 708	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
 709				   ctx->dir);
 710	return rfc4106_set_sh_desc(aead);
 711}
 712
 713static int rfc4543_setkey(struct crypto_aead *aead,
 714			  const u8 *key, unsigned int keylen)
 715{
 716	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
 717	struct device *jrdev = ctx->jrdev;
 718	int err;
 719
 720	err = aes_check_keylen(keylen - 4);
 721	if (err)
 722		return err;
 723
 724	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
 725			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
 726
 727	memcpy(ctx->key, key, keylen);
 728
 729	/*
 730	 * The last four bytes of the key material are used as the salt value
 731	 * in the nonce. Update the AES key length.
 732	 */
 733	ctx->cdata.keylen = keylen - 4;
 734	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
 735				   ctx->dir);
 736	return rfc4543_set_sh_desc(aead);
 737}
 738
 739static int skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
 740			   unsigned int keylen, const u32 ctx1_iv_off)
 741{
 742	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
 743	struct caam_skcipher_alg *alg =
 744		container_of(crypto_skcipher_alg(skcipher), typeof(*alg),
 745			     skcipher.base);
 746	struct device *jrdev = ctx->jrdev;
 747	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
 748	u32 *desc;
 749	const bool is_rfc3686 = alg->caam.rfc3686;
 750
 751	print_hex_dump_debug("key in @"__stringify(__LINE__)": ",
 752			     DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
 753
 754	ctx->cdata.keylen = keylen;
 755	ctx->cdata.key_virt = key;
 756	ctx->cdata.key_inline = true;
 757
 758	/* skcipher_encrypt shared descriptor */
 759	desc = ctx->sh_desc_enc;
 760	cnstr_shdsc_skcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
 761				   ctx1_iv_off);
 762	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 763				   desc_bytes(desc), ctx->dir);
 764
 765	/* skcipher_decrypt shared descriptor */
 766	desc = ctx->sh_desc_dec;
 767	cnstr_shdsc_skcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
 768				   ctx1_iv_off);
 769	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 770				   desc_bytes(desc), ctx->dir);
 771
 772	return 0;
 773}
 774
 775static int aes_skcipher_setkey(struct crypto_skcipher *skcipher,
 776			       const u8 *key, unsigned int keylen)
 777{
 778	int err;
 779
 780	err = aes_check_keylen(keylen);
 781	if (err)
 782		return err;
 783
 784	return skcipher_setkey(skcipher, key, keylen, 0);
 785}
 786
 787static int rfc3686_skcipher_setkey(struct crypto_skcipher *skcipher,
 788				   const u8 *key, unsigned int keylen)
 789{
 790	u32 ctx1_iv_off;
 791	int err;
 792
 793	/*
 794	 * RFC3686 specific:
 795	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
 796	 *	| *key = {KEY, NONCE}
 797	 */
 798	ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
 799	keylen -= CTR_RFC3686_NONCE_SIZE;
 800
 801	err = aes_check_keylen(keylen);
 802	if (err)
 803		return err;
 804
 805	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
 806}
 807
 808static int ctr_skcipher_setkey(struct crypto_skcipher *skcipher,
 809			       const u8 *key, unsigned int keylen)
 810{
 811	u32 ctx1_iv_off;
 812	int err;
 813
 814	/*
 815	 * AES-CTR needs to load IV in CONTEXT1 reg
 816	 * at an offset of 128 bits (16 bytes)
 817	 * CONTEXT1[255:128] = IV
 818	 */
 819	ctx1_iv_off = 16;
 820
 821	err = aes_check_keylen(keylen);
 822	if (err)
 823		return err;
 824
 825	return skcipher_setkey(skcipher, key, keylen, ctx1_iv_off);
 826}
 827
 828static int des_skcipher_setkey(struct crypto_skcipher *skcipher,
 829			       const u8 *key, unsigned int keylen)
 830{
 831	return verify_skcipher_des_key(skcipher, key) ?:
 832	       skcipher_setkey(skcipher, key, keylen, 0);
 833}
 834
 835static int des3_skcipher_setkey(struct crypto_skcipher *skcipher,
 836				const u8 *key, unsigned int keylen)
 837{
 838	return verify_skcipher_des3_key(skcipher, key) ?:
 839	       skcipher_setkey(skcipher, key, keylen, 0);
 840}
 841
 842static int xts_skcipher_setkey(struct crypto_skcipher *skcipher, const u8 *key,
 843			       unsigned int keylen)
 844{
 845	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
 846	struct device *jrdev = ctx->jrdev;
 847	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 848	u32 *desc;
 849	int err;
 850
 851	err = xts_verify_key(skcipher, key, keylen);
 852	if (err) {
 853		dev_dbg(jrdev, "key size mismatch\n");
 854		return err;
 855	}
 856
 857	if (keylen != 2 * AES_KEYSIZE_128 && keylen != 2 * AES_KEYSIZE_256)
 858		ctx->xts_key_fallback = true;
 859
 860	if (ctrlpriv->era <= 8 || ctx->xts_key_fallback) {
 861		err = crypto_skcipher_setkey(ctx->fallback, key, keylen);
 862		if (err)
 863			return err;
 864	}
 865
 866	ctx->cdata.keylen = keylen;
 867	ctx->cdata.key_virt = key;
 868	ctx->cdata.key_inline = true;
 869
 870	/* xts_skcipher_encrypt shared descriptor */
 871	desc = ctx->sh_desc_enc;
 872	cnstr_shdsc_xts_skcipher_encap(desc, &ctx->cdata);
 873	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 874				   desc_bytes(desc), ctx->dir);
 875
 876	/* xts_skcipher_decrypt shared descriptor */
 877	desc = ctx->sh_desc_dec;
 878	cnstr_shdsc_xts_skcipher_decap(desc, &ctx->cdata);
 879	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 880				   desc_bytes(desc), ctx->dir);
 881
 882	return 0;
 883}
 884
 885/*
 886 * aead_edesc - s/w-extended aead descriptor
 887 * @src_nents: number of segments in input s/w scatterlist
 888 * @dst_nents: number of segments in output s/w scatterlist
 889 * @mapped_src_nents: number of segments in input h/w link table
 890 * @mapped_dst_nents: number of segments in output h/w link table
 891 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 892 * @bklog: stored to determine if the request needs backlog
 893 * @sec4_sg_dma: bus physical mapped address of h/w link table
 894 * @sec4_sg: pointer to h/w link table
 895 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 896 */
 897struct aead_edesc {
 898	int src_nents;
 899	int dst_nents;
 900	int mapped_src_nents;
 901	int mapped_dst_nents;
 902	int sec4_sg_bytes;
 903	bool bklog;
 904	dma_addr_t sec4_sg_dma;
 905	struct sec4_sg_entry *sec4_sg;
 906	u32 hw_desc[];
 907};
 908
 909/*
 910 * skcipher_edesc - s/w-extended skcipher descriptor
 911 * @src_nents: number of segments in input s/w scatterlist
 912 * @dst_nents: number of segments in output s/w scatterlist
 913 * @mapped_src_nents: number of segments in input h/w link table
 914 * @mapped_dst_nents: number of segments in output h/w link table
 915 * @iv_dma: dma address of iv for checking continuity and link table
 916 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 917 * @bklog: stored to determine if the request needs backlog
 918 * @sec4_sg_dma: bus physical mapped address of h/w link table
 919 * @sec4_sg: pointer to h/w link table
 920 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 921 *	     and IV
 922 */
 923struct skcipher_edesc {
 924	int src_nents;
 925	int dst_nents;
 926	int mapped_src_nents;
 927	int mapped_dst_nents;
 928	dma_addr_t iv_dma;
 929	int sec4_sg_bytes;
 930	bool bklog;
 931	dma_addr_t sec4_sg_dma;
 932	struct sec4_sg_entry *sec4_sg;
 933	u32 hw_desc[];
 934};
 935
 936static void caam_unmap(struct device *dev, struct scatterlist *src,
 937		       struct scatterlist *dst, int src_nents,
 938		       int dst_nents,
 939		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
 940		       int sec4_sg_bytes)
 941{
 942	if (dst != src) {
 943		if (src_nents)
 944			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
 945		if (dst_nents)
 946			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
 947	} else {
 948		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
 949	}
 950
 951	if (iv_dma)
 952		dma_unmap_single(dev, iv_dma, ivsize, DMA_BIDIRECTIONAL);
 953	if (sec4_sg_bytes)
 954		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
 955				 DMA_TO_DEVICE);
 956}
 957
 958static void aead_unmap(struct device *dev,
 959		       struct aead_edesc *edesc,
 960		       struct aead_request *req)
 961{
 962	caam_unmap(dev, req->src, req->dst,
 963		   edesc->src_nents, edesc->dst_nents, 0, 0,
 964		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
 965}
 966
 967static void skcipher_unmap(struct device *dev, struct skcipher_edesc *edesc,
 968			   struct skcipher_request *req)
 969{
 970	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
 971	int ivsize = crypto_skcipher_ivsize(skcipher);
 972
 973	caam_unmap(dev, req->src, req->dst,
 974		   edesc->src_nents, edesc->dst_nents,
 975		   edesc->iv_dma, ivsize,
 976		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
 977}
 978
 979static void aead_crypt_done(struct device *jrdev, u32 *desc, u32 err,
 980			    void *context)
 981{
 982	struct aead_request *req = context;
 983	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
 984	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
 985	struct aead_edesc *edesc;
 986	int ecode = 0;
 987	bool has_bklog;
 988
 989	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
 990
 991	edesc = rctx->edesc;
 992	has_bklog = edesc->bklog;
 993
 994	if (err)
 995		ecode = caam_jr_strstatus(jrdev, err);
 996
 997	aead_unmap(jrdev, edesc, req);
 998
 999	kfree(edesc);
1000
1001	/*
1002	 * If no backlog flag, the completion of the request is done
1003	 * by CAAM, not crypto engine.
1004	 */
1005	if (!has_bklog)
1006		aead_request_complete(req, ecode);
1007	else
1008		crypto_finalize_aead_request(jrp->engine, req, ecode);
1009}
1010
1011static inline u8 *skcipher_edesc_iv(struct skcipher_edesc *edesc)
1012{
1013
1014	return PTR_ALIGN((u8 *)edesc->sec4_sg + edesc->sec4_sg_bytes,
1015			 dma_get_cache_alignment());
1016}
1017
1018static void skcipher_crypt_done(struct device *jrdev, u32 *desc, u32 err,
1019				void *context)
1020{
1021	struct skcipher_request *req = context;
1022	struct skcipher_edesc *edesc;
1023	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1024	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1025	struct caam_drv_private_jr *jrp = dev_get_drvdata(jrdev);
1026	int ivsize = crypto_skcipher_ivsize(skcipher);
1027	int ecode = 0;
1028	bool has_bklog;
1029
1030	dev_dbg(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
1031
1032	edesc = rctx->edesc;
1033	has_bklog = edesc->bklog;
1034	if (err)
1035		ecode = caam_jr_strstatus(jrdev, err);
1036
1037	skcipher_unmap(jrdev, edesc, req);
1038
1039	/*
1040	 * The crypto API expects us to set the IV (req->iv) to the last
1041	 * ciphertext block (CBC mode) or last counter (CTR mode).
1042	 * This is used e.g. by the CTS mode.
1043	 */
1044	if (ivsize && !ecode) {
1045		memcpy(req->iv, skcipher_edesc_iv(edesc), ivsize);
1046
1047		print_hex_dump_debug("dstiv  @" __stringify(__LINE__)": ",
1048				     DUMP_PREFIX_ADDRESS, 16, 4, req->iv,
1049				     ivsize, 1);
1050	}
1051
1052	caam_dump_sg("dst    @" __stringify(__LINE__)": ",
1053		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
1054		     edesc->dst_nents > 1 ? 100 : req->cryptlen, 1);
1055
1056	kfree(edesc);
1057
1058	/*
1059	 * If no backlog flag, the completion of the request is done
1060	 * by CAAM, not crypto engine.
1061	 */
1062	if (!has_bklog)
1063		skcipher_request_complete(req, ecode);
1064	else
1065		crypto_finalize_skcipher_request(jrp->engine, req, ecode);
1066}
1067
1068/*
1069 * Fill in aead job descriptor
1070 */
1071static void init_aead_job(struct aead_request *req,
1072			  struct aead_edesc *edesc,
1073			  bool all_contig, bool encrypt)
1074{
1075	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1076	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1077	int authsize = ctx->authsize;
1078	u32 *desc = edesc->hw_desc;
1079	u32 out_options, in_options;
1080	dma_addr_t dst_dma, src_dma;
1081	int len, sec4_sg_index = 0;
1082	dma_addr_t ptr;
1083	u32 *sh_desc;
1084
1085	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1086	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1087
1088	len = desc_len(sh_desc);
1089	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1090
1091	if (all_contig) {
1092		src_dma = edesc->mapped_src_nents ? sg_dma_address(req->src) :
1093						    0;
1094		in_options = 0;
1095	} else {
1096		src_dma = edesc->sec4_sg_dma;
1097		sec4_sg_index += edesc->mapped_src_nents;
1098		in_options = LDST_SGF;
1099	}
1100
1101	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
1102			  in_options);
1103
1104	dst_dma = src_dma;
1105	out_options = in_options;
1106
1107	if (unlikely(req->src != req->dst)) {
1108		if (!edesc->mapped_dst_nents) {
1109			dst_dma = 0;
1110			out_options = 0;
1111		} else if (edesc->mapped_dst_nents == 1) {
1112			dst_dma = sg_dma_address(req->dst);
1113			out_options = 0;
1114		} else {
1115			dst_dma = edesc->sec4_sg_dma +
1116				  sec4_sg_index *
1117				  sizeof(struct sec4_sg_entry);
1118			out_options = LDST_SGF;
1119		}
1120	}
1121
1122	if (encrypt)
1123		append_seq_out_ptr(desc, dst_dma,
1124				   req->assoclen + req->cryptlen + authsize,
1125				   out_options);
1126	else
1127		append_seq_out_ptr(desc, dst_dma,
1128				   req->assoclen + req->cryptlen - authsize,
1129				   out_options);
1130}
1131
1132static void init_gcm_job(struct aead_request *req,
1133			 struct aead_edesc *edesc,
1134			 bool all_contig, bool encrypt)
1135{
1136	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1137	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1138	unsigned int ivsize = crypto_aead_ivsize(aead);
1139	u32 *desc = edesc->hw_desc;
1140	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
1141	unsigned int last;
1142
1143	init_aead_job(req, edesc, all_contig, encrypt);
1144	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1145
1146	/* BUG This should not be specific to generic GCM. */
1147	last = 0;
1148	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1149		last = FIFOLD_TYPE_LAST1;
1150
1151	/* Read GCM IV */
1152	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
1153			 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
1154	/* Append Salt */
1155	if (!generic_gcm)
1156		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
1157	/* Append IV */
1158	append_data(desc, req->iv, ivsize);
1159	/* End of blank commands */
1160}
1161
1162static void init_chachapoly_job(struct aead_request *req,
1163				struct aead_edesc *edesc, bool all_contig,
1164				bool encrypt)
1165{
1166	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1167	unsigned int ivsize = crypto_aead_ivsize(aead);
1168	unsigned int assoclen = req->assoclen;
1169	u32 *desc = edesc->hw_desc;
1170	u32 ctx_iv_off = 4;
1171
1172	init_aead_job(req, edesc, all_contig, encrypt);
1173
1174	if (ivsize != CHACHAPOLY_IV_SIZE) {
1175		/* IPsec specific: CONTEXT1[223:128] = {NONCE, IV} */
1176		ctx_iv_off += 4;
1177
1178		/*
1179		 * The associated data already includes the IV, but we need
1180		 * to skip it when authenticating or encrypting...
1181		 */
1182		assoclen -= ivsize;
1183	}
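	/*
	 * Editorial example: for rfc7539esp(chacha20,poly1305) the IV is
	 * 8 bytes rather than 12, so ctx_iv_off becomes 8 and the 8 IV
	 * bytes trailing the associated data are dropped from assoclen
	 * before being authenticated.
	 */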
1184
1185	append_math_add_imm_u32(desc, REG3, ZERO, IMM, assoclen);
1186
1187	/*
1188	 * For IPsec load the IV further in the same register.
1189	 * For RFC7539 simply load the 12 bytes nonce in a single operation
1190	 */
1191	append_load_as_imm(desc, req->iv, ivsize, LDST_CLASS_1_CCB |
1192			   LDST_SRCDST_BYTE_CONTEXT |
1193			   ctx_iv_off << LDST_OFFSET_SHIFT);
1194}
1195
1196static void init_authenc_job(struct aead_request *req,
1197			     struct aead_edesc *edesc,
1198			     bool all_contig, bool encrypt)
1199{
1200	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1201	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1202						 struct caam_aead_alg,
1203						 aead.base);
1204	unsigned int ivsize = crypto_aead_ivsize(aead);
1205	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1206	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
1207	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
1208			       OP_ALG_AAI_CTR_MOD128);
1209	const bool is_rfc3686 = alg->caam.rfc3686;
1210	u32 *desc = edesc->hw_desc;
1211	u32 ivoffset = 0;
1212
1213	/*
1214	 * AES-CTR needs to load IV in CONTEXT1 reg
1215	 * at an offset of 128 bits (16 bytes)
1216	 * CONTEXT1[255:128] = IV
1217	 */
1218	if (ctr_mode)
1219		ivoffset = 16;
1220
1221	/*
1222	 * RFC3686 specific:
1223	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1224	 */
1225	if (is_rfc3686)
1226		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
1227
1228	init_aead_job(req, edesc, all_contig, encrypt);
1229
1230	/*
1231	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
1232	 * having DPOVRD as destination.
1233	 */
1234	if (ctrlpriv->era < 3)
1235		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1236	else
1237		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
1238
1239	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
1240		append_load_as_imm(desc, req->iv, ivsize,
1241				   LDST_CLASS_1_CCB |
1242				   LDST_SRCDST_BYTE_CONTEXT |
1243				   (ivoffset << LDST_OFFSET_SHIFT));
1244}
1245
1246/*
1247 * Fill in skcipher job descriptor
1248 */
1249static void init_skcipher_job(struct skcipher_request *req,
1250			      struct skcipher_edesc *edesc,
1251			      const bool encrypt)
1252{
1253	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1254	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
1255	struct device *jrdev = ctx->jrdev;
1256	int ivsize = crypto_skcipher_ivsize(skcipher);
1257	u32 *desc = edesc->hw_desc;
1258	u32 *sh_desc;
1259	u32 in_options = 0, out_options = 0;
1260	dma_addr_t src_dma, dst_dma, ptr;
1261	int len, sec4_sg_index = 0;
1262
1263	print_hex_dump_debug("presciv@"__stringify(__LINE__)": ",
1264			     DUMP_PREFIX_ADDRESS, 16, 4, req->iv, ivsize, 1);
1265	dev_dbg(jrdev, "asked=%d, cryptlen=%d\n",
1266	       (int)edesc->src_nents > 1 ? 100 : req->cryptlen, req->cryptlen);
1267
1268	caam_dump_sg("src    @" __stringify(__LINE__)": ",
1269		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1270		     edesc->src_nents > 1 ? 100 : req->cryptlen, 1);
1271
1272	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
1273	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
1274
1275	len = desc_len(sh_desc);
1276	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1277
1278	if (ivsize || edesc->mapped_src_nents > 1) {
1279		src_dma = edesc->sec4_sg_dma;
1280		sec4_sg_index = edesc->mapped_src_nents + !!ivsize;
1281		in_options = LDST_SGF;
1282	} else {
1283		src_dma = sg_dma_address(req->src);
1284	}
1285
1286	append_seq_in_ptr(desc, src_dma, req->cryptlen + ivsize, in_options);
1287
1288	if (likely(req->src == req->dst)) {
1289		dst_dma = src_dma + !!ivsize * sizeof(struct sec4_sg_entry);
1290		out_options = in_options;
1291	} else if (!ivsize && edesc->mapped_dst_nents == 1) {
1292		dst_dma = sg_dma_address(req->dst);
1293	} else {
1294		dst_dma = edesc->sec4_sg_dma + sec4_sg_index *
1295			  sizeof(struct sec4_sg_entry);
1296		out_options = LDST_SGF;
1297	}
1298
1299	append_seq_out_ptr(desc, dst_dma, req->cryptlen + ivsize, out_options);
1300}
1301
1302/*
1303 * allocate and map the aead extended descriptor
1304 */
1305static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1306					   int desc_bytes, bool *all_contig_ptr,
1307					   bool encrypt)
1308{
1309	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1310	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1311	struct device *jrdev = ctx->jrdev;
1312	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1313	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1314		       GFP_KERNEL : GFP_ATOMIC;
1315	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1316	int src_len, dst_len = 0;
1317	struct aead_edesc *edesc;
1318	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
1319	unsigned int authsize = ctx->authsize;
1320
1321	if (unlikely(req->dst != req->src)) {
1322		src_len = req->assoclen + req->cryptlen;
1323		dst_len = src_len + (encrypt ? authsize : (-authsize));
1324
1325		src_nents = sg_nents_for_len(req->src, src_len);
1326		if (unlikely(src_nents < 0)) {
1327			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1328				src_len);
1329			return ERR_PTR(src_nents);
1330		}
1331
1332		dst_nents = sg_nents_for_len(req->dst, dst_len);
1333		if (unlikely(dst_nents < 0)) {
1334			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1335				dst_len);
1336			return ERR_PTR(dst_nents);
1337		}
1338	} else {
1339		src_len = req->assoclen + req->cryptlen +
1340			  (encrypt ? authsize : 0);
1341
1342		src_nents = sg_nents_for_len(req->src, src_len);
1343		if (unlikely(src_nents < 0)) {
1344			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1345				src_len);
1346			return ERR_PTR(src_nents);
1347		}
1348	}
1349
1350	if (likely(req->src == req->dst)) {
1351		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1352					      DMA_BIDIRECTIONAL);
1353		if (unlikely(!mapped_src_nents)) {
1354			dev_err(jrdev, "unable to map source\n");
1355			return ERR_PTR(-ENOMEM);
1356		}
1357	} else {
1358		/* Cover also the case of null (zero length) input data */
1359		if (src_nents) {
1360			mapped_src_nents = dma_map_sg(jrdev, req->src,
1361						      src_nents, DMA_TO_DEVICE);
1362			if (unlikely(!mapped_src_nents)) {
1363				dev_err(jrdev, "unable to map source\n");
1364				return ERR_PTR(-ENOMEM);
1365			}
1366		} else {
1367			mapped_src_nents = 0;
1368		}
1369
1370		/* Cover also the case of null (zero length) output data */
1371		if (dst_nents) {
1372			mapped_dst_nents = dma_map_sg(jrdev, req->dst,
1373						      dst_nents,
1374						      DMA_FROM_DEVICE);
1375			if (unlikely(!mapped_dst_nents)) {
1376				dev_err(jrdev, "unable to map destination\n");
1377				dma_unmap_sg(jrdev, req->src, src_nents,
1378					     DMA_TO_DEVICE);
1379				return ERR_PTR(-ENOMEM);
1380			}
1381		} else {
1382			mapped_dst_nents = 0;
1383		}
1384	}
1385
1386	/*
1387	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1388	 * the end of the table by allocating more S/G entries.
1389	 */
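	/*
	 * Editorial worked example, assuming pad_sg_nents() rounds up to a
	 * multiple of 4: mapped_src_nents = 3 and mapped_dst_nents = 2 give
	 * sec4_sg_len = 3 + pad_sg_nents(2) = 3 + 4 = 7 entries, so a
	 * 4-entry burst over the destination segment stays inside the table.
	 */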
1390	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
1391	if (mapped_dst_nents > 1)
1392		sec4_sg_len += pad_sg_nents(mapped_dst_nents);
1393	else
1394		sec4_sg_len = pad_sg_nents(sec4_sg_len);
1395
1396	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1397
1398	/* allocate space for base edesc and hw desc commands, link tables */
1399	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes, flags);
1400	if (!edesc) {
1401		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1402			   0, 0, 0);
1403		return ERR_PTR(-ENOMEM);
1404	}
1405
1406	edesc->src_nents = src_nents;
1407	edesc->dst_nents = dst_nents;
1408	edesc->mapped_src_nents = mapped_src_nents;
1409	edesc->mapped_dst_nents = mapped_dst_nents;
1410	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1411			 desc_bytes;
1412
1413	rctx->edesc = edesc;
1414
1415	*all_contig_ptr = !(mapped_src_nents > 1);
1416
1417	sec4_sg_index = 0;
1418	if (mapped_src_nents > 1) {
1419		sg_to_sec4_sg_last(req->src, src_len,
1420				   edesc->sec4_sg + sec4_sg_index, 0);
1421		sec4_sg_index += mapped_src_nents;
1422	}
1423	if (mapped_dst_nents > 1) {
1424		sg_to_sec4_sg_last(req->dst, dst_len,
1425				   edesc->sec4_sg + sec4_sg_index, 0);
1426	}
1427
1428	if (!sec4_sg_bytes)
1429		return edesc;
1430
1431	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1432					    sec4_sg_bytes, DMA_TO_DEVICE);
1433	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1434		dev_err(jrdev, "unable to map S/G table\n");
1435		aead_unmap(jrdev, edesc, req);
1436		kfree(edesc);
1437		return ERR_PTR(-ENOMEM);
1438	}
1439
1440	edesc->sec4_sg_bytes = sec4_sg_bytes;
1441
1442	return edesc;
1443}
1444
1445static int aead_enqueue_req(struct device *jrdev, struct aead_request *req)
1446{
1447	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1448	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1449	struct aead_edesc *edesc = rctx->edesc;
1450	u32 *desc = edesc->hw_desc;
1451	int ret;
1452
1453	/*
1454	 * Only backlog requests are sent to crypto-engine since the others
1455	 * can be handled by CAAM, if free, especially since JR has up to 1024
1456	 * entries (more than the 10 entries from crypto-engine).
1457	 */
1458	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1459		ret = crypto_transfer_aead_request_to_engine(jrpriv->engine,
1460							     req);
1461	else
1462		ret = caam_jr_enqueue(jrdev, desc, aead_crypt_done, req);
1463
1464	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1465		aead_unmap(jrdev, edesc, req);
1466		kfree(rctx->edesc);
1467	}
1468
1469	return ret;
1470}
1471
1472static inline int chachapoly_crypt(struct aead_request *req, bool encrypt)
1473{
1474	struct aead_edesc *edesc;
1475	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1476	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1477	struct device *jrdev = ctx->jrdev;
1478	bool all_contig;
1479	u32 *desc;
1480
1481	edesc = aead_edesc_alloc(req, CHACHAPOLY_DESC_JOB_IO_LEN, &all_contig,
1482				 encrypt);
1483	if (IS_ERR(edesc))
1484		return PTR_ERR(edesc);
1485
1486	desc = edesc->hw_desc;
1487
1488	init_chachapoly_job(req, edesc, all_contig, encrypt);
1489	print_hex_dump_debug("chachapoly jobdesc@" __stringify(__LINE__)": ",
1490			     DUMP_PREFIX_ADDRESS, 16, 4, desc, desc_bytes(desc),
1491			     1);
1492
1493	return aead_enqueue_req(jrdev, req);
1494}
1495
1496static int chachapoly_encrypt(struct aead_request *req)
1497{
1498	return chachapoly_crypt(req, true);
1499}
1500
1501static int chachapoly_decrypt(struct aead_request *req)
1502{
1503	return chachapoly_crypt(req, false);
1504}
1505
1506static inline int aead_crypt(struct aead_request *req, bool encrypt)
1507{
1508	struct aead_edesc *edesc;
1509	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1510	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1511	struct device *jrdev = ctx->jrdev;
1512	bool all_contig;
1513
1514	/* allocate extended descriptor */
1515	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1516				 &all_contig, encrypt);
1517	if (IS_ERR(edesc))
1518		return PTR_ERR(edesc);
1519
1520	/* Create and submit job descriptor */
1521	init_authenc_job(req, edesc, all_contig, encrypt);
1522
1523	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1524			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1525			     desc_bytes(edesc->hw_desc), 1);
1526
1527	return aead_enqueue_req(jrdev, req);
1528}
1529
1530static int aead_encrypt(struct aead_request *req)
1531{
1532	return aead_crypt(req, true);
1533}
1534
1535static int aead_decrypt(struct aead_request *req)
1536{
1537	return aead_crypt(req, false);
1538}
1539
1540static int aead_do_one_req(struct crypto_engine *engine, void *areq)
1541{
1542	struct aead_request *req = aead_request_cast(areq);
1543	struct caam_ctx *ctx = crypto_aead_ctx_dma(crypto_aead_reqtfm(req));
1544	struct caam_aead_req_ctx *rctx = aead_request_ctx(req);
1545	u32 *desc = rctx->edesc->hw_desc;
1546	int ret;
1547
1548	rctx->edesc->bklog = true;
1549
1550	ret = caam_jr_enqueue(ctx->jrdev, desc, aead_crypt_done, req);
1551
1552	if (ret == -ENOSPC && engine->retry_support)
1553		return ret;
1554
1555	if (ret != -EINPROGRESS) {
1556		aead_unmap(ctx->jrdev, rctx->edesc, req);
1557		kfree(rctx->edesc);
1558	} else {
1559		ret = 0;
1560	}
1561
1562	return ret;
1563}
1564
1565static inline int gcm_crypt(struct aead_request *req, bool encrypt)
1566{
1567	struct aead_edesc *edesc;
1568	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1569	struct caam_ctx *ctx = crypto_aead_ctx_dma(aead);
1570	struct device *jrdev = ctx->jrdev;
1571	bool all_contig;
1572
1573	/* allocate extended descriptor */
1574	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig,
1575				 encrypt);
1576	if (IS_ERR(edesc))
1577		return PTR_ERR(edesc);
1578
1579	/* Create and submit job descriptor */
1580	init_gcm_job(req, edesc, all_contig, encrypt);
1581
1582	print_hex_dump_debug("aead jobdesc@"__stringify(__LINE__)": ",
1583			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1584			     desc_bytes(edesc->hw_desc), 1);
1585
1586	return aead_enqueue_req(jrdev, req);
1587}
1588
1589static int gcm_encrypt(struct aead_request *req)
1590{
1591	return gcm_crypt(req, true);
1592}
1593
1594static int gcm_decrypt(struct aead_request *req)
1595{
1596	return gcm_crypt(req, false);
1597}
1598
1599static int ipsec_gcm_encrypt(struct aead_request *req)
1600{
1601	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_encrypt(req);
1602}
1603
1604static int ipsec_gcm_decrypt(struct aead_request *req)
1605{
1606	return crypto_ipsec_check_assoclen(req->assoclen) ? : gcm_decrypt(req);
1607}
1608
1609/*
1610 * allocate and map the skcipher extended descriptor for skcipher
1611 */
1612static struct skcipher_edesc *skcipher_edesc_alloc(struct skcipher_request *req,
1613						   int desc_bytes)
1614{
1615	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1616	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
1617	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1618	struct device *jrdev = ctx->jrdev;
1619	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1620		       GFP_KERNEL : GFP_ATOMIC;
1621	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1622	struct skcipher_edesc *edesc;
1623	dma_addr_t iv_dma = 0;
1624	u8 *iv;
1625	int ivsize = crypto_skcipher_ivsize(skcipher);
1626	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1627	unsigned int aligned_size;
1628
1629	src_nents = sg_nents_for_len(req->src, req->cryptlen);
1630	if (unlikely(src_nents < 0)) {
1631		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1632			req->cryptlen);
1633		return ERR_PTR(src_nents);
1634	}
1635
1636	if (req->dst != req->src) {
1637		dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
1638		if (unlikely(dst_nents < 0)) {
1639			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1640				req->cryptlen);
1641			return ERR_PTR(dst_nents);
1642		}
1643	}
1644
1645	if (likely(req->src == req->dst)) {
1646		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1647					      DMA_BIDIRECTIONAL);
1648		if (unlikely(!mapped_src_nents)) {
1649			dev_err(jrdev, "unable to map source\n");
1650			return ERR_PTR(-ENOMEM);
1651		}
1652	} else {
1653		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1654					      DMA_TO_DEVICE);
1655		if (unlikely(!mapped_src_nents)) {
1656			dev_err(jrdev, "unable to map source\n");
1657			return ERR_PTR(-ENOMEM);
1658		}
1659		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1660					      DMA_FROM_DEVICE);
1661		if (unlikely(!mapped_dst_nents)) {
1662			dev_err(jrdev, "unable to map destination\n");
1663			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1664			return ERR_PTR(-ENOMEM);
1665		}
1666	}
1667
1668	if (!ivsize && mapped_src_nents == 1)
1669		sec4_sg_ents = 0; // no need for an input hw s/g table
1670	else
1671		sec4_sg_ents = mapped_src_nents + !!ivsize;
1672	dst_sg_idx = sec4_sg_ents;
1673
1674	/*
1675	 * Input, output HW S/G tables: [IV, src][dst, IV]
1676	 * IV entries point to the same buffer
1677	 * If src == dst, S/G entries are reused (S/G tables overlap)
1678	 *
1679	 * HW reads 4 S/G entries at a time; make sure the reads don't go beyond
1680	 * the end of the table by allocating more S/G entries. Logic:
1681	 * if (output S/G)
1682	 *      pad output S/G, if needed
1683	 * else if (input S/G) ...
1684	 *      pad input S/G, if needed
1685	 */
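	/*
	 * Editorial example, assuming pad_sg_nents() rounds up to a multiple
	 * of 4: src != dst, ivsize != 0, mapped_src_nents = 2 and
	 * mapped_dst_nents = 1 give an input table [IV, src0, src1] and an
	 * output table [dst0, IV] padded to 4 entries, i.e.
	 * sec4_sg_ents = 3 + pad_sg_nents(1 + 1) = 3 + 4 = 7.
	 */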
1686	if (ivsize || mapped_dst_nents > 1) {
1687		if (req->src == req->dst)
1688			sec4_sg_ents = !!ivsize + pad_sg_nents(sec4_sg_ents);
1689		else
1690			sec4_sg_ents += pad_sg_nents(mapped_dst_nents +
1691						     !!ivsize);
1692	} else {
1693		sec4_sg_ents = pad_sg_nents(sec4_sg_ents);
1694	}
1695
1696	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1697
1698	/*
1699	 * allocate space for base edesc and hw desc commands, link tables, IV
1700	 */
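	/*
	 * Editorial note: the IV lives after the S/G table, aligned to
	 * dma_get_cache_alignment(). Since kzalloc() only guarantees
	 * ARCH_KMALLOC_MINALIGN, worst-case padding between that minimum
	 * and the cache-line alignment is reserved up front, and
	 * skcipher_edesc_iv() later recovers the same address with
	 * PTR_ALIGN().
	 */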
1701	aligned_size = sizeof(*edesc) + desc_bytes + sec4_sg_bytes;
1702	aligned_size = ALIGN(aligned_size, dma_get_cache_alignment());
1703	aligned_size += ~(ARCH_KMALLOC_MINALIGN - 1) &
1704			(dma_get_cache_alignment() - 1);
1705	aligned_size += ALIGN(ivsize, dma_get_cache_alignment());
1706	edesc = kzalloc(aligned_size, flags);
1707	if (!edesc) {
1708		dev_err(jrdev, "could not allocate extended descriptor\n");
1709		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1710			   0, 0, 0);
1711		return ERR_PTR(-ENOMEM);
1712	}
1713
1714	edesc->src_nents = src_nents;
1715	edesc->dst_nents = dst_nents;
1716	edesc->mapped_src_nents = mapped_src_nents;
1717	edesc->mapped_dst_nents = mapped_dst_nents;
1718	edesc->sec4_sg_bytes = sec4_sg_bytes;
1719	edesc->sec4_sg = (struct sec4_sg_entry *)((u8 *)edesc->hw_desc +
1720						  desc_bytes);
1721	rctx->edesc = edesc;
1722
1723	/* Make sure IV is located in a DMAable area */
1724	if (ivsize) {
1725		iv = skcipher_edesc_iv(edesc);
1726		memcpy(iv, req->iv, ivsize);
1727
1728		iv_dma = dma_map_single(jrdev, iv, ivsize, DMA_BIDIRECTIONAL);
1729		if (dma_mapping_error(jrdev, iv_dma)) {
1730			dev_err(jrdev, "unable to map IV\n");
1731			caam_unmap(jrdev, req->src, req->dst, src_nents,
1732				   dst_nents, 0, 0, 0, 0);
1733			kfree(edesc);
1734			return ERR_PTR(-ENOMEM);
1735		}
1736
1737		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1738	}
1739	if (dst_sg_idx)
1740		sg_to_sec4_sg(req->src, req->cryptlen, edesc->sec4_sg +
1741			      !!ivsize, 0);
1742
1743	if (req->src != req->dst && (ivsize || mapped_dst_nents > 1))
1744		sg_to_sec4_sg(req->dst, req->cryptlen, edesc->sec4_sg +
1745			      dst_sg_idx, 0);
1746
1747	if (ivsize)
1748		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx +
1749				   mapped_dst_nents, iv_dma, ivsize, 0);
1750
1751	if (ivsize || mapped_dst_nents > 1)
1752		sg_to_sec4_set_last(edesc->sec4_sg + dst_sg_idx +
1753				    mapped_dst_nents - 1 + !!ivsize);
1754
1755	if (sec4_sg_bytes) {
1756		edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1757						    sec4_sg_bytes,
1758						    DMA_TO_DEVICE);
1759		if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1760			dev_err(jrdev, "unable to map S/G table\n");
1761			caam_unmap(jrdev, req->src, req->dst, src_nents,
1762				   dst_nents, iv_dma, ivsize, 0, 0);
1763			kfree(edesc);
1764			return ERR_PTR(-ENOMEM);
1765		}
1766	}
1767
1768	edesc->iv_dma = iv_dma;
1769
1770	print_hex_dump_debug("skcipher sec4_sg@" __stringify(__LINE__)": ",
1771			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1772			     sec4_sg_bytes, 1);
1773
1774	return edesc;
1775}
1776
1777static int skcipher_do_one_req(struct crypto_engine *engine, void *areq)
1778{
1779	struct skcipher_request *req = skcipher_request_cast(areq);
1780	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(crypto_skcipher_reqtfm(req));
1781	struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1782	u32 *desc = rctx->edesc->hw_desc;
1783	int ret;
1784
1785	rctx->edesc->bklog = true;
1786
1787	ret = caam_jr_enqueue(ctx->jrdev, desc, skcipher_crypt_done, req);
1788
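	/*
	 * When the engine supports retries, hand -ENOSPC back so that
	 * crypto-engine requeues the request instead of failing it here.
	 */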
1789	if (ret == -ENOSPC && engine->retry_support)
1790		return ret;
1791
1792	if (ret != -EINPROGRESS) {
1793		skcipher_unmap(ctx->jrdev, rctx->edesc, req);
1794		kfree(rctx->edesc);
1795	} else {
1796		ret = 0;
1797	}
1798
1799	return ret;
1800}
1801
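/*
 * xts_skcipher_ivsize - check whether the upper half of the XTS IV is set
 *
 * CAAM HW up to era 8 only handles the lower 64 bits of the XTS sector
 * index; a nonzero upper half therefore routes the request to the software
 * fallback in skcipher_crypt() below.
 */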
1802static inline bool xts_skcipher_ivsize(struct skcipher_request *req)
1803{
1804	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1805	unsigned int ivsize = crypto_skcipher_ivsize(skcipher);
1806
1807	return !!get_unaligned((u64 *)(req->iv + (ivsize / 2)));
1808}
1809
1810static inline int skcipher_crypt(struct skcipher_request *req, bool encrypt)
1811{
1812	struct skcipher_edesc *edesc;
1813	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1814	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(skcipher);
1815	struct device *jrdev = ctx->jrdev;
1816	struct caam_drv_private_jr *jrpriv = dev_get_drvdata(jrdev);
1817	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
1818	u32 *desc;
1819	int ret = 0;
1820
1821	/*
1822	 * XTS is expected to return an error even for input length = 0.
1823	 * Note that the case of input length < block size will be caught
1824	 * during HW offloading, which returns an error.
1825	 */
1826	if (!req->cryptlen && !ctx->fallback)
1827		return 0;
1828
1829	if (ctx->fallback && ((ctrlpriv->era <= 8 && xts_skcipher_ivsize(req)) ||
1830			      ctx->xts_key_fallback)) {
1831		struct caam_skcipher_req_ctx *rctx = skcipher_request_ctx(req);
1832
1833		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
1834		skcipher_request_set_callback(&rctx->fallback_req,
1835					      req->base.flags,
1836					      req->base.complete,
1837					      req->base.data);
1838		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
1839					   req->dst, req->cryptlen, req->iv);
1840
1841		return encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
1842				 crypto_skcipher_decrypt(&rctx->fallback_req);
1843	}
1844
1845	/* allocate extended descriptor */
1846	edesc = skcipher_edesc_alloc(req, DESC_JOB_IO_LEN * CAAM_CMD_SZ);
1847	if (IS_ERR(edesc))
1848		return PTR_ERR(edesc);
1849
1850	/* Create and submit job descriptor */
1851	init_skcipher_job(req, edesc, encrypt);
1852
1853	print_hex_dump_debug("skcipher jobdesc@" __stringify(__LINE__)": ",
1854			     DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1855			     desc_bytes(edesc->hw_desc), 1);
1856
1857	desc = edesc->hw_desc;
1858	/*
1859	 * Only backlog requests are sent to crypto-engine; the others can be
1860	 * handled directly by CAAM when a slot is free, especially since the
1861	 * JR has up to 1024 entries (far more than crypto-engine's 10).
1862	 */
1863	if (req->base.flags & CRYPTO_TFM_REQ_MAY_BACKLOG)
1864		ret = crypto_transfer_skcipher_request_to_engine(jrpriv->engine,
1865								 req);
1866	else
1867		ret = caam_jr_enqueue(jrdev, desc, skcipher_crypt_done, req);
1868
1869	if ((ret != -EINPROGRESS) && (ret != -EBUSY)) {
1870		skcipher_unmap(jrdev, edesc, req);
1871		kfree(edesc);
1872	}
1873
1874	return ret;
1875}
1876
1877static int skcipher_encrypt(struct skcipher_request *req)
1878{
1879	return skcipher_crypt(req, true);
1880}
1881
1882static int skcipher_decrypt(struct skcipher_request *req)
1883{
1884	return skcipher_crypt(req, false);
1885	}
1886
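/*
 * Illustrative usage sketch (not part of the driver): the entry points
 * above are reached through the generic kernel crypto API, roughly:
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	struct skcipher_request *req = skcipher_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				      my_done_cb, my_ctx);
 *	skcipher_request_set_crypt(req, src_sg, dst_sg, nbytes, iv);
 *	err = crypto_skcipher_encrypt(req);	(typically -EINPROGRESS)
 *
 * Error handling is omitted and my_done_cb/my_ctx, key, src_sg, dst_sg are
 * placeholders; since the algorithms below are CRYPTO_ALG_ASYNC, completion
 * arrives via the callback.
 */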
1887	static struct caam_skcipher_alg driver_algs[] = {
1888	{
1889		.skcipher.base = {
1890			.base = {
1891				.cra_name = "cbc(aes)",
1892				.cra_driver_name = "cbc-aes-caam",
1893				.cra_blocksize = AES_BLOCK_SIZE,
1894			},
1895			.setkey = aes_skcipher_setkey,
1896			.encrypt = skcipher_encrypt,
1897			.decrypt = skcipher_decrypt,
1898			.min_keysize = AES_MIN_KEY_SIZE,
1899			.max_keysize = AES_MAX_KEY_SIZE,
1900			.ivsize = AES_BLOCK_SIZE,
1901		},
1902		.skcipher.op = {
1903			.do_one_request = skcipher_do_one_req,
1904		},
1905		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1906	},
1907	{
1908		.skcipher.base = {
1909			.base = {
1910				.cra_name = "cbc(des3_ede)",
1911				.cra_driver_name = "cbc-3des-caam",
1912				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1913			},
1914			.setkey = des3_skcipher_setkey,
1915			.encrypt = skcipher_encrypt,
1916			.decrypt = skcipher_decrypt,
1917			.min_keysize = DES3_EDE_KEY_SIZE,
1918			.max_keysize = DES3_EDE_KEY_SIZE,
1919			.ivsize = DES3_EDE_BLOCK_SIZE,
1920		},
1921		.skcipher.op = {
1922			.do_one_request = skcipher_do_one_req,
1923		},
1924		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1925	},
1926	{
1927		.skcipher.base = {
1928			.base = {
1929				.cra_name = "cbc(des)",
1930				.cra_driver_name = "cbc-des-caam",
1931				.cra_blocksize = DES_BLOCK_SIZE,
1932			},
1933			.setkey = des_skcipher_setkey,
1934			.encrypt = skcipher_encrypt,
1935			.decrypt = skcipher_decrypt,
1936			.min_keysize = DES_KEY_SIZE,
1937			.max_keysize = DES_KEY_SIZE,
1938			.ivsize = DES_BLOCK_SIZE,
1939		},
1940		.skcipher.op = {
1941			.do_one_request = skcipher_do_one_req,
1942		},
1943		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
1944	},
1945	{
1946		.skcipher.base = {
1947			.base = {
1948				.cra_name = "ctr(aes)",
1949				.cra_driver_name = "ctr-aes-caam",
1950				.cra_blocksize = 1,
1951			},
1952			.setkey = ctr_skcipher_setkey,
1953			.encrypt = skcipher_encrypt,
1954			.decrypt = skcipher_decrypt,
1955			.min_keysize = AES_MIN_KEY_SIZE,
1956			.max_keysize = AES_MAX_KEY_SIZE,
1957			.ivsize = AES_BLOCK_SIZE,
1958			.chunksize = AES_BLOCK_SIZE,
1959		},
1960		.skcipher.op = {
1961			.do_one_request = skcipher_do_one_req,
1962		},
1963		.caam.class1_alg_type = OP_ALG_ALGSEL_AES |
1964					OP_ALG_AAI_CTR_MOD128,
1965	},
1966	{
1967		.skcipher.base = {
1968			.base = {
1969				.cra_name = "rfc3686(ctr(aes))",
1970				.cra_driver_name = "rfc3686-ctr-aes-caam",
1971				.cra_blocksize = 1,
1972			},
1973			.setkey = rfc3686_skcipher_setkey,
1974			.encrypt = skcipher_encrypt,
1975			.decrypt = skcipher_decrypt,
1976			.min_keysize = AES_MIN_KEY_SIZE +
1977				       CTR_RFC3686_NONCE_SIZE,
1978			.max_keysize = AES_MAX_KEY_SIZE +
1979				       CTR_RFC3686_NONCE_SIZE,
1980			.ivsize = CTR_RFC3686_IV_SIZE,
1981			.chunksize = AES_BLOCK_SIZE,
1982		},
1983		.skcipher.op = {
1984			.do_one_request = skcipher_do_one_req,
1985		},
1986		.caam = {
1987			.class1_alg_type = OP_ALG_ALGSEL_AES |
1988					   OP_ALG_AAI_CTR_MOD128,
1989			.rfc3686 = true,
1990		},
1991	},
1992	{
1993		.skcipher.base = {
1994			.base = {
1995				.cra_name = "xts(aes)",
1996				.cra_driver_name = "xts-aes-caam",
1997				.cra_flags = CRYPTO_ALG_NEED_FALLBACK,
1998				.cra_blocksize = AES_BLOCK_SIZE,
1999			},
2000			.setkey = xts_skcipher_setkey,
2001			.encrypt = skcipher_encrypt,
2002			.decrypt = skcipher_decrypt,
2003			.min_keysize = 2 * AES_MIN_KEY_SIZE,
2004			.max_keysize = 2 * AES_MAX_KEY_SIZE,
2005			.ivsize = AES_BLOCK_SIZE,
2006		},
2007		.skcipher.op = {
2008			.do_one_request = skcipher_do_one_req,
2009		},
2010		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
2011	},
2012	{
2013		.skcipher.base = {
2014			.base = {
2015				.cra_name = "ecb(des)",
2016				.cra_driver_name = "ecb-des-caam",
2017				.cra_blocksize = DES_BLOCK_SIZE,
2018			},
2019			.setkey = des_skcipher_setkey,
2020			.encrypt = skcipher_encrypt,
2021			.decrypt = skcipher_decrypt,
2022			.min_keysize = DES_KEY_SIZE,
2023			.max_keysize = DES_KEY_SIZE,
2024		},
2025		.skcipher.op = {
2026			.do_one_request = skcipher_do_one_req,
2027		},
2028		.caam.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_ECB,
2029	},
2030	{
2031		.skcipher.base = {
2032			.base = {
2033				.cra_name = "ecb(aes)",
2034				.cra_driver_name = "ecb-aes-caam",
2035				.cra_blocksize = AES_BLOCK_SIZE,
2036			},
2037			.setkey = aes_skcipher_setkey,
2038			.encrypt = skcipher_encrypt,
2039			.decrypt = skcipher_decrypt,
2040			.min_keysize = AES_MIN_KEY_SIZE,
2041			.max_keysize = AES_MAX_KEY_SIZE,
2042		},
2043		.skcipher.op = {
2044			.do_one_request = skcipher_do_one_req,
2045		},
2046		.caam.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_ECB,
2047	},
2048	{
2049		.skcipher.base = {
2050			.base = {
2051				.cra_name = "ecb(des3_ede)",
2052				.cra_driver_name = "ecb-des3-caam",
2053				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2054			},
2055			.setkey = des3_skcipher_setkey,
2056			.encrypt = skcipher_encrypt,
2057			.decrypt = skcipher_decrypt,
2058			.min_keysize = DES3_EDE_KEY_SIZE,
2059			.max_keysize = DES3_EDE_KEY_SIZE,
2060		},
2061		.skcipher.op = {
2062			.do_one_request = skcipher_do_one_req,
2063		},
2064		.caam.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_ECB,
2065	},
2066};
2067
2068static struct caam_aead_alg driver_aeads[] = {
2069	{
2070		.aead.base = {
2071			.base = {
2072				.cra_name = "rfc4106(gcm(aes))",
2073				.cra_driver_name = "rfc4106-gcm-aes-caam",
2074				.cra_blocksize = 1,
2075			},
2076			.setkey = rfc4106_setkey,
2077			.setauthsize = rfc4106_setauthsize,
2078			.encrypt = ipsec_gcm_encrypt,
2079			.decrypt = ipsec_gcm_decrypt,
2080			.ivsize = GCM_RFC4106_IV_SIZE,
2081			.maxauthsize = AES_BLOCK_SIZE,
2082		},
2083		.aead.op = {
2084			.do_one_request = aead_do_one_req,
2085		},
2086		.caam = {
2087			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2088			.nodkp = true,
2089		},
2090	},
2091	{
2092		.aead.base = {
2093			.base = {
2094				.cra_name = "rfc4543(gcm(aes))",
2095				.cra_driver_name = "rfc4543-gcm-aes-caam",
2096				.cra_blocksize = 1,
2097			},
2098			.setkey = rfc4543_setkey,
2099			.setauthsize = rfc4543_setauthsize,
2100			.encrypt = ipsec_gcm_encrypt,
2101			.decrypt = ipsec_gcm_decrypt,
2102			.ivsize = GCM_RFC4543_IV_SIZE,
2103			.maxauthsize = AES_BLOCK_SIZE,
2104		},
2105		.aead.op = {
2106			.do_one_request = aead_do_one_req,
2107		},
2108		.caam = {
2109			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2110			.nodkp = true,
2111		},
2112	},
2113	/* Galois Counter Mode */
2114	{
2115		.aead.base = {
2116			.base = {
2117				.cra_name = "gcm(aes)",
2118				.cra_driver_name = "gcm-aes-caam",
2119				.cra_blocksize = 1,
2120			},
2121			.setkey = gcm_setkey,
2122			.setauthsize = gcm_setauthsize,
2123			.encrypt = gcm_encrypt,
2124			.decrypt = gcm_decrypt,
2125			.ivsize = GCM_AES_IV_SIZE,
2126			.maxauthsize = AES_BLOCK_SIZE,
2127		},
2128		.aead.op = {
2129			.do_one_request = aead_do_one_req,
2130		},
2131		.caam = {
2132			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2133			.nodkp = true,
2134		},
2135	},
2136	/* single-pass ipsec_esp descriptor */
2137	{
2138		.aead.base = {
2139			.base = {
2140				.cra_name = "authenc(hmac(md5),"
2141					    "ecb(cipher_null))",
2142				.cra_driver_name = "authenc-hmac-md5-"
2143						   "ecb-cipher_null-caam",
2144				.cra_blocksize = NULL_BLOCK_SIZE,
2145			},
2146			.setkey = aead_setkey,
2147			.setauthsize = aead_setauthsize,
2148			.encrypt = aead_encrypt,
2149			.decrypt = aead_decrypt,
2150			.ivsize = NULL_IV_SIZE,
2151			.maxauthsize = MD5_DIGEST_SIZE,
2152		},
2153		.aead.op = {
2154			.do_one_request = aead_do_one_req,
2155		},
2156		.caam = {
2157			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2158					   OP_ALG_AAI_HMAC_PRECOMP,
2159		},
2160	},
2161	{
2162		.aead.base = {
2163			.base = {
2164				.cra_name = "authenc(hmac(sha1),"
2165					    "ecb(cipher_null))",
2166				.cra_driver_name = "authenc-hmac-sha1-"
2167						   "ecb-cipher_null-caam",
2168				.cra_blocksize = NULL_BLOCK_SIZE,
2169			},
2170			.setkey = aead_setkey,
2171			.setauthsize = aead_setauthsize,
2172			.encrypt = aead_encrypt,
2173			.decrypt = aead_decrypt,
2174			.ivsize = NULL_IV_SIZE,
2175			.maxauthsize = SHA1_DIGEST_SIZE,
2176		},
2177		.aead.op = {
2178			.do_one_request = aead_do_one_req,
2179		},
2180		.caam = {
2181			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2182					   OP_ALG_AAI_HMAC_PRECOMP,
2183		},
2184	},
2185	{
2186		.aead.base = {
2187			.base = {
2188				.cra_name = "authenc(hmac(sha224),"
2189					    "ecb(cipher_null))",
2190				.cra_driver_name = "authenc-hmac-sha224-"
2191						   "ecb-cipher_null-caam",
2192				.cra_blocksize = NULL_BLOCK_SIZE,
2193			},
2194			.setkey = aead_setkey,
2195			.setauthsize = aead_setauthsize,
2196			.encrypt = aead_encrypt,
2197			.decrypt = aead_decrypt,
2198			.ivsize = NULL_IV_SIZE,
2199			.maxauthsize = SHA224_DIGEST_SIZE,
2200		},
2201		.aead.op = {
2202			.do_one_request = aead_do_one_req,
2203		},
2204		.caam = {
2205			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2206					   OP_ALG_AAI_HMAC_PRECOMP,
2207		},
2208	},
2209	{
2210		.aead.base = {
2211			.base = {
2212				.cra_name = "authenc(hmac(sha256),"
2213					    "ecb(cipher_null))",
2214				.cra_driver_name = "authenc-hmac-sha256-"
2215						   "ecb-cipher_null-caam",
2216				.cra_blocksize = NULL_BLOCK_SIZE,
2217			},
2218			.setkey = aead_setkey,
2219			.setauthsize = aead_setauthsize,
2220			.encrypt = aead_encrypt,
2221			.decrypt = aead_decrypt,
2222			.ivsize = NULL_IV_SIZE,
2223			.maxauthsize = SHA256_DIGEST_SIZE,
2224		},
2225		.aead.op = {
2226			.do_one_request = aead_do_one_req,
2227		},
2228		.caam = {
2229			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2230					   OP_ALG_AAI_HMAC_PRECOMP,
2231		},
2232	},
2233	{
2234		.aead.base = {
2235			.base = {
2236				.cra_name = "authenc(hmac(sha384),"
2237					    "ecb(cipher_null))",
2238				.cra_driver_name = "authenc-hmac-sha384-"
2239						   "ecb-cipher_null-caam",
2240				.cra_blocksize = NULL_BLOCK_SIZE,
2241			},
2242			.setkey = aead_setkey,
2243			.setauthsize = aead_setauthsize,
2244			.encrypt = aead_encrypt,
2245			.decrypt = aead_decrypt,
2246			.ivsize = NULL_IV_SIZE,
2247			.maxauthsize = SHA384_DIGEST_SIZE,
2248		},
2249		.aead.op = {
2250			.do_one_request = aead_do_one_req,
2251		},
2252		.caam = {
2253			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2254					   OP_ALG_AAI_HMAC_PRECOMP,
2255		},
2256	},
2257	{
2258		.aead.base = {
2259			.base = {
2260				.cra_name = "authenc(hmac(sha512),"
2261					    "ecb(cipher_null))",
2262				.cra_driver_name = "authenc-hmac-sha512-"
2263						   "ecb-cipher_null-caam",
2264				.cra_blocksize = NULL_BLOCK_SIZE,
2265			},
2266			.setkey = aead_setkey,
2267			.setauthsize = aead_setauthsize,
2268			.encrypt = aead_encrypt,
2269			.decrypt = aead_decrypt,
2270			.ivsize = NULL_IV_SIZE,
2271			.maxauthsize = SHA512_DIGEST_SIZE,
2272		},
2273		.aead.op = {
2274			.do_one_request = aead_do_one_req,
2275		},
2276		.caam = {
2277			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2278					   OP_ALG_AAI_HMAC_PRECOMP,
2279		},
2280	},
2281	{
2282		.aead.base = {
2283			.base = {
2284				.cra_name = "authenc(hmac(md5),cbc(aes))",
2285				.cra_driver_name = "authenc-hmac-md5-"
2286						   "cbc-aes-caam",
2287				.cra_blocksize = AES_BLOCK_SIZE,
2288			},
2289			.setkey = aead_setkey,
2290			.setauthsize = aead_setauthsize,
2291			.encrypt = aead_encrypt,
2292			.decrypt = aead_decrypt,
2293			.ivsize = AES_BLOCK_SIZE,
2294			.maxauthsize = MD5_DIGEST_SIZE,
2295		},
2296		.aead.op = {
2297			.do_one_request = aead_do_one_req,
2298		},
2299		.caam = {
2300			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2301			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2302					   OP_ALG_AAI_HMAC_PRECOMP,
2303		},
2304	},
2305	{
2306		.aead.base = {
2307			.base = {
2308				.cra_name = "echainiv(authenc(hmac(md5),"
2309					    "cbc(aes)))",
2310				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2311						   "cbc-aes-caam",
2312				.cra_blocksize = AES_BLOCK_SIZE,
2313			},
2314			.setkey = aead_setkey,
2315			.setauthsize = aead_setauthsize,
2316			.encrypt = aead_encrypt,
2317			.decrypt = aead_decrypt,
2318			.ivsize = AES_BLOCK_SIZE,
2319			.maxauthsize = MD5_DIGEST_SIZE,
2320		},
2321		.aead.op = {
2322			.do_one_request = aead_do_one_req,
2323		},
2324		.caam = {
2325			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2326			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2327					   OP_ALG_AAI_HMAC_PRECOMP,
2328			.geniv = true,
2329		},
2330	},
2331	{
2332		.aead.base = {
2333			.base = {
2334				.cra_name = "authenc(hmac(sha1),cbc(aes))",
2335				.cra_driver_name = "authenc-hmac-sha1-"
2336						   "cbc-aes-caam",
2337				.cra_blocksize = AES_BLOCK_SIZE,
2338			},
2339			.setkey = aead_setkey,
2340			.setauthsize = aead_setauthsize,
2341			.encrypt = aead_encrypt,
2342			.decrypt = aead_decrypt,
2343			.ivsize = AES_BLOCK_SIZE,
2344			.maxauthsize = SHA1_DIGEST_SIZE,
2345		},
2346		.aead.op = {
2347			.do_one_request = aead_do_one_req,
2348		},
2349		.caam = {
2350			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2351			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2352					   OP_ALG_AAI_HMAC_PRECOMP,
2353		},
2354	},
2355	{
2356		.aead.base = {
2357			.base = {
2358				.cra_name = "echainiv(authenc(hmac(sha1),"
2359					    "cbc(aes)))",
2360				.cra_driver_name = "echainiv-authenc-"
2361						   "hmac-sha1-cbc-aes-caam",
2362				.cra_blocksize = AES_BLOCK_SIZE,
2363			},
2364			.setkey = aead_setkey,
2365			.setauthsize = aead_setauthsize,
2366			.encrypt = aead_encrypt,
2367			.decrypt = aead_decrypt,
2368			.ivsize = AES_BLOCK_SIZE,
2369			.maxauthsize = SHA1_DIGEST_SIZE,
2370		},
2371		.aead.op = {
2372			.do_one_request = aead_do_one_req,
2373		},
2374		.caam = {
2375			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2376			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2377					   OP_ALG_AAI_HMAC_PRECOMP,
2378			.geniv = true,
2379		},
2380	},
2381	{
2382		.aead.base = {
2383			.base = {
2384				.cra_name = "authenc(hmac(sha224),cbc(aes))",
2385				.cra_driver_name = "authenc-hmac-sha224-"
2386						   "cbc-aes-caam",
2387				.cra_blocksize = AES_BLOCK_SIZE,
2388			},
2389			.setkey = aead_setkey,
2390			.setauthsize = aead_setauthsize,
2391			.encrypt = aead_encrypt,
2392			.decrypt = aead_decrypt,
2393			.ivsize = AES_BLOCK_SIZE,
2394			.maxauthsize = SHA224_DIGEST_SIZE,
2395		},
2396		.aead.op = {
2397			.do_one_request = aead_do_one_req,
2398		},
2399		.caam = {
2400			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2401			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2402					   OP_ALG_AAI_HMAC_PRECOMP,
2403		},
2404	},
2405	{
2406		.aead.base = {
2407			.base = {
2408				.cra_name = "echainiv(authenc(hmac(sha224),"
2409					    "cbc(aes)))",
2410				.cra_driver_name = "echainiv-authenc-"
2411						   "hmac-sha224-cbc-aes-caam",
2412				.cra_blocksize = AES_BLOCK_SIZE,
2413			},
2414			.setkey = aead_setkey,
2415			.setauthsize = aead_setauthsize,
2416			.encrypt = aead_encrypt,
2417			.decrypt = aead_decrypt,
2418			.ivsize = AES_BLOCK_SIZE,
2419			.maxauthsize = SHA224_DIGEST_SIZE,
2420		},
2421		.aead.op = {
2422			.do_one_request = aead_do_one_req,
2423		},
2424		.caam = {
2425			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2426			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2427					   OP_ALG_AAI_HMAC_PRECOMP,
2428			.geniv = true,
2429		},
2430	},
2431	{
2432		.aead.base = {
2433			.base = {
2434				.cra_name = "authenc(hmac(sha256),cbc(aes))",
2435				.cra_driver_name = "authenc-hmac-sha256-"
2436						   "cbc-aes-caam",
2437				.cra_blocksize = AES_BLOCK_SIZE,
2438			},
2439			.setkey = aead_setkey,
2440			.setauthsize = aead_setauthsize,
2441			.encrypt = aead_encrypt,
2442			.decrypt = aead_decrypt,
2443			.ivsize = AES_BLOCK_SIZE,
2444			.maxauthsize = SHA256_DIGEST_SIZE,
2445		},
2446		.aead.op = {
2447			.do_one_request = aead_do_one_req,
2448		},
2449		.caam = {
2450			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2451			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2452					   OP_ALG_AAI_HMAC_PRECOMP,
2453		},
2454	},
2455	{
2456		.aead.base = {
2457			.base = {
2458				.cra_name = "echainiv(authenc(hmac(sha256),"
2459					    "cbc(aes)))",
2460				.cra_driver_name = "echainiv-authenc-"
2461						   "hmac-sha256-cbc-aes-caam",
2462				.cra_blocksize = AES_BLOCK_SIZE,
2463			},
2464			.setkey = aead_setkey,
2465			.setauthsize = aead_setauthsize,
2466			.encrypt = aead_encrypt,
2467			.decrypt = aead_decrypt,
2468			.ivsize = AES_BLOCK_SIZE,
2469			.maxauthsize = SHA256_DIGEST_SIZE,
2470		},
2471		.aead.op = {
2472			.do_one_request = aead_do_one_req,
2473		},
2474		.caam = {
2475			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2476			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2477					   OP_ALG_AAI_HMAC_PRECOMP,
2478			.geniv = true,
2479		},
2480	},
2481	{
2482		.aead.base = {
2483			.base = {
2484				.cra_name = "authenc(hmac(sha384),cbc(aes))",
2485				.cra_driver_name = "authenc-hmac-sha384-"
2486						   "cbc-aes-caam",
2487				.cra_blocksize = AES_BLOCK_SIZE,
2488			},
2489			.setkey = aead_setkey,
2490			.setauthsize = aead_setauthsize,
2491			.encrypt = aead_encrypt,
2492			.decrypt = aead_decrypt,
2493			.ivsize = AES_BLOCK_SIZE,
2494			.maxauthsize = SHA384_DIGEST_SIZE,
2495		},
2496		.aead.op = {
2497			.do_one_request = aead_do_one_req,
2498		},
2499		.caam = {
2500			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2501			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2502					   OP_ALG_AAI_HMAC_PRECOMP,
2503		},
2504	},
2505	{
2506		.aead.base = {
2507			.base = {
2508				.cra_name = "echainiv(authenc(hmac(sha384),"
2509					    "cbc(aes)))",
2510				.cra_driver_name = "echainiv-authenc-"
2511						   "hmac-sha384-cbc-aes-caam",
2512				.cra_blocksize = AES_BLOCK_SIZE,
2513			},
2514			.setkey = aead_setkey,
2515			.setauthsize = aead_setauthsize,
2516			.encrypt = aead_encrypt,
2517			.decrypt = aead_decrypt,
2518			.ivsize = AES_BLOCK_SIZE,
2519			.maxauthsize = SHA384_DIGEST_SIZE,
2520		},
2521		.aead.op = {
2522			.do_one_request = aead_do_one_req,
2523		},
2524		.caam = {
2525			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2526			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2527					   OP_ALG_AAI_HMAC_PRECOMP,
2528			.geniv = true,
2529		},
2530	},
2531	{
2532		.aead.base = {
2533			.base = {
2534				.cra_name = "authenc(hmac(sha512),cbc(aes))",
2535				.cra_driver_name = "authenc-hmac-sha512-"
2536						   "cbc-aes-caam",
2537				.cra_blocksize = AES_BLOCK_SIZE,
2538			},
2539			.setkey = aead_setkey,
2540			.setauthsize = aead_setauthsize,
2541			.encrypt = aead_encrypt,
2542			.decrypt = aead_decrypt,
2543			.ivsize = AES_BLOCK_SIZE,
2544			.maxauthsize = SHA512_DIGEST_SIZE,
2545		},
2546		.aead.op = {
2547			.do_one_request = aead_do_one_req,
2548		},
2549		.caam = {
2550			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2551			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2552					   OP_ALG_AAI_HMAC_PRECOMP,
2553		},
2554	},
2555	{
2556		.aead.base = {
2557			.base = {
2558				.cra_name = "echainiv(authenc(hmac(sha512),"
2559					    "cbc(aes)))",
2560				.cra_driver_name = "echainiv-authenc-"
2561						   "hmac-sha512-cbc-aes-caam",
2562				.cra_blocksize = AES_BLOCK_SIZE,
2563			},
2564			.setkey = aead_setkey,
2565			.setauthsize = aead_setauthsize,
2566			.encrypt = aead_encrypt,
2567			.decrypt = aead_decrypt,
2568			.ivsize = AES_BLOCK_SIZE,
2569			.maxauthsize = SHA512_DIGEST_SIZE,
2570		},
2571		.aead.op = {
2572			.do_one_request = aead_do_one_req,
2573		},
2574		.caam = {
2575			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2576			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2577					   OP_ALG_AAI_HMAC_PRECOMP,
2578			.geniv = true,
2579		},
2580	},
2581	{
2582		.aead.base = {
2583			.base = {
2584				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2585				.cra_driver_name = "authenc-hmac-md5-"
2586						   "cbc-des3_ede-caam",
2587				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2588			},
2589			.setkey = des3_aead_setkey,
2590			.setauthsize = aead_setauthsize,
2591			.encrypt = aead_encrypt,
2592			.decrypt = aead_decrypt,
2593			.ivsize = DES3_EDE_BLOCK_SIZE,
2594			.maxauthsize = MD5_DIGEST_SIZE,
2595		},
2596		.aead.op = {
2597			.do_one_request = aead_do_one_req,
2598		},
2599		.caam = {
2600			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2601			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2602					   OP_ALG_AAI_HMAC_PRECOMP,
2603		}
2604	},
2605	{
2606		.aead.base = {
2607			.base = {
2608				.cra_name = "echainiv(authenc(hmac(md5),"
2609					    "cbc(des3_ede)))",
2610				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2611						   "cbc-des3_ede-caam",
2612				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2613			},
2614			.setkey = des3_aead_setkey,
2615			.setauthsize = aead_setauthsize,
2616			.encrypt = aead_encrypt,
2617			.decrypt = aead_decrypt,
2618			.ivsize = DES3_EDE_BLOCK_SIZE,
2619			.maxauthsize = MD5_DIGEST_SIZE,
2620		},
2621		.aead.op = {
2622			.do_one_request = aead_do_one_req,
2623		},
2624		.caam = {
2625			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2626			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2627					   OP_ALG_AAI_HMAC_PRECOMP,
2628			.geniv = true,
2629		}
2630	},
2631	{
2632		.aead.base = {
2633			.base = {
2634				.cra_name = "authenc(hmac(sha1),"
2635					    "cbc(des3_ede))",
2636				.cra_driver_name = "authenc-hmac-sha1-"
2637						   "cbc-des3_ede-caam",
2638				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2639			},
2640			.setkey = des3_aead_setkey,
2641			.setauthsize = aead_setauthsize,
2642			.encrypt = aead_encrypt,
2643			.decrypt = aead_decrypt,
2644			.ivsize = DES3_EDE_BLOCK_SIZE,
2645			.maxauthsize = SHA1_DIGEST_SIZE,
2646		},
2647		.aead.op = {
2648			.do_one_request = aead_do_one_req,
2649		},
2650		.caam = {
2651			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2652			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2653					   OP_ALG_AAI_HMAC_PRECOMP,
2654		},
2655	},
2656	{
2657		.aead.base = {
2658			.base = {
2659				.cra_name = "echainiv(authenc(hmac(sha1),"
2660					    "cbc(des3_ede)))",
2661				.cra_driver_name = "echainiv-authenc-"
2662						   "hmac-sha1-"
2663						   "cbc-des3_ede-caam",
2664				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2665			},
2666			.setkey = des3_aead_setkey,
2667			.setauthsize = aead_setauthsize,
2668			.encrypt = aead_encrypt,
2669			.decrypt = aead_decrypt,
2670			.ivsize = DES3_EDE_BLOCK_SIZE,
2671			.maxauthsize = SHA1_DIGEST_SIZE,
2672		},
2673		.aead.op = {
2674			.do_one_request = aead_do_one_req,
2675		},
2676		.caam = {
2677			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2678			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2679					   OP_ALG_AAI_HMAC_PRECOMP,
2680			.geniv = true,
2681		},
2682	},
2683	{
2684		.aead.base = {
2685			.base = {
2686				.cra_name = "authenc(hmac(sha224),"
2687					    "cbc(des3_ede))",
2688				.cra_driver_name = "authenc-hmac-sha224-"
2689						   "cbc-des3_ede-caam",
2690				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2691			},
2692			.setkey = des3_aead_setkey,
2693			.setauthsize = aead_setauthsize,
2694			.encrypt = aead_encrypt,
2695			.decrypt = aead_decrypt,
2696			.ivsize = DES3_EDE_BLOCK_SIZE,
2697			.maxauthsize = SHA224_DIGEST_SIZE,
2698		},
2699		.aead.op = {
2700			.do_one_request = aead_do_one_req,
2701		},
2702		.caam = {
2703			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2704			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2705					   OP_ALG_AAI_HMAC_PRECOMP,
2706		},
2707	},
2708	{
2709		.aead.base = {
2710			.base = {
2711				.cra_name = "echainiv(authenc(hmac(sha224),"
2712					    "cbc(des3_ede)))",
2713				.cra_driver_name = "echainiv-authenc-"
2714						   "hmac-sha224-"
2715						   "cbc-des3_ede-caam",
2716				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2717			},
2718			.setkey = des3_aead_setkey,
2719			.setauthsize = aead_setauthsize,
2720			.encrypt = aead_encrypt,
2721			.decrypt = aead_decrypt,
2722			.ivsize = DES3_EDE_BLOCK_SIZE,
2723			.maxauthsize = SHA224_DIGEST_SIZE,
2724		},
2725		.aead.op = {
2726			.do_one_request = aead_do_one_req,
2727		},
2728		.caam = {
2729			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2730			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2731					   OP_ALG_AAI_HMAC_PRECOMP,
2732			.geniv = true,
2733		},
2734	},
2735	{
2736		.aead.base = {
2737			.base = {
2738				.cra_name = "authenc(hmac(sha256),"
2739					    "cbc(des3_ede))",
2740				.cra_driver_name = "authenc-hmac-sha256-"
2741						   "cbc-des3_ede-caam",
2742				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2743			},
2744			.setkey = des3_aead_setkey,
2745			.setauthsize = aead_setauthsize,
2746			.encrypt = aead_encrypt,
2747			.decrypt = aead_decrypt,
2748			.ivsize = DES3_EDE_BLOCK_SIZE,
2749			.maxauthsize = SHA256_DIGEST_SIZE,
2750		},
2751		.aead.op = {
2752			.do_one_request = aead_do_one_req,
2753		},
2754		.caam = {
2755			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2756			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2757					   OP_ALG_AAI_HMAC_PRECOMP,
2758		},
2759	},
2760	{
2761		.aead.base = {
2762			.base = {
2763				.cra_name = "echainiv(authenc(hmac(sha256),"
2764					    "cbc(des3_ede)))",
2765				.cra_driver_name = "echainiv-authenc-"
2766						   "hmac-sha256-"
2767						   "cbc-des3_ede-caam",
2768				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2769			},
2770			.setkey = des3_aead_setkey,
2771			.setauthsize = aead_setauthsize,
2772			.encrypt = aead_encrypt,
2773			.decrypt = aead_decrypt,
2774			.ivsize = DES3_EDE_BLOCK_SIZE,
2775			.maxauthsize = SHA256_DIGEST_SIZE,
2776		},
2777		.aead.op = {
2778			.do_one_request = aead_do_one_req,
2779		},
2780		.caam = {
2781			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2782			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2783					   OP_ALG_AAI_HMAC_PRECOMP,
2784			.geniv = true,
2785		},
2786	},
2787	{
2788		.aead.base = {
2789			.base = {
2790				.cra_name = "authenc(hmac(sha384),"
2791					    "cbc(des3_ede))",
2792				.cra_driver_name = "authenc-hmac-sha384-"
2793						   "cbc-des3_ede-caam",
2794				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2795			},
2796			.setkey = des3_aead_setkey,
2797			.setauthsize = aead_setauthsize,
2798			.encrypt = aead_encrypt,
2799			.decrypt = aead_decrypt,
2800			.ivsize = DES3_EDE_BLOCK_SIZE,
2801			.maxauthsize = SHA384_DIGEST_SIZE,
2802		},
2803		.aead.op = {
2804			.do_one_request = aead_do_one_req,
2805		},
2806		.caam = {
2807			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2808			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2809					   OP_ALG_AAI_HMAC_PRECOMP,
2810		},
2811	},
2812	{
2813		.aead.base = {
2814			.base = {
2815				.cra_name = "echainiv(authenc(hmac(sha384),"
2816					    "cbc(des3_ede)))",
2817				.cra_driver_name = "echainiv-authenc-"
2818						   "hmac-sha384-"
2819						   "cbc-des3_ede-caam",
2820				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2821			},
2822			.setkey = des3_aead_setkey,
2823			.setauthsize = aead_setauthsize,
2824			.encrypt = aead_encrypt,
2825			.decrypt = aead_decrypt,
2826			.ivsize = DES3_EDE_BLOCK_SIZE,
2827			.maxauthsize = SHA384_DIGEST_SIZE,
2828		},
2829		.aead.op = {
2830			.do_one_request = aead_do_one_req,
2831		},
2832		.caam = {
2833			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2834			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2835					   OP_ALG_AAI_HMAC_PRECOMP,
2836			.geniv = true,
2837		},
2838	},
2839	{
2840		.aead.base = {
2841			.base = {
2842				.cra_name = "authenc(hmac(sha512),"
2843					    "cbc(des3_ede))",
2844				.cra_driver_name = "authenc-hmac-sha512-"
2845						   "cbc-des3_ede-caam",
2846				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2847			},
2848			.setkey = des3_aead_setkey,
2849			.setauthsize = aead_setauthsize,
2850			.encrypt = aead_encrypt,
2851			.decrypt = aead_decrypt,
2852			.ivsize = DES3_EDE_BLOCK_SIZE,
2853			.maxauthsize = SHA512_DIGEST_SIZE,
2854		},
2855		.aead.op = {
2856			.do_one_request = aead_do_one_req,
2857		},
2858		.caam = {
2859			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2860			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2861					   OP_ALG_AAI_HMAC_PRECOMP,
2862		},
2863	},
2864	{
2865		.aead.base = {
2866			.base = {
2867				.cra_name = "echainiv(authenc(hmac(sha512),"
2868					    "cbc(des3_ede)))",
2869				.cra_driver_name = "echainiv-authenc-"
2870						   "hmac-sha512-"
2871						   "cbc-des3_ede-caam",
2872				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2873			},
2874			.setkey = des3_aead_setkey,
2875			.setauthsize = aead_setauthsize,
2876			.encrypt = aead_encrypt,
2877			.decrypt = aead_decrypt,
2878			.ivsize = DES3_EDE_BLOCK_SIZE,
2879			.maxauthsize = SHA512_DIGEST_SIZE,
2880		},
2881		.aead.op = {
2882			.do_one_request = aead_do_one_req,
2883		},
2884		.caam = {
2885			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2886			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2887					   OP_ALG_AAI_HMAC_PRECOMP,
2888			.geniv = true,
2889		},
2890	},
2891	{
2892		.aead.base = {
2893			.base = {
2894				.cra_name = "authenc(hmac(md5),cbc(des))",
2895				.cra_driver_name = "authenc-hmac-md5-"
2896						   "cbc-des-caam",
2897				.cra_blocksize = DES_BLOCK_SIZE,
2898			},
2899			.setkey = aead_setkey,
2900			.setauthsize = aead_setauthsize,
2901			.encrypt = aead_encrypt,
2902			.decrypt = aead_decrypt,
2903			.ivsize = DES_BLOCK_SIZE,
2904			.maxauthsize = MD5_DIGEST_SIZE,
2905		},
2906		.aead.op = {
2907			.do_one_request = aead_do_one_req,
2908		},
2909		.caam = {
2910			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2911			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2912					   OP_ALG_AAI_HMAC_PRECOMP,
2913		},
2914	},
2915	{
2916		.aead.base = {
2917			.base = {
2918				.cra_name = "echainiv(authenc(hmac(md5),"
2919					    "cbc(des)))",
2920				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2921						   "cbc-des-caam",
2922				.cra_blocksize = DES_BLOCK_SIZE,
2923			},
2924			.setkey = aead_setkey,
2925			.setauthsize = aead_setauthsize,
2926			.encrypt = aead_encrypt,
2927			.decrypt = aead_decrypt,
2928			.ivsize = DES_BLOCK_SIZE,
2929			.maxauthsize = MD5_DIGEST_SIZE,
2930		},
2931		.aead.op = {
2932			.do_one_request = aead_do_one_req,
2933		},
2934		.caam = {
2935			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2936			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2937					   OP_ALG_AAI_HMAC_PRECOMP,
2938			.geniv = true,
2939		},
2940	},
2941	{
2942		.aead.base = {
2943			.base = {
2944				.cra_name = "authenc(hmac(sha1),cbc(des))",
2945				.cra_driver_name = "authenc-hmac-sha1-"
2946						   "cbc-des-caam",
2947				.cra_blocksize = DES_BLOCK_SIZE,
2948			},
2949			.setkey = aead_setkey,
2950			.setauthsize = aead_setauthsize,
2951			.encrypt = aead_encrypt,
2952			.decrypt = aead_decrypt,
2953			.ivsize = DES_BLOCK_SIZE,
2954			.maxauthsize = SHA1_DIGEST_SIZE,
2955		},
2956		.aead.op = {
2957			.do_one_request = aead_do_one_req,
2958		},
2959		.caam = {
2960			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2961			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2962					   OP_ALG_AAI_HMAC_PRECOMP,
2963		},
2964	},
2965	{
2966		.aead.base = {
2967			.base = {
2968				.cra_name = "echainiv(authenc(hmac(sha1),"
2969					    "cbc(des)))",
2970				.cra_driver_name = "echainiv-authenc-"
2971						   "hmac-sha1-cbc-des-caam",
2972				.cra_blocksize = DES_BLOCK_SIZE,
2973			},
2974			.setkey = aead_setkey,
2975			.setauthsize = aead_setauthsize,
2976			.encrypt = aead_encrypt,
2977			.decrypt = aead_decrypt,
2978			.ivsize = DES_BLOCK_SIZE,
2979			.maxauthsize = SHA1_DIGEST_SIZE,
2980		},
2981		.aead.op = {
2982			.do_one_request = aead_do_one_req,
2983		},
2984		.caam = {
2985			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2986			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2987					   OP_ALG_AAI_HMAC_PRECOMP,
2988			.geniv = true,
2989		},
2990	},
2991	{
2992		.aead.base = {
2993			.base = {
2994				.cra_name = "authenc(hmac(sha224),cbc(des))",
2995				.cra_driver_name = "authenc-hmac-sha224-"
2996						   "cbc-des-caam",
2997				.cra_blocksize = DES_BLOCK_SIZE,
2998			},
2999			.setkey = aead_setkey,
3000			.setauthsize = aead_setauthsize,
3001			.encrypt = aead_encrypt,
3002			.decrypt = aead_decrypt,
3003			.ivsize = DES_BLOCK_SIZE,
3004			.maxauthsize = SHA224_DIGEST_SIZE,
3005		},
3006		.aead.op = {
3007			.do_one_request = aead_do_one_req,
3008		},
3009		.caam = {
3010			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3011			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3012					   OP_ALG_AAI_HMAC_PRECOMP,
3013		},
3014	},
3015	{
3016		.aead.base = {
3017			.base = {
3018				.cra_name = "echainiv(authenc(hmac(sha224),"
3019					    "cbc(des)))",
3020				.cra_driver_name = "echainiv-authenc-"
3021						   "hmac-sha224-cbc-des-caam",
3022				.cra_blocksize = DES_BLOCK_SIZE,
3023			},
3024			.setkey = aead_setkey,
3025			.setauthsize = aead_setauthsize,
3026			.encrypt = aead_encrypt,
3027			.decrypt = aead_decrypt,
3028			.ivsize = DES_BLOCK_SIZE,
3029			.maxauthsize = SHA224_DIGEST_SIZE,
3030		},
3031		.aead.op = {
3032			.do_one_request = aead_do_one_req,
3033		},
3034		.caam = {
3035			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3036			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3037					   OP_ALG_AAI_HMAC_PRECOMP,
3038			.geniv = true,
3039		},
3040	},
3041	{
3042		.aead.base = {
3043			.base = {
3044				.cra_name = "authenc(hmac(sha256),cbc(des))",
3045				.cra_driver_name = "authenc-hmac-sha256-"
3046						   "cbc-des-caam",
3047				.cra_blocksize = DES_BLOCK_SIZE,
3048			},
3049			.setkey = aead_setkey,
3050			.setauthsize = aead_setauthsize,
3051			.encrypt = aead_encrypt,
3052			.decrypt = aead_decrypt,
3053			.ivsize = DES_BLOCK_SIZE,
3054			.maxauthsize = SHA256_DIGEST_SIZE,
3055		},
3056		.aead.op = {
3057			.do_one_request = aead_do_one_req,
3058		},
3059		.caam = {
3060			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3061			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3062					   OP_ALG_AAI_HMAC_PRECOMP,
3063		},
3064	},
3065	{
3066		.aead.base = {
3067			.base = {
3068				.cra_name = "echainiv(authenc(hmac(sha256),"
3069					    "cbc(des)))",
3070				.cra_driver_name = "echainiv-authenc-"
3071						   "hmac-sha256-cbc-des-caam",
3072				.cra_blocksize = DES_BLOCK_SIZE,
3073			},
3074			.setkey = aead_setkey,
3075			.setauthsize = aead_setauthsize,
3076			.encrypt = aead_encrypt,
3077			.decrypt = aead_decrypt,
3078			.ivsize = DES_BLOCK_SIZE,
3079			.maxauthsize = SHA256_DIGEST_SIZE,
3080		},
3081		.aead.op = {
3082			.do_one_request = aead_do_one_req,
3083		},
3084		.caam = {
3085			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3086			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3087					   OP_ALG_AAI_HMAC_PRECOMP,
3088			.geniv = true,
3089		},
3090	},
3091	{
3092		.aead.base = {
3093			.base = {
3094				.cra_name = "authenc(hmac(sha384),cbc(des))",
3095				.cra_driver_name = "authenc-hmac-sha384-"
3096						   "cbc-des-caam",
3097				.cra_blocksize = DES_BLOCK_SIZE,
3098			},
3099			.setkey = aead_setkey,
3100			.setauthsize = aead_setauthsize,
3101			.encrypt = aead_encrypt,
3102			.decrypt = aead_decrypt,
3103			.ivsize = DES_BLOCK_SIZE,
3104			.maxauthsize = SHA384_DIGEST_SIZE,
3105		},
3106		.aead.op = {
3107			.do_one_request = aead_do_one_req,
3108		},
3109		.caam = {
3110			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3111			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3112					   OP_ALG_AAI_HMAC_PRECOMP,
3113		},
3114	},
3115	{
3116		.aead.base = {
3117			.base = {
3118				.cra_name = "echainiv(authenc(hmac(sha384),"
3119					    "cbc(des)))",
3120				.cra_driver_name = "echainiv-authenc-"
3121						   "hmac-sha384-cbc-des-caam",
3122				.cra_blocksize = DES_BLOCK_SIZE,
3123			},
3124			.setkey = aead_setkey,
3125			.setauthsize = aead_setauthsize,
3126			.encrypt = aead_encrypt,
3127			.decrypt = aead_decrypt,
3128			.ivsize = DES_BLOCK_SIZE,
3129			.maxauthsize = SHA384_DIGEST_SIZE,
3130		},
3131		.aead.op = {
3132			.do_one_request = aead_do_one_req,
3133		},
3134		.caam = {
3135			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3136			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3137					   OP_ALG_AAI_HMAC_PRECOMP,
3138			.geniv = true,
3139		},
3140	},
3141	{
3142		.aead.base = {
3143			.base = {
3144				.cra_name = "authenc(hmac(sha512),cbc(des))",
3145				.cra_driver_name = "authenc-hmac-sha512-"
3146						   "cbc-des-caam",
3147				.cra_blocksize = DES_BLOCK_SIZE,
3148			},
3149			.setkey = aead_setkey,
3150			.setauthsize = aead_setauthsize,
3151			.encrypt = aead_encrypt,
3152			.decrypt = aead_decrypt,
3153			.ivsize = DES_BLOCK_SIZE,
3154			.maxauthsize = SHA512_DIGEST_SIZE,
3155		},
3156		.aead.op = {
3157			.do_one_request = aead_do_one_req,
3158		},
3159		.caam = {
3160			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3161			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3162					   OP_ALG_AAI_HMAC_PRECOMP,
3163		},
3164	},
3165	{
3166		.aead.base = {
3167			.base = {
3168				.cra_name = "echainiv(authenc(hmac(sha512),"
3169					    "cbc(des)))",
3170				.cra_driver_name = "echainiv-authenc-"
3171						   "hmac-sha512-cbc-des-caam",
3172				.cra_blocksize = DES_BLOCK_SIZE,
3173			},
3174			.setkey = aead_setkey,
3175			.setauthsize = aead_setauthsize,
3176			.encrypt = aead_encrypt,
3177			.decrypt = aead_decrypt,
3178			.ivsize = DES_BLOCK_SIZE,
3179			.maxauthsize = SHA512_DIGEST_SIZE,
3180		},
3181		.aead.op = {
3182			.do_one_request = aead_do_one_req,
3183		},
3184		.caam = {
3185			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
3186			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3187					   OP_ALG_AAI_HMAC_PRECOMP,
3188			.geniv = true,
3189		},
3190	},
3191	{
3192		.aead.base = {
3193			.base = {
3194				.cra_name = "authenc(hmac(md5),"
3195					    "rfc3686(ctr(aes)))",
3196				.cra_driver_name = "authenc-hmac-md5-"
3197						   "rfc3686-ctr-aes-caam",
3198				.cra_blocksize = 1,
3199			},
3200			.setkey = aead_setkey,
3201			.setauthsize = aead_setauthsize,
3202			.encrypt = aead_encrypt,
3203			.decrypt = aead_decrypt,
3204			.ivsize = CTR_RFC3686_IV_SIZE,
3205			.maxauthsize = MD5_DIGEST_SIZE,
3206		},
3207		.aead.op = {
3208			.do_one_request = aead_do_one_req,
3209		},
3210		.caam = {
3211			.class1_alg_type = OP_ALG_ALGSEL_AES |
3212					   OP_ALG_AAI_CTR_MOD128,
3213			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
3214					   OP_ALG_AAI_HMAC_PRECOMP,
3215			.rfc3686 = true,
3216		},
3217	},
3218	{
3219		.aead.base = {
3220			.base = {
3221				.cra_name = "seqiv(authenc("
3222					    "hmac(md5),rfc3686(ctr(aes))))",
3223				.cra_driver_name = "seqiv-authenc-hmac-md5-"
3224						   "rfc3686-ctr-aes-caam",
3225				.cra_blocksize = 1,
3226			},
3227			.setkey = aead_setkey,
3228			.setauthsize = aead_setauthsize,
3229			.encrypt = aead_encrypt,
3230			.decrypt = aead_decrypt,
3231			.ivsize = CTR_RFC3686_IV_SIZE,
3232			.maxauthsize = MD5_DIGEST_SIZE,
3233		},
3234		.aead.op = {
3235			.do_one_request = aead_do_one_req,
3236		},
3237		.caam = {
3238			.class1_alg_type = OP_ALG_ALGSEL_AES |
3239					   OP_ALG_AAI_CTR_MOD128,
3240			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
3241					   OP_ALG_AAI_HMAC_PRECOMP,
3242			.rfc3686 = true,
3243			.geniv = true,
3244		},
3245	},
3246	{
3247		.aead.base = {
3248			.base = {
3249				.cra_name = "authenc(hmac(sha1),"
3250					    "rfc3686(ctr(aes)))",
3251				.cra_driver_name = "authenc-hmac-sha1-"
3252						   "rfc3686-ctr-aes-caam",
3253				.cra_blocksize = 1,
3254			},
3255			.setkey = aead_setkey,
3256			.setauthsize = aead_setauthsize,
3257			.encrypt = aead_encrypt,
3258			.decrypt = aead_decrypt,
3259			.ivsize = CTR_RFC3686_IV_SIZE,
3260			.maxauthsize = SHA1_DIGEST_SIZE,
3261		},
3262		.aead.op = {
3263			.do_one_request = aead_do_one_req,
3264		},
3265		.caam = {
3266			.class1_alg_type = OP_ALG_ALGSEL_AES |
3267					   OP_ALG_AAI_CTR_MOD128,
3268			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3269					   OP_ALG_AAI_HMAC_PRECOMP,
3270			.rfc3686 = true,
3271		},
3272	},
3273	{
3274		.aead.base = {
3275			.base = {
3276				.cra_name = "seqiv(authenc("
3277					    "hmac(sha1),rfc3686(ctr(aes))))",
3278				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
3279						   "rfc3686-ctr-aes-caam",
3280				.cra_blocksize = 1,
3281			},
3282			.setkey = aead_setkey,
3283			.setauthsize = aead_setauthsize,
3284			.encrypt = aead_encrypt,
3285			.decrypt = aead_decrypt,
3286			.ivsize = CTR_RFC3686_IV_SIZE,
3287			.maxauthsize = SHA1_DIGEST_SIZE,
3288		},
3289		.aead.op = {
3290			.do_one_request = aead_do_one_req,
3291		},
3292		.caam = {
3293			.class1_alg_type = OP_ALG_ALGSEL_AES |
3294					   OP_ALG_AAI_CTR_MOD128,
3295			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3296					   OP_ALG_AAI_HMAC_PRECOMP,
3297			.rfc3686 = true,
3298			.geniv = true,
3299		},
3300	},
3301	{
3302		.aead.base = {
3303			.base = {
3304				.cra_name = "authenc(hmac(sha224),"
3305					    "rfc3686(ctr(aes)))",
3306				.cra_driver_name = "authenc-hmac-sha224-"
3307						   "rfc3686-ctr-aes-caam",
3308				.cra_blocksize = 1,
3309			},
3310			.setkey = aead_setkey,
3311			.setauthsize = aead_setauthsize,
3312			.encrypt = aead_encrypt,
3313			.decrypt = aead_decrypt,
3314			.ivsize = CTR_RFC3686_IV_SIZE,
3315			.maxauthsize = SHA224_DIGEST_SIZE,
3316		},
3317		.aead.op = {
3318			.do_one_request = aead_do_one_req,
3319		},
3320		.caam = {
3321			.class1_alg_type = OP_ALG_ALGSEL_AES |
3322					   OP_ALG_AAI_CTR_MOD128,
3323			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3324					   OP_ALG_AAI_HMAC_PRECOMP,
3325			.rfc3686 = true,
3326		},
3327	},
3328	{
3329		.aead.base = {
3330			.base = {
3331				.cra_name = "seqiv(authenc("
3332					    "hmac(sha224),rfc3686(ctr(aes))))",
3333				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
3334						   "rfc3686-ctr-aes-caam",
3335				.cra_blocksize = 1,
3336			},
3337			.setkey = aead_setkey,
3338			.setauthsize = aead_setauthsize,
3339			.encrypt = aead_encrypt,
3340			.decrypt = aead_decrypt,
3341			.ivsize = CTR_RFC3686_IV_SIZE,
3342			.maxauthsize = SHA224_DIGEST_SIZE,
3343		},
3344		.aead.op = {
3345			.do_one_request = aead_do_one_req,
3346		},
3347		.caam = {
3348			.class1_alg_type = OP_ALG_ALGSEL_AES |
3349					   OP_ALG_AAI_CTR_MOD128,
3350			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3351					   OP_ALG_AAI_HMAC_PRECOMP,
3352			.rfc3686 = true,
3353			.geniv = true,
3354		},
3355	},
3356	{
3357		.aead.base = {
3358			.base = {
3359				.cra_name = "authenc(hmac(sha256),"
3360					    "rfc3686(ctr(aes)))",
3361				.cra_driver_name = "authenc-hmac-sha256-"
3362						   "rfc3686-ctr-aes-caam",
3363				.cra_blocksize = 1,
3364			},
3365			.setkey = aead_setkey,
3366			.setauthsize = aead_setauthsize,
3367			.encrypt = aead_encrypt,
3368			.decrypt = aead_decrypt,
3369			.ivsize = CTR_RFC3686_IV_SIZE,
3370			.maxauthsize = SHA256_DIGEST_SIZE,
3371		},
3372		.aead.op = {
3373			.do_one_request = aead_do_one_req,
3374		},
3375		.caam = {
3376			.class1_alg_type = OP_ALG_ALGSEL_AES |
3377					   OP_ALG_AAI_CTR_MOD128,
3378			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3379					   OP_ALG_AAI_HMAC_PRECOMP,
3380			.rfc3686 = true,
3381		},
3382	},
3383	{
3384		.aead.base = {
3385			.base = {
3386				.cra_name = "seqiv(authenc(hmac(sha256),"
3387					    "rfc3686(ctr(aes))))",
3388				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
3389						   "rfc3686-ctr-aes-caam",
3390				.cra_blocksize = 1,
3391			},
3392			.setkey = aead_setkey,
3393			.setauthsize = aead_setauthsize,
3394			.encrypt = aead_encrypt,
3395			.decrypt = aead_decrypt,
3396			.ivsize = CTR_RFC3686_IV_SIZE,
3397			.maxauthsize = SHA256_DIGEST_SIZE,
3398		},
3399		.aead.op = {
3400			.do_one_request = aead_do_one_req,
3401		},
3402		.caam = {
3403			.class1_alg_type = OP_ALG_ALGSEL_AES |
3404					   OP_ALG_AAI_CTR_MOD128,
3405			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3406					   OP_ALG_AAI_HMAC_PRECOMP,
3407			.rfc3686 = true,
3408			.geniv = true,
3409		},
3410	},
3411	{
3412		.aead.base = {
3413			.base = {
3414				.cra_name = "authenc(hmac(sha384),"
3415					    "rfc3686(ctr(aes)))",
3416				.cra_driver_name = "authenc-hmac-sha384-"
3417						   "rfc3686-ctr-aes-caam",
3418				.cra_blocksize = 1,
3419			},
3420			.setkey = aead_setkey,
3421			.setauthsize = aead_setauthsize,
3422			.encrypt = aead_encrypt,
3423			.decrypt = aead_decrypt,
3424			.ivsize = CTR_RFC3686_IV_SIZE,
3425			.maxauthsize = SHA384_DIGEST_SIZE,
3426		},
3427		.aead.op = {
3428			.do_one_request = aead_do_one_req,
3429		},
3430		.caam = {
3431			.class1_alg_type = OP_ALG_ALGSEL_AES |
3432					   OP_ALG_AAI_CTR_MOD128,
3433			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3434					   OP_ALG_AAI_HMAC_PRECOMP,
3435			.rfc3686 = true,
3436		},
3437	},
3438	{
3439		.aead.base = {
3440			.base = {
3441				.cra_name = "seqiv(authenc(hmac(sha384),"
3442					    "rfc3686(ctr(aes))))",
3443				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
3444						   "rfc3686-ctr-aes-caam",
3445				.cra_blocksize = 1,
3446			},
3447			.setkey = aead_setkey,
3448			.setauthsize = aead_setauthsize,
3449			.encrypt = aead_encrypt,
3450			.decrypt = aead_decrypt,
3451			.ivsize = CTR_RFC3686_IV_SIZE,
3452			.maxauthsize = SHA384_DIGEST_SIZE,
3453		},
3454		.aead.op = {
3455			.do_one_request = aead_do_one_req,
3456		},
3457		.caam = {
3458			.class1_alg_type = OP_ALG_ALGSEL_AES |
3459					   OP_ALG_AAI_CTR_MOD128,
3460			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3461					   OP_ALG_AAI_HMAC_PRECOMP,
3462			.rfc3686 = true,
3463			.geniv = true,
3464		},
3465	},
3466	{
3467		.aead.base = {
3468			.base = {
3469				.cra_name = "authenc(hmac(sha512),"
3470					    "rfc3686(ctr(aes)))",
3471				.cra_driver_name = "authenc-hmac-sha512-"
3472						   "rfc3686-ctr-aes-caam",
3473				.cra_blocksize = 1,
3474			},
3475			.setkey = aead_setkey,
3476			.setauthsize = aead_setauthsize,
3477			.encrypt = aead_encrypt,
3478			.decrypt = aead_decrypt,
3479			.ivsize = CTR_RFC3686_IV_SIZE,
3480			.maxauthsize = SHA512_DIGEST_SIZE,
3481		},
3482		.aead.op = {
3483			.do_one_request = aead_do_one_req,
3484		},
3485		.caam = {
3486			.class1_alg_type = OP_ALG_ALGSEL_AES |
3487					   OP_ALG_AAI_CTR_MOD128,
3488			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3489					   OP_ALG_AAI_HMAC_PRECOMP,
3490			.rfc3686 = true,
3491		},
3492	},
3493	{
3494		.aead.base = {
3495			.base = {
3496				.cra_name = "seqiv(authenc(hmac(sha512),"
3497					    "rfc3686(ctr(aes))))",
3498				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
3499						   "rfc3686-ctr-aes-caam",
3500				.cra_blocksize = 1,
3501			},
3502			.setkey = aead_setkey,
3503			.setauthsize = aead_setauthsize,
3504			.encrypt = aead_encrypt,
3505			.decrypt = aead_decrypt,
3506			.ivsize = CTR_RFC3686_IV_SIZE,
3507			.maxauthsize = SHA512_DIGEST_SIZE,
3508		},
3509		.aead.op = {
3510			.do_one_request = aead_do_one_req,
3511		},
3512		.caam = {
3513			.class1_alg_type = OP_ALG_ALGSEL_AES |
3514					   OP_ALG_AAI_CTR_MOD128,
3515			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3516					   OP_ALG_AAI_HMAC_PRECOMP,
3517			.rfc3686 = true,
3518			.geniv = true,
3519		},
3520	},
3521	{
3522		.aead.base = {
3523			.base = {
3524				.cra_name = "rfc7539(chacha20,poly1305)",
3525				.cra_driver_name = "rfc7539-chacha20-poly1305-"
3526						   "caam",
3527				.cra_blocksize = 1,
3528			},
3529			.setkey = chachapoly_setkey,
3530			.setauthsize = chachapoly_setauthsize,
3531			.encrypt = chachapoly_encrypt,
3532			.decrypt = chachapoly_decrypt,
3533			.ivsize = CHACHAPOLY_IV_SIZE,
3534			.maxauthsize = POLY1305_DIGEST_SIZE,
3535		},
3536		.aead.op = {
3537			.do_one_request = aead_do_one_req,
3538		},
3539		.caam = {
3540			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3541					   OP_ALG_AAI_AEAD,
3542			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3543					   OP_ALG_AAI_AEAD,
3544			.nodkp = true,
3545		},
3546	},
3547	{
3548		.aead.base = {
3549			.base = {
3550				.cra_name = "rfc7539esp(chacha20,poly1305)",
3551				.cra_driver_name = "rfc7539esp-chacha20-"
3552						   "poly1305-caam",
3553				.cra_blocksize = 1,
3554			},
3555			.setkey = chachapoly_setkey,
3556			.setauthsize = chachapoly_setauthsize,
3557			.encrypt = chachapoly_encrypt,
3558			.decrypt = chachapoly_decrypt,
3559			.ivsize = 8,
3560			.maxauthsize = POLY1305_DIGEST_SIZE,
3561		},
3562		.aead.op = {
3563			.do_one_request = aead_do_one_req,
3564		},
3565		.caam = {
3566			.class1_alg_type = OP_ALG_ALGSEL_CHACHA20 |
3567					   OP_ALG_AAI_AEAD,
3568			.class2_alg_type = OP_ALG_ALGSEL_POLY1305 |
3569					   OP_ALG_AAI_AEAD,
3570			.nodkp = true,
3571		},
3572	},
3573};
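/*
 * Illustrative usage sketch (not part of the driver): an AEAD transform
 * from the table above is exercised through the generic API, roughly:
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *	struct aead_request *req = aead_request_alloc(tfm, GFP_KERNEL);
 *
 *	crypto_aead_setkey(tfm, key, keylen);
 *	crypto_aead_setauthsize(tfm, 16);
 *	aead_request_set_ad(req, assoclen);
 *	aead_request_set_crypt(req, src_sg, dst_sg, cryptlen, iv);
 *	err = crypto_aead_encrypt(req);		(typically -EINPROGRESS)
 *
 * src_sg must cover assoclen + cryptlen bytes; key, src_sg, dst_sg are
 * placeholders, and error handling plus the completion callback are
 * omitted for brevity.
 */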
3574
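/*
 * caam_init_common - per-transform setup shared by skcipher and AEAD tfms
 *
 * Allocates a job ring for the transform, picks the DMA direction for the
 * shared-descriptor region (bidirectional when DKP rewrites the key in
 * place on era >= 6 parts), and maps the sh_desc_enc..key region with a
 * single mapping so the decrypt-descriptor and key DMA addresses can be
 * derived by offset.
 */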
3575static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
3576			    bool uses_dkp)
3577{
3578	dma_addr_t dma_addr;
3579	struct caam_drv_private *priv;
3580	const size_t sh_desc_enc_offset = offsetof(struct caam_ctx,
3581						   sh_desc_enc);
3582
3583	ctx->jrdev = caam_jr_alloc();
3584	if (IS_ERR(ctx->jrdev)) {
3585		pr_err("Job Ring Device allocation for transform failed\n");
3586		return PTR_ERR(ctx->jrdev);
3587	}
3588
3589	priv = dev_get_drvdata(ctx->jrdev->parent);
3590	if (priv->era >= 6 && uses_dkp)
3591		ctx->dir = DMA_BIDIRECTIONAL;
3592	else
3593		ctx->dir = DMA_TO_DEVICE;
3594
3595	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3596					offsetof(struct caam_ctx,
3597						 sh_desc_enc_dma) -
3598					sh_desc_enc_offset,
3599					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3600	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3601		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3602		caam_jr_free(ctx->jrdev);
3603		return -ENOMEM;
3604	}
3605
3606	ctx->sh_desc_enc_dma = dma_addr;
3607	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3608						   sh_desc_dec) -
3609					sh_desc_enc_offset;
3610	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key) -
3611					sh_desc_enc_offset;
3612
3613	/* copy descriptor header template value */
3614	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3615	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
3616
3617	return 0;
3618}
3619
3620static int caam_cra_init(struct crypto_skcipher *tfm)
3621{
3622	struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
3623	struct caam_skcipher_alg *caam_alg =
3624		container_of(alg, typeof(*caam_alg), skcipher.base);
3625	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);
3626	u32 alg_aai = caam_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3627	int ret = 0;
3628
3629	if (alg_aai == OP_ALG_AAI_XTS) {
3630		const char *tfm_name = crypto_tfm_alg_name(&tfm->base);
3631		struct crypto_skcipher *fallback;
3632
3633		fallback = crypto_alloc_skcipher(tfm_name, 0,
3634						 CRYPTO_ALG_NEED_FALLBACK);
3635		if (IS_ERR(fallback)) {
3636			pr_err("Failed to allocate %s fallback: %ld\n",
3637			       tfm_name, PTR_ERR(fallback));
3638			return PTR_ERR(fallback);
3639		}
3640
3641		ctx->fallback = fallback;
3642		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx) +
3643					    crypto_skcipher_reqsize(fallback));
3644	} else {
3645		crypto_skcipher_set_reqsize(tfm, sizeof(struct caam_skcipher_req_ctx));
3646	}
3647
3648	ret = caam_init_common(ctx, &caam_alg->caam, false);
3649	if (ret && ctx->fallback)
3650		crypto_free_skcipher(ctx->fallback);
3651
3652	return ret;
3653}
3654
3655static int caam_aead_init(struct crypto_aead *tfm)
3656{
3657	struct aead_alg *alg = crypto_aead_alg(tfm);
3658	struct caam_aead_alg *caam_alg =
3659		 container_of(alg, struct caam_aead_alg, aead.base);
3660	struct caam_ctx *ctx = crypto_aead_ctx_dma(tfm);
3661
3662	crypto_aead_set_reqsize(tfm, sizeof(struct caam_aead_req_ctx));
3663
3664	return caam_init_common(ctx, &caam_alg->caam, !caam_alg->caam.nodkp);
3665}
3666
3667static void caam_exit_common(struct caam_ctx *ctx)
3668{
3669	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3670			       offsetof(struct caam_ctx, sh_desc_enc_dma) -
3671			       offsetof(struct caam_ctx, sh_desc_enc),
3672			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3673	caam_jr_free(ctx->jrdev);
3674}
3675
3676static void caam_cra_exit(struct crypto_skcipher *tfm)
3677{
3678	struct caam_ctx *ctx = crypto_skcipher_ctx_dma(tfm);
3679
3680	if (ctx->fallback)
3681		crypto_free_skcipher(ctx->fallback);
3682	caam_exit_common(ctx);
3683}
3684
3685static void caam_aead_exit(struct crypto_aead *tfm)
3686{
3687	caam_exit_common(crypto_aead_ctx_dma(tfm));
3688}
3689
3690void caam_algapi_exit(void)
3691{
3692	int i;
3693
3694	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3695		struct caam_aead_alg *t_alg = driver_aeads + i;
3696
3697		if (t_alg->registered)
3698			crypto_engine_unregister_aead(&t_alg->aead);
3699	}
3700
3701	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3702		struct caam_skcipher_alg *t_alg = driver_algs + i;
3703
3704		if (t_alg->registered)
3705			crypto_engine_unregister_skcipher(&t_alg->skcipher);
3706	}
3707}
3708
3709static void caam_skcipher_alg_init(struct caam_skcipher_alg *t_alg)
3710{
3711	struct skcipher_alg *alg = &t_alg->skcipher.base;
3712
3713	alg->base.cra_module = THIS_MODULE;
3714	alg->base.cra_priority = CAAM_CRA_PRIORITY;
3715	alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding();
3716	alg->base.cra_flags |= (CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
3717			      CRYPTO_ALG_KERN_DRIVER_ONLY);
3718
3719	alg->init = caam_cra_init;
3720	alg->exit = caam_cra_exit;
3721}
3722
3723static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
3724{
3725	struct aead_alg *alg = &t_alg->aead.base;
3726
3727	alg->base.cra_module = THIS_MODULE;
3728	alg->base.cra_priority = CAAM_CRA_PRIORITY;
3729	alg->base.cra_ctxsize = sizeof(struct caam_ctx) + crypto_dma_padding();
3730	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_ALLOCATES_MEMORY |
3731			      CRYPTO_ALG_KERN_DRIVER_ONLY;
3732
3733	alg->init = caam_aead_init;
3734	alg->exit = caam_aead_exit;
3735}
3736
3737int caam_algapi_init(struct device *ctrldev)
3738{
3739	struct caam_drv_private *priv = dev_get_drvdata(ctrldev);
3740	int i = 0, err = 0;
3741	u32 aes_vid, aes_inst, des_inst, md_vid, md_inst, ccha_inst, ptha_inst;
3742	unsigned int md_limit = SHA512_DIGEST_SIZE;
3743	bool registered = false, gcm_support;
3744
3745	/*
3746	 * Register crypto algorithms the device supports.
3747	 * First, detect presence and attributes of DES, AES, and MD blocks.
3748	 */
3749	if (priv->era < 10) {
3750		struct caam_perfmon __iomem *perfmon = &priv->jr[0]->perfmon;
3751		u32 cha_vid, cha_inst, aes_rn;
3752
3753		cha_vid = rd_reg32(&perfmon->cha_id_ls);
3754		aes_vid = cha_vid & CHA_ID_LS_AES_MASK;
3755		md_vid = (cha_vid & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3756
3757		cha_inst = rd_reg32(&perfmon->cha_num_ls);
3758		des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >>
3759			   CHA_ID_LS_DES_SHIFT;
3760		aes_inst = cha_inst & CHA_ID_LS_AES_MASK;
3761		md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
3762		ccha_inst = 0;
3763		ptha_inst = 0;
3764
3765		aes_rn = rd_reg32(&perfmon->cha_rev_ls) & CHA_ID_LS_AES_MASK;
3766		gcm_support = !(aes_vid == CHA_VER_VID_AES_LP && aes_rn < 8);
3767	} else {
3768		struct version_regs __iomem *vreg = &priv->jr[0]->vreg;
3769		u32 aesa, mdha;
3770
3771		aesa = rd_reg32(&vreg->aesa);
3772		mdha = rd_reg32(&vreg->mdha);
3773
3774		aes_vid = (aesa & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3775		md_vid = (mdha & CHA_VER_VID_MASK) >> CHA_VER_VID_SHIFT;
3776
3777		des_inst = rd_reg32(&vreg->desa) & CHA_VER_NUM_MASK;
3778		aes_inst = aesa & CHA_VER_NUM_MASK;
3779		md_inst = mdha & CHA_VER_NUM_MASK;
3780		ccha_inst = rd_reg32(&vreg->ccha) & CHA_VER_NUM_MASK;
3781		ptha_inst = rd_reg32(&vreg->ptha) & CHA_VER_NUM_MASK;
3782
3783		gcm_support = aesa & CHA_VER_MISC_AES_GCM;
3784	}
3785
3786	/* If MD is present, limit digest size based on LP256 */
3787	if (md_inst && md_vid == CHA_VER_VID_MD_LP256)
3788		md_limit = SHA256_DIGEST_SIZE;
3789
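/*
 * Illustrative example: with a low-power LP256 MDHA, md_limit drops from
 * SHA512_DIGEST_SIZE (64) to SHA256_DIGEST_SIZE (32), so the AEAD loop
 * below skips every authenc template whose maxauthsize exceeds 32 bytes,
 * i.e. the SHA-384/SHA-512 based ones.
 */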
3790	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3791		struct caam_skcipher_alg *t_alg = driver_algs + i;
3792		u32 alg_sel = t_alg->caam.class1_alg_type & OP_ALG_ALGSEL_MASK;
3793
3794		/* Skip DES algorithms if not supported by device */
3795		if (!des_inst &&
3796		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
3797		     (alg_sel == OP_ALG_ALGSEL_DES)))
3798				continue;
3799
3800		/* Skip AES algorithms if not supported by device */
3801		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
3802				continue;
3803
3804		/*
3805		 * Check support for AES modes not available
3806		 * on LP devices.
3807		 */
3808		if (aes_vid == CHA_VER_VID_AES_LP &&
3809		    (t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK) ==
3810		    OP_ALG_AAI_XTS)
3811			continue;
3812
3813		caam_skcipher_alg_init(t_alg);
3814
3815		err = crypto_engine_register_skcipher(&t_alg->skcipher);
3816		if (err) {
3817			pr_warn("%s alg registration failed\n",
3818				t_alg->skcipher.base.base.cra_driver_name);
3819			continue;
3820		}
3821
3822		t_alg->registered = true;
3823		registered = true;
3824	}
3825
3826	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3827		struct caam_aead_alg *t_alg = driver_aeads + i;
3828		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
3829				 OP_ALG_ALGSEL_MASK;
3830		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
3831				 OP_ALG_ALGSEL_MASK;
3832		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;
3833
3834		/* Skip DES algorithms if not supported by device */
3835		if (!des_inst &&
3836		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
3837		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
3838				continue;
3839
3840		/* Skip AES algorithms if not supported by device */
3841		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
3842				continue;
3843
3844		/* Skip CHACHA20 algorithms if not supported by device */
3845		if (c1_alg_sel == OP_ALG_ALGSEL_CHACHA20 && !ccha_inst)
3846			continue;
3847
3848		/* Skip POLY1305 algorithms if not supported by device */
3849		if (c2_alg_sel == OP_ALG_ALGSEL_POLY1305 && !ptha_inst)
3850			continue;
3851
3852		/* Skip GCM algorithms if not supported by device */
3853		if (c1_alg_sel == OP_ALG_ALGSEL_AES &&
3854		    alg_aai == OP_ALG_AAI_GCM && !gcm_support)
3855			continue;
3856
3857		/*
3858		 * Skip algorithms requiring message digests
3859		 * if MD or MD size is not supported by device.
3860		 */
3861		if (is_mdha(c2_alg_sel) &&
3862		    (!md_inst || t_alg->aead.base.maxauthsize > md_limit))
3863			continue;
3864
3865		caam_aead_alg_init(t_alg);
3866
3867		err = crypto_engine_register_aead(&t_alg->aead);
3868		if (err) {
3869			pr_warn("%s alg registration failed\n",
3870				t_alg->aead.base.base.cra_driver_name);
3871			continue;
3872		}
3873
3874		t_alg->registered = true;
3875		registered = true;
3876	}
3877
3878	if (registered)
3879		pr_info("caam algorithms registered in /proc/crypto\n");
3880
3881	return err;
3882}
v4.17
 
   1/*
   2 * caam - Freescale FSL CAAM support for crypto API
   3 *
   4 * Copyright 2008-2011 Freescale Semiconductor, Inc.
   5 * Copyright 2016 NXP
   6 *
   7 * Based on talitos crypto API driver.
   8 *
   9 * relationship of job descriptors to shared descriptors (SteveC Dec 10 2008):
  10 *
  11 * ---------------                     ---------------
  12 * | JobDesc #1  |-------------------->|  ShareDesc  |
  13 * | *(packet 1) |                     |   (PDB)     |
  14 * ---------------      |------------->|  (hashKey)  |
  15 *       .              |              | (cipherKey) |
  16 *       .              |    |-------->| (operation) |
  17 * ---------------      |    |         ---------------
  18 * | JobDesc #2  |------|    |
  19 * | *(packet 2) |           |
  20 * ---------------           |
  21 *       .                   |
  22 *       .                   |
  23 * ---------------           |
  24 * | JobDesc #3  |------------
  25 * | *(packet 3) |
  26 * ---------------
  27 *
  28 * The SharedDesc never changes for a connection unless rekeyed, but
  29 * each packet will likely be in a different place. So all we need
  30 * to know to process the packet is where the input is, where the
  31 * output goes, and what context we want to process with. Context is
  32 * in the SharedDesc, packet references in the JobDesc.
  33 *
  34 * So, a job desc looks like:
  35 *
  36 * ---------------------
  37 * | Header            |
  38 * | ShareDesc Pointer |
  39 * | SEQ_OUT_PTR       |
  40 * | (output buffer)   |
  41 * | (output length)   |
  42 * | SEQ_IN_PTR        |
  43 * | (input buffer)    |
  44 * | (input length)    |
  45 * ---------------------
  46 */
  47
  48#include "compat.h"
  49
  50#include "regs.h"
  51#include "intern.h"
  52#include "desc_constr.h"
  53#include "jr.h"
  54#include "error.h"
  55#include "sg_sw_sec4.h"
  56#include "key_gen.h"
  57#include "caamalg_desc.h"
  58
  59/*
  60 * crypto alg
  61 */
  62#define CAAM_CRA_PRIORITY		3000
  63/* max key is sum of AES_MAX_KEY_SIZE, max split key size */
  64#define CAAM_MAX_KEY_SIZE		(AES_MAX_KEY_SIZE + \
  65					 CTR_RFC3686_NONCE_SIZE + \
  66					 SHA512_DIGEST_SIZE * 2)
  67
  68#define AEAD_DESC_JOB_IO_LEN		(DESC_JOB_IO_LEN + CAAM_CMD_SZ * 2)
  69#define GCM_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
  70					 CAAM_CMD_SZ * 4)
  71#define AUTHENC_DESC_JOB_IO_LEN		(AEAD_DESC_JOB_IO_LEN + \
  72					 CAAM_CMD_SZ * 5)
  73
  74#define DESC_MAX_USED_BYTES		(CAAM_DESC_BYTES_MAX - DESC_JOB_IO_LEN)
  75#define DESC_MAX_USED_LEN		(DESC_MAX_USED_BYTES / CAAM_CMD_SZ)
  76
  77#ifdef DEBUG
  78/* for print_hex_dumps with line references */
  79#define debug(format, arg...) printk(format, arg)
  80#else
  81#define debug(format, arg...)
  82#endif
  83
  84static struct list_head alg_list;
  85
  86struct caam_alg_entry {
  87	int class1_alg_type;
  88	int class2_alg_type;
  89	bool rfc3686;
  90	bool geniv;
  91};
  92
  93struct caam_aead_alg {
  94	struct aead_alg aead;
  95	struct caam_alg_entry caam;
  96	bool registered;
  97};
  98
  99/*
 100 * per-session context
 101 */
 102struct caam_ctx {
 103	u32 sh_desc_enc[DESC_MAX_USED_LEN];
 104	u32 sh_desc_dec[DESC_MAX_USED_LEN];
 105	u32 sh_desc_givenc[DESC_MAX_USED_LEN];
 106	u8 key[CAAM_MAX_KEY_SIZE];
 107	dma_addr_t sh_desc_enc_dma;
 108	dma_addr_t sh_desc_dec_dma;
 109	dma_addr_t sh_desc_givenc_dma;
 110	dma_addr_t key_dma;
 111	enum dma_data_direction dir;
 112	struct device *jrdev;
 113	struct alginfo adata;
 114	struct alginfo cdata;
 115	unsigned int authsize;
 116};
 117
 118static int aead_null_set_sh_desc(struct crypto_aead *aead)
 119{
 120	struct caam_ctx *ctx = crypto_aead_ctx(aead);
 121	struct device *jrdev = ctx->jrdev;
 122	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 123	u32 *desc;
 124	int rem_bytes = CAAM_DESC_BYTES_MAX - AEAD_DESC_JOB_IO_LEN -
 125			ctx->adata.keylen_pad;
 126
 127	/*
 128	 * Job Descriptor and Shared Descriptors
 129	 * must all fit into the 64-word Descriptor h/w Buffer
 130	 */
 131	if (rem_bytes >= DESC_AEAD_NULL_ENC_LEN) {
 132		ctx->adata.key_inline = true;
 133		ctx->adata.key_virt = ctx->key;
 134	} else {
 135		ctx->adata.key_inline = false;
 136		ctx->adata.key_dma = ctx->key_dma;
 137	}
 138
 139	/* aead_encrypt shared descriptor */
 140	desc = ctx->sh_desc_enc;
 141	cnstr_shdsc_aead_null_encap(desc, &ctx->adata, ctx->authsize,
 142				    ctrlpriv->era);
 143	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 144				   desc_bytes(desc), ctx->dir);
 145
 146	/*
 147	 * Job Descriptor and Shared Descriptors
 148	 * must all fit into the 64-word Descriptor h/w Buffer
 149	 */
 150	if (rem_bytes >= DESC_AEAD_NULL_DEC_LEN) {
 151		ctx->adata.key_inline = true;
 152		ctx->adata.key_virt = ctx->key;
 153	} else {
 154		ctx->adata.key_inline = false;
 155		ctx->adata.key_dma = ctx->key_dma;
 156	}
 157
 158	/* aead_decrypt shared descriptor */
 159	desc = ctx->sh_desc_dec;
 160	cnstr_shdsc_aead_null_decap(desc, &ctx->adata, ctx->authsize,
 161				    ctrlpriv->era);
 162	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 163				   desc_bytes(desc), ctx->dir);
 164
 165	return 0;
 166}
 167
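/*
 * Note (editorial): the rem_bytes tests above implement the inlining
 * rule used throughout this file - the job descriptor plus shared
 * descriptor must fit in the 64-word descriptor buffer, so the key is
 * embedded in the shared descriptor (key_inline/key_virt) only while it
 * fits; otherwise the descriptor carries just the bus address of the
 * DMA-mapped key (key_dma).
 */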
 168static int aead_set_sh_desc(struct crypto_aead *aead)
 169{
 170	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
 171						 struct caam_aead_alg, aead);
 172	unsigned int ivsize = crypto_aead_ivsize(aead);
 173	struct caam_ctx *ctx = crypto_aead_ctx(aead);
 174	struct device *jrdev = ctx->jrdev;
 175	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 176	u32 ctx1_iv_off = 0;
 177	u32 *desc, *nonce = NULL;
 178	u32 inl_mask;
 179	unsigned int data_len[2];
 180	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
 181			       OP_ALG_AAI_CTR_MOD128);
 182	const bool is_rfc3686 = alg->caam.rfc3686;
 183
 184	if (!ctx->authsize)
 185		return 0;
 186
 187	/* NULL encryption / decryption */
 188	if (!ctx->cdata.keylen)
 189		return aead_null_set_sh_desc(aead);
 190
 191	/*
 192	 * AES-CTR needs to load IV in CONTEXT1 reg
 193	 * at an offset of 128bits (16bytes)
 194	 * CONTEXT1[255:128] = IV
 195	 */
 196	if (ctr_mode)
 197		ctx1_iv_off = 16;
 198
 199	/*
 200	 * RFC3686 specific:
 201	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
 202	 */
 203	if (is_rfc3686) {
 204		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
 205		nonce = (u32 *)((void *)ctx->key + ctx->adata.keylen_pad +
 206				ctx->cdata.keylen - CTR_RFC3686_NONCE_SIZE);
 207	}
 208
 209	data_len[0] = ctx->adata.keylen_pad;
 210	data_len[1] = ctx->cdata.keylen;
 211
 212	if (alg->caam.geniv)
 213		goto skip_enc;
 214
 215	/*
 216	 * Job Descriptor and Shared Descriptors
 217	 * must all fit into the 64-word Descriptor h/w Buffer
 218	 */
 219	if (desc_inline_query(DESC_AEAD_ENC_LEN +
 220			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
 221			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
 222			      ARRAY_SIZE(data_len)) < 0)
 223		return -EINVAL;
 224
 225	if (inl_mask & 1)
 226		ctx->adata.key_virt = ctx->key;
 227	else
 228		ctx->adata.key_dma = ctx->key_dma;
 229
 230	if (inl_mask & 2)
 231		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
 232	else
 233		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
 234
 235	ctx->adata.key_inline = !!(inl_mask & 1);
 236	ctx->cdata.key_inline = !!(inl_mask & 2);
 237
 238	/* aead_encrypt shared descriptor */
 239	desc = ctx->sh_desc_enc;
 240	cnstr_shdsc_aead_encap(desc, &ctx->cdata, &ctx->adata, ivsize,
 241			       ctx->authsize, is_rfc3686, nonce, ctx1_iv_off,
 242			       false, ctrlpriv->era);
 243	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 244				   desc_bytes(desc), ctx->dir);
 245
 246skip_enc:
 247	/*
 248	 * Job Descriptor and Shared Descriptors
 249	 * must all fit into the 64-word Descriptor h/w Buffer
 250	 */
 251	if (desc_inline_query(DESC_AEAD_DEC_LEN +
 252			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
 253			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
 254			      ARRAY_SIZE(data_len)) < 0)
 255		return -EINVAL;
 256
 257	if (inl_mask & 1)
 258		ctx->adata.key_virt = ctx->key;
 259	else
 260		ctx->adata.key_dma = ctx->key_dma;
 261
 262	if (inl_mask & 2)
 263		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
 264	else
 265		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
 266
 267	ctx->adata.key_inline = !!(inl_mask & 1);
 268	ctx->cdata.key_inline = !!(inl_mask & 2);
 269
 270	/* aead_decrypt shared descriptor */
 271	desc = ctx->sh_desc_dec;
 272	cnstr_shdsc_aead_decap(desc, &ctx->cdata, &ctx->adata, ivsize,
 273			       ctx->authsize, alg->caam.geniv, is_rfc3686,
 274			       nonce, ctx1_iv_off, false, ctrlpriv->era);
 275	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 276				   desc_bytes(desc), ctx->dir);
 277
 278	if (!alg->caam.geniv)
 279		goto skip_givenc;
 280
 281	/*
 282	 * Job Descriptor and Shared Descriptors
 283	 * must all fit into the 64-word Descriptor h/w Buffer
 284	 */
 285	if (desc_inline_query(DESC_AEAD_GIVENC_LEN +
 286			      (is_rfc3686 ? DESC_AEAD_CTR_RFC3686_LEN : 0),
 287			      AUTHENC_DESC_JOB_IO_LEN, data_len, &inl_mask,
 288			      ARRAY_SIZE(data_len)) < 0)
 289		return -EINVAL;
 290
 291	if (inl_mask & 1)
 292		ctx->adata.key_virt = ctx->key;
 293	else
 294		ctx->adata.key_dma = ctx->key_dma;
 295
 296	if (inl_mask & 2)
 297		ctx->cdata.key_virt = ctx->key + ctx->adata.keylen_pad;
 298	else
 299		ctx->cdata.key_dma = ctx->key_dma + ctx->adata.keylen_pad;
 300
 301	ctx->adata.key_inline = !!(inl_mask & 1);
 302	ctx->cdata.key_inline = !!(inl_mask & 2);
 303
 304	/* aead_givencrypt shared descriptor */
 305	desc = ctx->sh_desc_enc;
 306	cnstr_shdsc_aead_givencap(desc, &ctx->cdata, &ctx->adata, ivsize,
 307				  ctx->authsize, is_rfc3686, nonce,
 308				  ctx1_iv_off, false, ctrlpriv->era);
 309	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 310				   desc_bytes(desc), ctx->dir);
 311
 312skip_givenc:
 313	return 0;
 314}
 315
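/*
 * Sketch (editorial) of the desc_inline_query() usage above: data_len[0]
 * is the padded auth key length and data_len[1] the cipher key length;
 * on return, bit i of inl_mask is set iff key i still fits inline:
 *
 *	inl_mask & 1	-> auth key inlined   (adata.key_virt used)
 *	inl_mask & 2	-> cipher key inlined (cdata.key_virt used)
 *
 * A key that does not fit is referenced by bus address instead.
 */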
 316static int aead_setauthsize(struct crypto_aead *authenc,
 317				    unsigned int authsize)
 318{
 319	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
 320
 321	ctx->authsize = authsize;
 322	aead_set_sh_desc(authenc);
 323
 324	return 0;
 325}
 326
 327static int gcm_set_sh_desc(struct crypto_aead *aead)
 328{
 329	struct caam_ctx *ctx = crypto_aead_ctx(aead);
 330	struct device *jrdev = ctx->jrdev;
 331	unsigned int ivsize = crypto_aead_ivsize(aead);
 332	u32 *desc;
 333	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
 334			ctx->cdata.keylen;
 335
 336	if (!ctx->cdata.keylen || !ctx->authsize)
 337		return 0;
 338
 339	/*
 340	 * AES GCM encrypt shared descriptor
 341	 * Job Descriptor and Shared Descriptor
 342	 * must fit into the 64-word Descriptor h/w Buffer
 343	 */
 344	if (rem_bytes >= DESC_GCM_ENC_LEN) {
 345		ctx->cdata.key_inline = true;
 346		ctx->cdata.key_virt = ctx->key;
 347	} else {
 348		ctx->cdata.key_inline = false;
 349		ctx->cdata.key_dma = ctx->key_dma;
 350	}
 351
 352	desc = ctx->sh_desc_enc;
 353	cnstr_shdsc_gcm_encap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
 354	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 355				   desc_bytes(desc), ctx->dir);
 356
 357	/*
 358	 * Job Descriptor and Shared Descriptors
 359	 * must all fit into the 64-word Descriptor h/w Buffer
 360	 */
 361	if (rem_bytes >= DESC_GCM_DEC_LEN) {
 362		ctx->cdata.key_inline = true;
 363		ctx->cdata.key_virt = ctx->key;
 364	} else {
 365		ctx->cdata.key_inline = false;
 366		ctx->cdata.key_dma = ctx->key_dma;
 367	}
 368
 369	desc = ctx->sh_desc_dec;
 370	cnstr_shdsc_gcm_decap(desc, &ctx->cdata, ivsize, ctx->authsize, false);
 371	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 372				   desc_bytes(desc), ctx->dir);
 373
 374	return 0;
 375}
 376
 377static int gcm_setauthsize(struct crypto_aead *authenc, unsigned int authsize)
 378{
 379	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
 380
 381	ctx->authsize = authsize;
 382	gcm_set_sh_desc(authenc);
 383
 384	return 0;
 385}
 386
 387static int rfc4106_set_sh_desc(struct crypto_aead *aead)
 388{
 389	struct caam_ctx *ctx = crypto_aead_ctx(aead);
 390	struct device *jrdev = ctx->jrdev;
 391	unsigned int ivsize = crypto_aead_ivsize(aead);
 392	u32 *desc;
 393	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
 394			ctx->cdata.keylen;
 395
 396	if (!ctx->cdata.keylen || !ctx->authsize)
 397		return 0;
 398
 399	/*
 400	 * RFC4106 encrypt shared descriptor
 401	 * Job Descriptor and Shared Descriptor
 402	 * must fit into the 64-word Descriptor h/w Buffer
 403	 */
 404	if (rem_bytes >= DESC_RFC4106_ENC_LEN) {
 405		ctx->cdata.key_inline = true;
 406		ctx->cdata.key_virt = ctx->key;
 407	} else {
 408		ctx->cdata.key_inline = false;
 409		ctx->cdata.key_dma = ctx->key_dma;
 410	}
 411
 412	desc = ctx->sh_desc_enc;
 413	cnstr_shdsc_rfc4106_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
 414				  false);
 415	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 416				   desc_bytes(desc), ctx->dir);
 417
 418	/*
 419	 * Job Descriptor and Shared Descriptors
 420	 * must all fit into the 64-word Descriptor h/w Buffer
 421	 */
 422	if (rem_bytes >= DESC_RFC4106_DEC_LEN) {
 423		ctx->cdata.key_inline = true;
 424		ctx->cdata.key_virt = ctx->key;
 425	} else {
 426		ctx->cdata.key_inline = false;
 427		ctx->cdata.key_dma = ctx->key_dma;
 428	}
 429
 430	desc = ctx->sh_desc_dec;
 431	cnstr_shdsc_rfc4106_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
 432				  false);
 433	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 434				   desc_bytes(desc), ctx->dir);
 435
 436	return 0;
 437}
 438
 439static int rfc4106_setauthsize(struct crypto_aead *authenc,
 440			       unsigned int authsize)
 441{
 442	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
 443
 444	ctx->authsize = authsize;
 445	rfc4106_set_sh_desc(authenc);
 446
 447	return 0;
 448}
 449
 450static int rfc4543_set_sh_desc(struct crypto_aead *aead)
 451{
 452	struct caam_ctx *ctx = crypto_aead_ctx(aead);
 453	struct device *jrdev = ctx->jrdev;
 454	unsigned int ivsize = crypto_aead_ivsize(aead);
 455	u32 *desc;
 456	int rem_bytes = CAAM_DESC_BYTES_MAX - GCM_DESC_JOB_IO_LEN -
 457			ctx->cdata.keylen;
 458
 459	if (!ctx->cdata.keylen || !ctx->authsize)
 460		return 0;
 461
 462	/*
 463	 * RFC4543 encrypt shared descriptor
 464	 * Job Descriptor and Shared Descriptor
 465	 * must fit into the 64-word Descriptor h/w Buffer
 466	 */
 467	if (rem_bytes >= DESC_RFC4543_ENC_LEN) {
 468		ctx->cdata.key_inline = true;
 469		ctx->cdata.key_virt = ctx->key;
 470	} else {
 471		ctx->cdata.key_inline = false;
 472		ctx->cdata.key_dma = ctx->key_dma;
 473	}
 474
 475	desc = ctx->sh_desc_enc;
 476	cnstr_shdsc_rfc4543_encap(desc, &ctx->cdata, ivsize, ctx->authsize,
 477				  false);
 478	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 479				   desc_bytes(desc), ctx->dir);
 480
 481	/*
 482	 * Job Descriptor and Shared Descriptors
 483	 * must all fit into the 64-word Descriptor h/w Buffer
 484	 */
 485	if (rem_bytes >= DESC_RFC4543_DEC_LEN) {
 486		ctx->cdata.key_inline = true;
 487		ctx->cdata.key_virt = ctx->key;
 488	} else {
 489		ctx->cdata.key_inline = false;
 490		ctx->cdata.key_dma = ctx->key_dma;
 491	}
 492
 493	desc = ctx->sh_desc_dec;
 494	cnstr_shdsc_rfc4543_decap(desc, &ctx->cdata, ivsize, ctx->authsize,
 495				  false);
 496	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 497				   desc_bytes(desc), ctx->dir);
 498
 499	return 0;
 500}
 501
 502static int rfc4543_setauthsize(struct crypto_aead *authenc,
 503			       unsigned int authsize)
 504{
 505	struct caam_ctx *ctx = crypto_aead_ctx(authenc);
 506
 507	ctx->authsize = authsize;
 508	rfc4543_set_sh_desc(authenc);
 509
 510	return 0;
 511}
 512
 513static int aead_setkey(struct crypto_aead *aead,
 514			       const u8 *key, unsigned int keylen)
 515{
 516	struct caam_ctx *ctx = crypto_aead_ctx(aead);
 517	struct device *jrdev = ctx->jrdev;
 518	struct caam_drv_private *ctrlpriv = dev_get_drvdata(jrdev->parent);
 519	struct crypto_authenc_keys keys;
 520	int ret = 0;
 521
 522	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
 523		goto badkey;
 524
 525#ifdef DEBUG
 526	printk(KERN_ERR "keylen %d enckeylen %d authkeylen %d\n",
 527	       keys.authkeylen + keys.enckeylen, keys.enckeylen,
 528	       keys.authkeylen);
 529	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
 530		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
 531#endif
 532
 533	/*
 534	 * If DKP is supported, use it in the shared descriptor to generate
 535	 * the split key.
 536	 */
 537	if (ctrlpriv->era >= 6) {
 538		ctx->adata.keylen = keys.authkeylen;
 539		ctx->adata.keylen_pad = split_key_len(ctx->adata.algtype &
 540						      OP_ALG_ALGSEL_MASK);
 541
 542		if (ctx->adata.keylen_pad + keys.enckeylen > CAAM_MAX_KEY_SIZE)
 543			goto badkey;
 544
 545		memcpy(ctx->key, keys.authkey, keys.authkeylen);
 546		memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey,
 547		       keys.enckeylen);
 548		dma_sync_single_for_device(jrdev, ctx->key_dma,
 549					   ctx->adata.keylen_pad +
 550					   keys.enckeylen, ctx->dir);
 551		goto skip_split_key;
 552	}
 553
 554	ret = gen_split_key(ctx->jrdev, ctx->key, &ctx->adata, keys.authkey,
 555			    keys.authkeylen, CAAM_MAX_KEY_SIZE -
 556			    keys.enckeylen);
 557	if (ret) {
 558		goto badkey;
 559	}
 560
  561	/* append encryption key after the auth split key */
 562	memcpy(ctx->key + ctx->adata.keylen_pad, keys.enckey, keys.enckeylen);
 563	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->adata.keylen_pad +
 564				   keys.enckeylen, ctx->dir);
 565#ifdef DEBUG
 566	print_hex_dump(KERN_ERR, "ctx.key@"__stringify(__LINE__)": ",
 567		       DUMP_PREFIX_ADDRESS, 16, 4, ctx->key,
 568		       ctx->adata.keylen_pad + keys.enckeylen, 1);
 569#endif
 570
 571skip_split_key:
 572	ctx->cdata.keylen = keys.enckeylen;
 573	memzero_explicit(&keys, sizeof(keys));
 574	return aead_set_sh_desc(aead);
 575badkey:
 576	crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
 577	memzero_explicit(&keys, sizeof(keys));
 578	return -EINVAL;
 579}
 580
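/*
 * Resulting ctx->key layout after a successful aead_setkey() (editorial
 * sketch):
 *
 *	+-------------------------------+------------------+
 *	| auth (split) key, padded to   | encryption key,  |
 *	| adata.keylen_pad bytes        | cdata.keylen     |
 *	+-------------------------------+------------------+
 *	^ key_dma
 *
 * On Era 6+ the raw auth key is stored instead, and the DKP protocol in
 * the shared descriptor derives the split key in place - which is why
 * the context mapping is DMA_BIDIRECTIONAL on DKP-capable devices.
 */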
 581static int gcm_setkey(struct crypto_aead *aead,
 582		      const u8 *key, unsigned int keylen)
 583{
 584	struct caam_ctx *ctx = crypto_aead_ctx(aead);
 585	struct device *jrdev = ctx->jrdev;
 586
 587#ifdef DEBUG
 588	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
 589		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
 590#endif
 591
 592	memcpy(ctx->key, key, keylen);
 593	dma_sync_single_for_device(jrdev, ctx->key_dma, keylen, ctx->dir);
 594	ctx->cdata.keylen = keylen;
 595
 596	return gcm_set_sh_desc(aead);
 597}
 598
 599static int rfc4106_setkey(struct crypto_aead *aead,
 600			  const u8 *key, unsigned int keylen)
 601{
 602	struct caam_ctx *ctx = crypto_aead_ctx(aead);
 603	struct device *jrdev = ctx->jrdev;
 604
 605	if (keylen < 4)
 606		return -EINVAL;
 607
 608#ifdef DEBUG
 609	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
 610		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
 611#endif
 612
 613	memcpy(ctx->key, key, keylen);
 614
 615	/*
 616	 * The last four bytes of the key material are used as the salt value
 617	 * in the nonce. Update the AES key length.
 618	 */
 619	ctx->cdata.keylen = keylen - 4;
 620	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
 621				   ctx->dir);
 622	return rfc4106_set_sh_desc(aead);
 623}
 624
 625static int rfc4543_setkey(struct crypto_aead *aead,
 626			  const u8 *key, unsigned int keylen)
 627{
 628	struct caam_ctx *ctx = crypto_aead_ctx(aead);
 629	struct device *jrdev = ctx->jrdev;
 630
 631	if (keylen < 4)
 632		return -EINVAL;
 633
 634#ifdef DEBUG
 635	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
 636		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
 637#endif
 638
 639	memcpy(ctx->key, key, keylen);
 640
 641	/*
 642	 * The last four bytes of the key material are used as the salt value
 643	 * in the nonce. Update the AES key length.
 644	 */
 645	ctx->cdata.keylen = keylen - 4;
 646	dma_sync_single_for_device(jrdev, ctx->key_dma, ctx->cdata.keylen,
 647				   ctx->dir);
 648	return rfc4543_set_sh_desc(aead);
 649}
 650
 651static int ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
 652			     const u8 *key, unsigned int keylen)
 653{
 654	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
 655	struct crypto_tfm *tfm = crypto_ablkcipher_tfm(ablkcipher);
 656	const char *alg_name = crypto_tfm_alg_name(tfm);
 657	struct device *jrdev = ctx->jrdev;
 658	unsigned int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
 659	u32 *desc;
 660	u32 ctx1_iv_off = 0;
 661	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
 662			       OP_ALG_AAI_CTR_MOD128);
 663	const bool is_rfc3686 = (ctr_mode &&
 664				 (strstr(alg_name, "rfc3686") != NULL));
 665
 666#ifdef DEBUG
 667	print_hex_dump(KERN_ERR, "key in @"__stringify(__LINE__)": ",
 668		       DUMP_PREFIX_ADDRESS, 16, 4, key, keylen, 1);
 669#endif
 670	/*
 671	 * AES-CTR needs to load IV in CONTEXT1 reg
 672	 * at an offset of 128bits (16bytes)
 673	 * CONTEXT1[255:128] = IV
 674	 */
 675	if (ctr_mode)
 676		ctx1_iv_off = 16;
 677
 678	/*
 679	 * RFC3686 specific:
 680	 *	| CONTEXT1[255:128] = {NONCE, IV, COUNTER}
 681	 *	| *key = {KEY, NONCE}
 682	 */
 683	if (is_rfc3686) {
 684		ctx1_iv_off = 16 + CTR_RFC3686_NONCE_SIZE;
 685		keylen -= CTR_RFC3686_NONCE_SIZE;
 686	}
 687
 688	ctx->cdata.keylen = keylen;
 689	ctx->cdata.key_virt = key;
 690	ctx->cdata.key_inline = true;
 691
 692	/* ablkcipher_encrypt shared descriptor */
 693	desc = ctx->sh_desc_enc;
 694	cnstr_shdsc_ablkcipher_encap(desc, &ctx->cdata, ivsize, is_rfc3686,
 695				     ctx1_iv_off);
 696	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 697				   desc_bytes(desc), ctx->dir);
 698
 699	/* ablkcipher_decrypt shared descriptor */
 700	desc = ctx->sh_desc_dec;
 701	cnstr_shdsc_ablkcipher_decap(desc, &ctx->cdata, ivsize, is_rfc3686,
 702				     ctx1_iv_off);
 703	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 704				   desc_bytes(desc), ctx->dir);
 705
 706	/* ablkcipher_givencrypt shared descriptor */
 707	desc = ctx->sh_desc_givenc;
 708	cnstr_shdsc_ablkcipher_givencap(desc, &ctx->cdata, ivsize, is_rfc3686,
 709					ctx1_iv_off);
 710	dma_sync_single_for_device(jrdev, ctx->sh_desc_givenc_dma,
 711				   desc_bytes(desc), ctx->dir);
 712
 713	return 0;
 714}
 715
 716static int xts_ablkcipher_setkey(struct crypto_ablkcipher *ablkcipher,
 717				 const u8 *key, unsigned int keylen)
 718{
 719	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
 720	struct device *jrdev = ctx->jrdev;
 721	u32 *desc;
 722
 723	if (keylen != 2 * AES_MIN_KEY_SIZE  && keylen != 2 * AES_MAX_KEY_SIZE) {
 724		crypto_ablkcipher_set_flags(ablkcipher,
 725					    CRYPTO_TFM_RES_BAD_KEY_LEN);
 726		dev_err(jrdev, "key size mismatch\n");
 727		return -EINVAL;
 728	}
 729
 730	ctx->cdata.keylen = keylen;
 731	ctx->cdata.key_virt = key;
 732	ctx->cdata.key_inline = true;
 733
 734	/* xts_ablkcipher_encrypt shared descriptor */
 735	desc = ctx->sh_desc_enc;
 736	cnstr_shdsc_xts_ablkcipher_encap(desc, &ctx->cdata);
 737	dma_sync_single_for_device(jrdev, ctx->sh_desc_enc_dma,
 738				   desc_bytes(desc), ctx->dir);
 739
 740	/* xts_ablkcipher_decrypt shared descriptor */
 741	desc = ctx->sh_desc_dec;
 742	cnstr_shdsc_xts_ablkcipher_decap(desc, &ctx->cdata);
 743	dma_sync_single_for_device(jrdev, ctx->sh_desc_dec_dma,
 744				   desc_bytes(desc), ctx->dir);
 745
 746	return 0;
 747}
 748
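/*
 * Note (editorial): an XTS key is two concatenated AES keys - the data
 * unit key and the tweak key - hence only 2 * AES_MIN_KEY_SIZE (32) and
 * 2 * AES_MAX_KEY_SIZE (64) byte keys are accepted above.
 */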
 749/*
 750 * aead_edesc - s/w-extended aead descriptor
 751 * @src_nents: number of segments in input s/w scatterlist
 752 * @dst_nents: number of segments in output s/w scatterlist
 753 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 754 * @sec4_sg_dma: bus physical mapped address of h/w link table
 755 * @sec4_sg: pointer to h/w link table
 756 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 757 */
 758struct aead_edesc {
 759	int src_nents;
 760	int dst_nents;
 761	int sec4_sg_bytes;
 762	dma_addr_t sec4_sg_dma;
 763	struct sec4_sg_entry *sec4_sg;
 764	u32 hw_desc[];
 765};
 766
 767/*
 768 * ablkcipher_edesc - s/w-extended ablkcipher descriptor
 769 * @src_nents: number of segments in input s/w scatterlist
 770 * @dst_nents: number of segments in output s/w scatterlist
 771 * @iv_dma: dma address of iv for checking continuity and link table
 772 * @sec4_sg_bytes: length of dma mapped sec4_sg space
 773 * @sec4_sg_dma: bus physical mapped address of h/w link table
 774 * @sec4_sg: pointer to h/w link table
 775 * @hw_desc: the h/w job descriptor followed by any referenced link tables
 776 */
 777struct ablkcipher_edesc {
 778	int src_nents;
 779	int dst_nents;
 780	dma_addr_t iv_dma;
 781	int sec4_sg_bytes;
 782	dma_addr_t sec4_sg_dma;
 783	struct sec4_sg_entry *sec4_sg;
 784	u32 hw_desc[0];
 785};
 786
 787static void caam_unmap(struct device *dev, struct scatterlist *src,
 788		       struct scatterlist *dst, int src_nents,
 789		       int dst_nents,
 790		       dma_addr_t iv_dma, int ivsize, dma_addr_t sec4_sg_dma,
 791		       int sec4_sg_bytes)
 792{
 793	if (dst != src) {
 794		if (src_nents)
 795			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);
 796		dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
 797	} else {
 798		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
 799	}
 800
 801	if (iv_dma)
 802		dma_unmap_single(dev, iv_dma, ivsize, DMA_TO_DEVICE);
 803	if (sec4_sg_bytes)
 804		dma_unmap_single(dev, sec4_sg_dma, sec4_sg_bytes,
 805				 DMA_TO_DEVICE);
 806}
 807
 808static void aead_unmap(struct device *dev,
 809		       struct aead_edesc *edesc,
 810		       struct aead_request *req)
 811{
 812	caam_unmap(dev, req->src, req->dst,
 813		   edesc->src_nents, edesc->dst_nents, 0, 0,
 814		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
 815}
 816
 817static void ablkcipher_unmap(struct device *dev,
 818			     struct ablkcipher_edesc *edesc,
 819			     struct ablkcipher_request *req)
 820{
 821	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
 822	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
 823
 824	caam_unmap(dev, req->src, req->dst,
 825		   edesc->src_nents, edesc->dst_nents,
 826		   edesc->iv_dma, ivsize,
 827		   edesc->sec4_sg_dma, edesc->sec4_sg_bytes);
 828}
 829
 830static void aead_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
 831				   void *context)
 832{
 833	struct aead_request *req = context;
 834	struct aead_edesc *edesc;
 835
 836#ifdef DEBUG
 837	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
 838#endif
 839
 840	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
 841
 842	if (err)
 843		caam_jr_strstatus(jrdev, err);
 844
 845	aead_unmap(jrdev, edesc, req);
 846
 847	kfree(edesc);
 848
 849	aead_request_complete(req, err);
 850}
 851
 852static void aead_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
 853				   void *context)
 854{
 855	struct aead_request *req = context;
 856	struct aead_edesc *edesc;
 857
 858#ifdef DEBUG
 859	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
 860#endif
 861
 862	edesc = container_of(desc, struct aead_edesc, hw_desc[0]);
 863
 864	if (err)
 865		caam_jr_strstatus(jrdev, err);
 866
 867	aead_unmap(jrdev, edesc, req);
 868
 869	/*
  870	 * verify the h/w auth check passed; otherwise return -EBADMSG
 871	 */
 872	if ((err & JRSTA_CCBERR_ERRID_MASK) == JRSTA_CCBERR_ERRID_ICVCHK)
 873		err = -EBADMSG;
 874
 875	kfree(edesc);
 876
 877	aead_request_complete(req, err);
 878}
 879
 880static void ablkcipher_encrypt_done(struct device *jrdev, u32 *desc, u32 err,
 881				   void *context)
 882{
 883	struct ablkcipher_request *req = context;
 884	struct ablkcipher_edesc *edesc;
 885	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
 886	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
 887
 888#ifdef DEBUG
 889	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
 890#endif
 891
 892	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
 893
 894	if (err)
 895		caam_jr_strstatus(jrdev, err);
 896
 897#ifdef DEBUG
 898	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
 899		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
 900		       edesc->src_nents > 1 ? 100 : ivsize, 1);
 901#endif
 902	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
 903		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
 904		     edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
 905
 906	ablkcipher_unmap(jrdev, edesc, req);
 907
 908	/*
 909	 * The crypto API expects us to set the IV (req->info) to the last
 910	 * ciphertext block. This is used e.g. by the CTS mode.
 911	 */
 912	scatterwalk_map_and_copy(req->info, req->dst, req->nbytes - ivsize,
 913				 ivsize, 0);
 914
 915	kfree(edesc);
 916
 917	ablkcipher_request_complete(req, err);
 918}
 919
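/*
 * Note (editorial): copying the last ciphertext block into req->info
 * above is what lets chained CBC requests reuse req->info directly as
 * the next IV (modes such as CTS depend on the same convention). The
 * decrypt completion below performs the mirror-image copy from
 * req->src, since after decryption the last ciphertext block only
 * survives in the source buffer.
 */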
 920static void ablkcipher_decrypt_done(struct device *jrdev, u32 *desc, u32 err,
 921				    void *context)
 922{
 923	struct ablkcipher_request *req = context;
 924	struct ablkcipher_edesc *edesc;
 925	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
 926	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
 927
 928#ifdef DEBUG
 929	dev_err(jrdev, "%s %d: err 0x%x\n", __func__, __LINE__, err);
 930#endif
 931
 932	edesc = container_of(desc, struct ablkcipher_edesc, hw_desc[0]);
 933	if (err)
 934		caam_jr_strstatus(jrdev, err);
 935
 936#ifdef DEBUG
 937	print_hex_dump(KERN_ERR, "dstiv  @"__stringify(__LINE__)": ",
 938		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
 939		       ivsize, 1);
 940#endif
 941	caam_dump_sg(KERN_ERR, "dst    @" __stringify(__LINE__)": ",
 942		     DUMP_PREFIX_ADDRESS, 16, 4, req->dst,
 943		     edesc->dst_nents > 1 ? 100 : req->nbytes, 1);
 944
 945	ablkcipher_unmap(jrdev, edesc, req);
 946
 947	/*
 948	 * The crypto API expects us to set the IV (req->info) to the last
 949	 * ciphertext block.
 950	 */
 951	scatterwalk_map_and_copy(req->info, req->src, req->nbytes - ivsize,
 952				 ivsize, 0);
 953
 954	kfree(edesc);
 955
 956	ablkcipher_request_complete(req, err);
 957}
 958
 959/*
 960 * Fill in aead job descriptor
 961 */
 962static void init_aead_job(struct aead_request *req,
 963			  struct aead_edesc *edesc,
 964			  bool all_contig, bool encrypt)
 965{
 966	struct crypto_aead *aead = crypto_aead_reqtfm(req);
 967	struct caam_ctx *ctx = crypto_aead_ctx(aead);
 968	int authsize = ctx->authsize;
 969	u32 *desc = edesc->hw_desc;
 970	u32 out_options, in_options;
 971	dma_addr_t dst_dma, src_dma;
 972	int len, sec4_sg_index = 0;
 973	dma_addr_t ptr;
 974	u32 *sh_desc;
 975
 976	sh_desc = encrypt ? ctx->sh_desc_enc : ctx->sh_desc_dec;
 977	ptr = encrypt ? ctx->sh_desc_enc_dma : ctx->sh_desc_dec_dma;
 978
 979	len = desc_len(sh_desc);
 980	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
 981
 982	if (all_contig) {
 983		src_dma = edesc->src_nents ? sg_dma_address(req->src) : 0;
 984		in_options = 0;
 985	} else {
 986		src_dma = edesc->sec4_sg_dma;
 987		sec4_sg_index += edesc->src_nents;
 988		in_options = LDST_SGF;
 989	}
 990
 991	append_seq_in_ptr(desc, src_dma, req->assoclen + req->cryptlen,
 992			  in_options);
 993
 994	dst_dma = src_dma;
 995	out_options = in_options;
 996
 997	if (unlikely(req->src != req->dst)) {
 998		if (edesc->dst_nents == 1) {
 999			dst_dma = sg_dma_address(req->dst);
1000		} else {
1001			dst_dma = edesc->sec4_sg_dma +
1002				  sec4_sg_index *
1003				  sizeof(struct sec4_sg_entry);
1004			out_options = LDST_SGF;
1005		}
1006	}
1007
1008	if (encrypt)
1009		append_seq_out_ptr(desc, dst_dma,
1010				   req->assoclen + req->cryptlen + authsize,
1011				   out_options);
1012	else
1013		append_seq_out_ptr(desc, dst_dma,
1014				   req->assoclen + req->cryptlen - authsize,
1015				   out_options);
1016}
1017
1018static void init_gcm_job(struct aead_request *req,
1019			 struct aead_edesc *edesc,
1020			 bool all_contig, bool encrypt)
1021{
1022	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1023	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1024	unsigned int ivsize = crypto_aead_ivsize(aead);
1025	u32 *desc = edesc->hw_desc;
1026	bool generic_gcm = (ivsize == GCM_AES_IV_SIZE);
1027	unsigned int last;
1028
1029	init_aead_job(req, edesc, all_contig, encrypt);
1030	append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1031
1032	/* BUG This should not be specific to generic GCM. */
1033	last = 0;
1034	if (encrypt && generic_gcm && !(req->assoclen + req->cryptlen))
1035		last = FIFOLD_TYPE_LAST1;
1036
1037	/* Read GCM IV */
1038	append_cmd(desc, CMD_FIFO_LOAD | FIFOLD_CLASS_CLASS1 | IMMEDIATE |
1039			 FIFOLD_TYPE_IV | FIFOLD_TYPE_FLUSH1 | GCM_AES_IV_SIZE | last);
1040	/* Append Salt */
1041	if (!generic_gcm)
1042		append_data(desc, ctx->key + ctx->cdata.keylen, 4);
1043	/* Append IV */
1044	append_data(desc, req->iv, ivsize);
1045	/* End of blank commands */
1046}
1047
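/*
 * Note (editorial): for the rfc4106/rfc4543 variants (!generic_gcm,
 * 8-byte IV) the 4-byte salt kept at the tail of ctx->key plus the
 * 8-byte per-request IV appended above form the full 12-byte GCM nonce;
 * generic GCM (GCM_AES_IV_SIZE = 12) loads the IV alone.
 */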
1048static void init_authenc_job(struct aead_request *req,
1049			     struct aead_edesc *edesc,
1050			     bool all_contig, bool encrypt)
1051{
1052	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1053	struct caam_aead_alg *alg = container_of(crypto_aead_alg(aead),
1054						 struct caam_aead_alg, aead);
1055	unsigned int ivsize = crypto_aead_ivsize(aead);
1056	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1057	struct caam_drv_private *ctrlpriv = dev_get_drvdata(ctx->jrdev->parent);
1058	const bool ctr_mode = ((ctx->cdata.algtype & OP_ALG_AAI_MASK) ==
1059			       OP_ALG_AAI_CTR_MOD128);
1060	const bool is_rfc3686 = alg->caam.rfc3686;
1061	u32 *desc = edesc->hw_desc;
1062	u32 ivoffset = 0;
1063
1064	/*
1065	 * AES-CTR needs to load IV in CONTEXT1 reg
1066	 * at an offset of 128bits (16bytes)
1067	 * CONTEXT1[255:128] = IV
1068	 */
1069	if (ctr_mode)
1070		ivoffset = 16;
1071
1072	/*
1073	 * RFC3686 specific:
1074	 *	CONTEXT1[255:128] = {NONCE, IV, COUNTER}
1075	 */
1076	if (is_rfc3686)
1077		ivoffset = 16 + CTR_RFC3686_NONCE_SIZE;
1078
1079	init_aead_job(req, edesc, all_contig, encrypt);
1080
1081	/*
1082	 * {REG3, DPOVRD} = assoclen, depending on whether MATH command supports
1083	 * having DPOVRD as destination.
1084	 */
1085	if (ctrlpriv->era < 3)
1086		append_math_add_imm_u32(desc, REG3, ZERO, IMM, req->assoclen);
1087	else
1088		append_math_add_imm_u32(desc, DPOVRD, ZERO, IMM, req->assoclen);
1089
1090	if (ivsize && ((is_rfc3686 && encrypt) || !alg->caam.geniv))
1091		append_load_as_imm(desc, req->iv, ivsize,
1092				   LDST_CLASS_1_CCB |
1093				   LDST_SRCDST_BYTE_CONTEXT |
1094				   (ivoffset << LDST_OFFSET_SHIFT));
1095}
1096
1097/*
1098 * Fill in ablkcipher job descriptor
1099 */
1100static void init_ablkcipher_job(u32 *sh_desc, dma_addr_t ptr,
1101				struct ablkcipher_edesc *edesc,
1102				struct ablkcipher_request *req,
1103				bool iv_contig)
1104{
1105	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1106	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1107	u32 *desc = edesc->hw_desc;
1108	u32 out_options = 0, in_options;
1109	dma_addr_t dst_dma, src_dma;
1110	int len, sec4_sg_index = 0;
1111
1112#ifdef DEBUG
1113	print_hex_dump(KERN_ERR, "presciv@"__stringify(__LINE__)": ",
1114		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
1115		       ivsize, 1);
 1116	pr_err("asked=%d, nbytes=%d\n",
1117	       (int)edesc->src_nents > 1 ? 100 : req->nbytes, req->nbytes);
1118#endif
1119	caam_dump_sg(KERN_ERR, "src    @" __stringify(__LINE__)": ",
1120		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1121		     edesc->src_nents > 1 ? 100 : req->nbytes, 1);
1122
1123	len = desc_len(sh_desc);
1124	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1125
1126	if (iv_contig) {
1127		src_dma = edesc->iv_dma;
1128		in_options = 0;
1129	} else {
1130		src_dma = edesc->sec4_sg_dma;
1131		sec4_sg_index += edesc->src_nents + 1;
1132		in_options = LDST_SGF;
1133	}
1134	append_seq_in_ptr(desc, src_dma, req->nbytes + ivsize, in_options);
1135
1136	if (likely(req->src == req->dst)) {
1137		if (edesc->src_nents == 1 && iv_contig) {
1138			dst_dma = sg_dma_address(req->src);
1139		} else {
1140			dst_dma = edesc->sec4_sg_dma +
1141				sizeof(struct sec4_sg_entry);
1142			out_options = LDST_SGF;
1143		}
1144	} else {
1145		if (edesc->dst_nents == 1) {
1146			dst_dma = sg_dma_address(req->dst);
1147		} else {
1148			dst_dma = edesc->sec4_sg_dma +
1149				sec4_sg_index * sizeof(struct sec4_sg_entry);
1150			out_options = LDST_SGF;
1151		}
1152	}
1153	append_seq_out_ptr(desc, dst_dma, req->nbytes, out_options);
1154}
1155
1156/*
1157 * Fill in ablkcipher givencrypt job descriptor
1158 */
1159static void init_ablkcipher_giv_job(u32 *sh_desc, dma_addr_t ptr,
1160				    struct ablkcipher_edesc *edesc,
1161				    struct ablkcipher_request *req,
1162				    bool iv_contig)
1163{
1164	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1165	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1166	u32 *desc = edesc->hw_desc;
1167	u32 out_options, in_options;
1168	dma_addr_t dst_dma, src_dma;
1169	int len, sec4_sg_index = 0;
1170
1171#ifdef DEBUG
1172	print_hex_dump(KERN_ERR, "presciv@" __stringify(__LINE__) ": ",
1173		       DUMP_PREFIX_ADDRESS, 16, 4, req->info,
1174		       ivsize, 1);
1175#endif
1176	caam_dump_sg(KERN_ERR, "src    @" __stringify(__LINE__) ": ",
1177		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1178		     edesc->src_nents > 1 ? 100 : req->nbytes, 1);
1179
1180	len = desc_len(sh_desc);
1181	init_job_desc_shared(desc, ptr, len, HDR_SHARE_DEFER | HDR_REVERSE);
1182
1183	if (edesc->src_nents == 1) {
1184		src_dma = sg_dma_address(req->src);
1185		in_options = 0;
1186	} else {
1187		src_dma = edesc->sec4_sg_dma;
1188		sec4_sg_index += edesc->src_nents;
1189		in_options = LDST_SGF;
1190	}
1191	append_seq_in_ptr(desc, src_dma, req->nbytes, in_options);
1192
1193	if (iv_contig) {
1194		dst_dma = edesc->iv_dma;
1195		out_options = 0;
1196	} else {
1197		dst_dma = edesc->sec4_sg_dma +
1198			  sec4_sg_index * sizeof(struct sec4_sg_entry);
1199		out_options = LDST_SGF;
1200	}
1201	append_seq_out_ptr(desc, dst_dma, req->nbytes + ivsize, out_options);
1202}
1203
1204/*
1205 * allocate and map the aead extended descriptor
1206 */
1207static struct aead_edesc *aead_edesc_alloc(struct aead_request *req,
1208					   int desc_bytes, bool *all_contig_ptr,
1209					   bool encrypt)
1210{
1211	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1212	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1213	struct device *jrdev = ctx->jrdev;
1214	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1215		       GFP_KERNEL : GFP_ATOMIC;
1216	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1217	struct aead_edesc *edesc;
1218	int sec4_sg_index, sec4_sg_len, sec4_sg_bytes;
1219	unsigned int authsize = ctx->authsize;
1220
1221	if (unlikely(req->dst != req->src)) {
1222		src_nents = sg_nents_for_len(req->src, req->assoclen +
1223					     req->cryptlen);
1224		if (unlikely(src_nents < 0)) {
1225			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1226				req->assoclen + req->cryptlen);
1227			return ERR_PTR(src_nents);
1228		}
1229
1230		dst_nents = sg_nents_for_len(req->dst, req->assoclen +
1231					     req->cryptlen +
1232						(encrypt ? authsize :
1233							   (-authsize)));
1234		if (unlikely(dst_nents < 0)) {
1235			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1236				req->assoclen + req->cryptlen +
1237				(encrypt ? authsize : (-authsize)));
1238			return ERR_PTR(dst_nents);
1239		}
1240	} else {
1241		src_nents = sg_nents_for_len(req->src, req->assoclen +
1242					     req->cryptlen +
1243					     (encrypt ? authsize : 0));
1244		if (unlikely(src_nents < 0)) {
1245			dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1246				req->assoclen + req->cryptlen +
1247				(encrypt ? authsize : 0));
1248			return ERR_PTR(src_nents);
1249		}
1250	}
1251
1252	if (likely(req->src == req->dst)) {
1253		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1254					      DMA_BIDIRECTIONAL);
1255		if (unlikely(!mapped_src_nents)) {
1256			dev_err(jrdev, "unable to map source\n");
1257			return ERR_PTR(-ENOMEM);
1258		}
1259	} else {
1260		/* Cover also the case of null (zero length) input data */
1261		if (src_nents) {
1262			mapped_src_nents = dma_map_sg(jrdev, req->src,
1263						      src_nents, DMA_TO_DEVICE);
1264			if (unlikely(!mapped_src_nents)) {
1265				dev_err(jrdev, "unable to map source\n");
1266				return ERR_PTR(-ENOMEM);
1267			}
1268		} else {
1269			mapped_src_nents = 0;
1270		}
1271
1272		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1273					      DMA_FROM_DEVICE);
1274		if (unlikely(!mapped_dst_nents)) {
1275			dev_err(jrdev, "unable to map destination\n");
1276			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1277			return ERR_PTR(-ENOMEM);
1278		}
1279	}
1280
1281	sec4_sg_len = mapped_src_nents > 1 ? mapped_src_nents : 0;
1282	sec4_sg_len += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1283	sec4_sg_bytes = sec4_sg_len * sizeof(struct sec4_sg_entry);
1284
1285	/* allocate space for base edesc and hw desc commands, link tables */
1286	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1287			GFP_DMA | flags);
1288	if (!edesc) {
1289		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1290			   0, 0, 0);
1291		return ERR_PTR(-ENOMEM);
1292	}
1293
1294	edesc->src_nents = src_nents;
1295	edesc->dst_nents = dst_nents;
1296	edesc->sec4_sg = (void *)edesc + sizeof(struct aead_edesc) +
1297			 desc_bytes;
1298	*all_contig_ptr = !(mapped_src_nents > 1);
1299
1300	sec4_sg_index = 0;
1301	if (mapped_src_nents > 1) {
1302		sg_to_sec4_sg_last(req->src, mapped_src_nents,
1303				   edesc->sec4_sg + sec4_sg_index, 0);
1304		sec4_sg_index += mapped_src_nents;
1305	}
1306	if (mapped_dst_nents > 1) {
1307		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1308				   edesc->sec4_sg + sec4_sg_index, 0);
1309	}
1310
1311	if (!sec4_sg_bytes)
1312		return edesc;
1313
1314	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1315					    sec4_sg_bytes, DMA_TO_DEVICE);
1316	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1317		dev_err(jrdev, "unable to map S/G table\n");
1318		aead_unmap(jrdev, edesc, req);
1319		kfree(edesc);
1320		return ERR_PTR(-ENOMEM);
1321	}
1322
1323	edesc->sec4_sg_bytes = sec4_sg_bytes;
1324
1325	return edesc;
1326}
1327
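/*
 * Layout of the allocation made in aead_edesc_alloc() (editorial
 * sketch):
 *
 *	+-------------------+----------------+----------------------+
 *	| struct aead_edesc | h/w job desc   | sec4 S/G link table  |
 *	| (bookkeeping)     | (desc_bytes)   | (sec4_sg_bytes)      |
 *	+-------------------+----------------+----------------------+
 *
 * edesc->sec4_sg points just past the job descriptor, and only the link
 * table portion is DMA-mapped (sec4_sg_dma).
 */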
1328static int gcm_encrypt(struct aead_request *req)
1329{
1330	struct aead_edesc *edesc;
1331	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1332	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1333	struct device *jrdev = ctx->jrdev;
1334	bool all_contig;
1335	u32 *desc;
1336	int ret = 0;
1337
1338	/* allocate extended descriptor */
1339	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, true);
1340	if (IS_ERR(edesc))
1341		return PTR_ERR(edesc);
1342
1343	/* Create and submit job descriptor */
1344	init_gcm_job(req, edesc, all_contig, true);
1345#ifdef DEBUG
1346	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1347		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1348		       desc_bytes(edesc->hw_desc), 1);
1349#endif
1350
1351	desc = edesc->hw_desc;
1352	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1353	if (!ret) {
1354		ret = -EINPROGRESS;
1355	} else {
1356		aead_unmap(jrdev, edesc, req);
1357		kfree(edesc);
1358	}
1359
1360	return ret;
1361}
1362
1363static int ipsec_gcm_encrypt(struct aead_request *req)
1364{
1365	if (req->assoclen < 8)
1366		return -EINVAL;
1367
1368	return gcm_encrypt(req);
1369}
1370
1371static int aead_encrypt(struct aead_request *req)
1372{
1373	struct aead_edesc *edesc;
1374	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1375	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1376	struct device *jrdev = ctx->jrdev;
1377	bool all_contig;
1378	u32 *desc;
1379	int ret = 0;
1380
1381	/* allocate extended descriptor */
1382	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1383				 &all_contig, true);
1384	if (IS_ERR(edesc))
1385		return PTR_ERR(edesc);
1386
1387	/* Create and submit job descriptor */
1388	init_authenc_job(req, edesc, all_contig, true);
1389#ifdef DEBUG
1390	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1391		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1392		       desc_bytes(edesc->hw_desc), 1);
1393#endif
1394
1395	desc = edesc->hw_desc;
1396	ret = caam_jr_enqueue(jrdev, desc, aead_encrypt_done, req);
1397	if (!ret) {
1398		ret = -EINPROGRESS;
1399	} else {
1400		aead_unmap(jrdev, edesc, req);
1401		kfree(edesc);
1402	}
1403
1404	return ret;
1405}
1406
1407static int gcm_decrypt(struct aead_request *req)
1408{
1409	struct aead_edesc *edesc;
1410	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1411	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1412	struct device *jrdev = ctx->jrdev;
1413	bool all_contig;
1414	u32 *desc;
1415	int ret = 0;
1416
1417	/* allocate extended descriptor */
1418	edesc = aead_edesc_alloc(req, GCM_DESC_JOB_IO_LEN, &all_contig, false);
1419	if (IS_ERR(edesc))
1420		return PTR_ERR(edesc);
1421
1422	/* Create and submit job descriptor*/
1423	init_gcm_job(req, edesc, all_contig, false);
1424#ifdef DEBUG
1425	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1426		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1427		       desc_bytes(edesc->hw_desc), 1);
1428#endif
1429
1430	desc = edesc->hw_desc;
1431	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1432	if (!ret) {
1433		ret = -EINPROGRESS;
1434	} else {
1435		aead_unmap(jrdev, edesc, req);
1436		kfree(edesc);
1437	}
1438
1439	return ret;
1440}
1441
1442static int ipsec_gcm_decrypt(struct aead_request *req)
1443{
1444	if (req->assoclen < 8)
1445		return -EINVAL;
1446
1447	return gcm_decrypt(req);
1448}
1449
1450static int aead_decrypt(struct aead_request *req)
1451{
1452	struct aead_edesc *edesc;
1453	struct crypto_aead *aead = crypto_aead_reqtfm(req);
1454	struct caam_ctx *ctx = crypto_aead_ctx(aead);
1455	struct device *jrdev = ctx->jrdev;
1456	bool all_contig;
1457	u32 *desc;
1458	int ret = 0;
1459
1460	caam_dump_sg(KERN_ERR, "dec src@" __stringify(__LINE__)": ",
1461		     DUMP_PREFIX_ADDRESS, 16, 4, req->src,
1462		     req->assoclen + req->cryptlen, 1);
1463
1464	/* allocate extended descriptor */
1465	edesc = aead_edesc_alloc(req, AUTHENC_DESC_JOB_IO_LEN,
1466				 &all_contig, false);
1467	if (IS_ERR(edesc))
1468		return PTR_ERR(edesc);
1469
1470	/* Create and submit job descriptor*/
1471	init_authenc_job(req, edesc, all_contig, false);
1472#ifdef DEBUG
1473	print_hex_dump(KERN_ERR, "aead jobdesc@"__stringify(__LINE__)": ",
1474		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1475		       desc_bytes(edesc->hw_desc), 1);
1476#endif
1477
1478	desc = edesc->hw_desc;
1479	ret = caam_jr_enqueue(jrdev, desc, aead_decrypt_done, req);
1480	if (!ret) {
1481		ret = -EINPROGRESS;
1482	} else {
1483		aead_unmap(jrdev, edesc, req);
1484		kfree(edesc);
1485	}
1486
1487	return ret;
1488}
1489
1490/*
 1491 * allocate and map the ablkcipher extended descriptor
1492 */
1493static struct ablkcipher_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request
1494						       *req, int desc_bytes,
1495						       bool *iv_contig_out)
1496{
1497	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1498	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1499	struct device *jrdev = ctx->jrdev;
1500	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1501		       GFP_KERNEL : GFP_ATOMIC;
1502	int src_nents, mapped_src_nents, dst_nents = 0, mapped_dst_nents = 0;
1503	struct ablkcipher_edesc *edesc;
1504	dma_addr_t iv_dma = 0;
1505	bool in_contig;
1506	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1507	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1508
1509	src_nents = sg_nents_for_len(req->src, req->nbytes);
1510	if (unlikely(src_nents < 0)) {
1511		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1512			req->nbytes);
1513		return ERR_PTR(src_nents);
1514	}
1515
1516	if (req->dst != req->src) {
1517		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
1518		if (unlikely(dst_nents < 0)) {
1519			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1520				req->nbytes);
1521			return ERR_PTR(dst_nents);
1522		}
1523	}
1524
1525	if (likely(req->src == req->dst)) {
1526		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1527					      DMA_BIDIRECTIONAL);
1528		if (unlikely(!mapped_src_nents)) {
1529			dev_err(jrdev, "unable to map source\n");
1530			return ERR_PTR(-ENOMEM);
1531		}
1532	} else {
1533		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1534					      DMA_TO_DEVICE);
1535		if (unlikely(!mapped_src_nents)) {
1536			dev_err(jrdev, "unable to map source\n");
1537			return ERR_PTR(-ENOMEM);
1538		}
1539
1540		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1541					      DMA_FROM_DEVICE);
1542		if (unlikely(!mapped_dst_nents)) {
1543			dev_err(jrdev, "unable to map destination\n");
1544			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1545			return ERR_PTR(-ENOMEM);
1546		}
1547	}
1548
1549	iv_dma = dma_map_single(jrdev, req->info, ivsize, DMA_TO_DEVICE);
1550	if (dma_mapping_error(jrdev, iv_dma)) {
1551		dev_err(jrdev, "unable to map IV\n");
1552		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1553			   0, 0, 0);
1554		return ERR_PTR(-ENOMEM);
1555	}
1556
1557	if (mapped_src_nents == 1 &&
1558	    iv_dma + ivsize == sg_dma_address(req->src)) {
1559		in_contig = true;
1560		sec4_sg_ents = 0;
1561	} else {
1562		in_contig = false;
1563		sec4_sg_ents = 1 + mapped_src_nents;
1564	}
1565	dst_sg_idx = sec4_sg_ents;
1566	sec4_sg_ents += mapped_dst_nents > 1 ? mapped_dst_nents : 0;
1567	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
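	/*
	 * Worked example (illustrative figures): with a non-contiguous IV
	 * and mapped_src_nents == 3, the input side needs 1 + 3 = 4
	 * entries, so dst_sg_idx == 4; a two-segment destination adds two
	 * more, giving sec4_sg_bytes for six struct sec4_sg_entry slots.
	 */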
1568
1569	/* allocate space for base edesc and hw desc commands, link tables */
1570	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1571			GFP_DMA | flags);
1572	if (!edesc) {
1573		dev_err(jrdev, "could not allocate extended descriptor\n");
1574		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1575			   iv_dma, ivsize, 0, 0);
1576		return ERR_PTR(-ENOMEM);
1577	}
1578
1579	edesc->src_nents = src_nents;
1580	edesc->dst_nents = dst_nents;
1581	edesc->sec4_sg_bytes = sec4_sg_bytes;
1582	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
1583			 desc_bytes;
1584
1585	if (!in_contig) {
1586		dma_to_sec4_sg_one(edesc->sec4_sg, iv_dma, ivsize, 0);
1587		sg_to_sec4_sg_last(req->src, mapped_src_nents,
1588				   edesc->sec4_sg + 1, 0);
1589	}
1590
1591	if (mapped_dst_nents > 1) {
1592		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1593				   edesc->sec4_sg + dst_sg_idx, 0);
1594	}
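	/*
	 * When the link table is used it now holds the IV entry followed
	 * by the source segments, with any multi-segment destination
	 * starting at index dst_sg_idx.
	 */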
1595
1596	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1597					    sec4_sg_bytes, DMA_TO_DEVICE);
1598	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1599		dev_err(jrdev, "unable to map S/G table\n");
1600		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1601			   iv_dma, ivsize, 0, 0);
1602		kfree(edesc);
1603		return ERR_PTR(-ENOMEM);
1604	}
1605
1606	edesc->iv_dma = iv_dma;
1607
1608#ifdef DEBUG
1609	print_hex_dump(KERN_ERR, "ablkcipher sec4_sg@"__stringify(__LINE__)": ",
1610		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1611		       sec4_sg_bytes, 1);
1612#endif
1613
1614	*iv_contig_out = in_contig;
1615	return edesc;
1616}
1617
1618static int ablkcipher_encrypt(struct ablkcipher_request *req)
1619{
1620	struct ablkcipher_edesc *edesc;
1621	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1622	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1623	struct device *jrdev = ctx->jrdev;
1624	bool iv_contig;
1625	u32 *desc;
1626	int ret = 0;
1627
1628	/* allocate extended descriptor */
1629	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
1630				       CAAM_CMD_SZ, &iv_contig);
1631	if (IS_ERR(edesc))
1632		return PTR_ERR(edesc);
1633
1634	/* Create and submit job descriptor */
1635	init_ablkcipher_job(ctx->sh_desc_enc,
1636		ctx->sh_desc_enc_dma, edesc, req, iv_contig);
1637#ifdef DEBUG
1638	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
1639		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1640		       desc_bytes(edesc->hw_desc), 1);
1641#endif
1642	desc = edesc->hw_desc;
1643	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
1644
1645	if (!ret) {
1646		ret = -EINPROGRESS;
1647	} else {
1648		ablkcipher_unmap(jrdev, edesc, req);
1649		kfree(edesc);
1650	}
1651
1652	return ret;
1653}
1654
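/*
 * ablkcipher_decrypt() below mirrors this: it differs only in using the
 * decryption shared descriptor (ctx->sh_desc_dec) and the
 * ablkcipher_decrypt_done completion callback.
 */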
1655static int ablkcipher_decrypt(struct ablkcipher_request *req)
1656{
1657	struct ablkcipher_edesc *edesc;
1658	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1659	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1660	struct device *jrdev = ctx->jrdev;
1661	bool iv_contig;
1662	u32 *desc;
1663	int ret = 0;
1664
1665	/* allocate extended descriptor */
1666	edesc = ablkcipher_edesc_alloc(req, DESC_JOB_IO_LEN *
1667				       CAAM_CMD_SZ, &iv_contig);
1668	if (IS_ERR(edesc))
1669		return PTR_ERR(edesc);
1670
1671	/* Create and submit job descriptor */
1672	init_ablkcipher_job(ctx->sh_desc_dec,
1673		ctx->sh_desc_dec_dma, edesc, req, iv_contig);
1674	desc = edesc->hw_desc;
1675#ifdef DEBUG
1676	print_hex_dump(KERN_ERR, "ablkcipher jobdesc@"__stringify(__LINE__)": ",
1677		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1678		       desc_bytes(edesc->hw_desc), 1);
1679#endif
1680
1681	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_decrypt_done, req);
1682	if (!ret) {
1683		ret = -EINPROGRESS;
1684	} else {
1685		ablkcipher_unmap(jrdev, edesc, req);
1686		kfree(edesc);
1687	}
1688
1689	return ret;
1690}
1691
1692/*
1693 * allocate and map the ablkcipher extended descriptor
1694 * for ablkcipher givencrypt
1695 */
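/*
 * Unlike ablkcipher_edesc_alloc() above, the IV here is freshly
 * generated into greq->giv, so contiguity is checked against the
 * destination: the new IV is written out alongside the ciphertext
 * rather than consumed with the input.
 */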
1696static struct ablkcipher_edesc *ablkcipher_giv_edesc_alloc(
1697				struct skcipher_givcrypt_request *greq,
1698				int desc_bytes,
1699				bool *iv_contig_out)
1700{
1701	struct ablkcipher_request *req = &greq->creq;
1702	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1703	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1704	struct device *jrdev = ctx->jrdev;
1705	gfp_t flags = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
1706		       GFP_KERNEL : GFP_ATOMIC;
1707	int src_nents, mapped_src_nents, dst_nents, mapped_dst_nents;
1708	struct ablkcipher_edesc *edesc;
1709	dma_addr_t iv_dma = 0;
1710	bool out_contig;
1711	int ivsize = crypto_ablkcipher_ivsize(ablkcipher);
1712	int dst_sg_idx, sec4_sg_ents, sec4_sg_bytes;
1713
1714	src_nents = sg_nents_for_len(req->src, req->nbytes);
1715	if (unlikely(src_nents < 0)) {
1716		dev_err(jrdev, "Insufficient bytes (%d) in src S/G\n",
1717			req->nbytes);
1718		return ERR_PTR(src_nents);
1719	}
1720
1721	if (likely(req->src == req->dst)) {
1722		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1723					      DMA_BIDIRECTIONAL);
1724		if (unlikely(!mapped_src_nents)) {
1725			dev_err(jrdev, "unable to map source\n");
1726			return ERR_PTR(-ENOMEM);
1727		}
1728
1729		dst_nents = src_nents;
1730		mapped_dst_nents = src_nents;
1731	} else {
1732		mapped_src_nents = dma_map_sg(jrdev, req->src, src_nents,
1733					      DMA_TO_DEVICE);
1734		if (unlikely(!mapped_src_nents)) {
1735			dev_err(jrdev, "unable to map source\n");
1736			return ERR_PTR(-ENOMEM);
1737		}
1738
1739		dst_nents = sg_nents_for_len(req->dst, req->nbytes);
1740		if (unlikely(dst_nents < 0)) {
1741			dev_err(jrdev, "Insufficient bytes (%d) in dst S/G\n",
1742				req->nbytes);
1743			return ERR_PTR(dst_nents);
1744		}
1745
1746		mapped_dst_nents = dma_map_sg(jrdev, req->dst, dst_nents,
1747					      DMA_FROM_DEVICE);
1748		if (unlikely(!mapped_dst_nents)) {
1749			dev_err(jrdev, "unable to map destination\n");
1750			dma_unmap_sg(jrdev, req->src, src_nents, DMA_TO_DEVICE);
1751			return ERR_PTR(-ENOMEM);
1752		}
1753	}
1754
1755	/*
1756	 * Check if the generated IV can be contiguous with the destination.
1757	 * If so, use it in place; if not, add it to the S/G table.
1758	 */
1759	iv_dma = dma_map_single(jrdev, greq->giv, ivsize, DMA_TO_DEVICE);
1760	if (dma_mapping_error(jrdev, iv_dma)) {
1761		dev_err(jrdev, "unable to map IV\n");
1762		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents, 0,
1763			   0, 0, 0);
1764		return ERR_PTR(-ENOMEM);
1765	}
1766
1767	sec4_sg_ents = mapped_src_nents > 1 ? mapped_src_nents : 0;
1768	dst_sg_idx = sec4_sg_ents;
1769	if (mapped_dst_nents == 1 &&
1770	    iv_dma + ivsize == sg_dma_address(req->dst)) {
1771		out_contig = true;
1772	} else {
1773		out_contig = false;
1774		sec4_sg_ents += 1 + mapped_dst_nents;
1775	}
1776
1777	/* allocate space for base edesc and hw desc commands, link tables */
1778	sec4_sg_bytes = sec4_sg_ents * sizeof(struct sec4_sg_entry);
1779	edesc = kzalloc(sizeof(*edesc) + desc_bytes + sec4_sg_bytes,
1780			GFP_DMA | flags);
1781	if (!edesc) {
1782		dev_err(jrdev, "could not allocate extended descriptor\n");
1783		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1784			   iv_dma, ivsize, 0, 0);
1785		return ERR_PTR(-ENOMEM);
1786	}
1787
1788	edesc->src_nents = src_nents;
1789	edesc->dst_nents = dst_nents;
1790	edesc->sec4_sg_bytes = sec4_sg_bytes;
1791	edesc->sec4_sg = (void *)edesc + sizeof(struct ablkcipher_edesc) +
1792			 desc_bytes;
1793
1794	if (mapped_src_nents > 1)
1795		sg_to_sec4_sg_last(req->src, mapped_src_nents, edesc->sec4_sg,
1796				   0);
1797
1798	if (!out_contig) {
1799		dma_to_sec4_sg_one(edesc->sec4_sg + dst_sg_idx,
1800				   iv_dma, ivsize, 0);
1801		sg_to_sec4_sg_last(req->dst, mapped_dst_nents,
1802				   edesc->sec4_sg + dst_sg_idx + 1, 0);
1803	}
1804
1805	edesc->sec4_sg_dma = dma_map_single(jrdev, edesc->sec4_sg,
1806					    sec4_sg_bytes, DMA_TO_DEVICE);
1807	if (dma_mapping_error(jrdev, edesc->sec4_sg_dma)) {
1808		dev_err(jrdev, "unable to map S/G table\n");
1809		caam_unmap(jrdev, req->src, req->dst, src_nents, dst_nents,
1810			   iv_dma, ivsize, 0, 0);
1811		kfree(edesc);
1812		return ERR_PTR(-ENOMEM);
1813	}
1814	edesc->iv_dma = iv_dma;
1815
1816#ifdef DEBUG
1817	print_hex_dump(KERN_ERR,
1818		       "ablkcipher sec4_sg@" __stringify(__LINE__) ": ",
1819		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->sec4_sg,
1820		       sec4_sg_bytes, 1);
1821#endif
1822
1823	*iv_contig_out = out_contig;
1824	return edesc;
1825}
1826
1827static int ablkcipher_givencrypt(struct skcipher_givcrypt_request *creq)
1828{
1829	struct ablkcipher_request *req = &creq->creq;
1830	struct ablkcipher_edesc *edesc;
1831	struct crypto_ablkcipher *ablkcipher = crypto_ablkcipher_reqtfm(req);
1832	struct caam_ctx *ctx = crypto_ablkcipher_ctx(ablkcipher);
1833	struct device *jrdev = ctx->jrdev;
1834	bool iv_contig = false;
1835	u32 *desc;
1836	int ret = 0;
1837
1838	/* allocate extended descriptor */
1839	edesc = ablkcipher_giv_edesc_alloc(creq, DESC_JOB_IO_LEN *
1840				       CAAM_CMD_SZ, &iv_contig);
1841	if (IS_ERR(edesc))
1842		return PTR_ERR(edesc);
1843
1844	/* Create and submit job descriptor */
1845	init_ablkcipher_giv_job(ctx->sh_desc_givenc, ctx->sh_desc_givenc_dma,
1846				edesc, req, iv_contig);
1847#ifdef DEBUG
1848	print_hex_dump(KERN_ERR,
1849		       "ablkcipher jobdesc@" __stringify(__LINE__) ": ",
1850		       DUMP_PREFIX_ADDRESS, 16, 4, edesc->hw_desc,
1851		       desc_bytes(edesc->hw_desc), 1);
1852#endif
1853	desc = edesc->hw_desc;
1854	ret = caam_jr_enqueue(jrdev, desc, ablkcipher_encrypt_done, req);
1855
1856	if (!ret) {
1857		ret = -EINPROGRESS;
1858	} else {
1859		ablkcipher_unmap(jrdev, edesc, req);
1860		kfree(edesc);
1861	}
1862
1863	return ret;
1864}
1865
1866#define template_aead		template_u.aead
1867#define template_ablkcipher	template_u.ablkcipher
1868struct caam_alg_template {
1869	char name[CRYPTO_MAX_ALG_NAME];
1870	char driver_name[CRYPTO_MAX_ALG_NAME];
1871	unsigned int blocksize;
1872	u32 type;
1873	union {
1874		struct ablkcipher_alg ablkcipher;
1875	} template_u;
1876	u32 class1_alg_type;
1877	u32 class2_alg_type;
1878};
1879
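/*
 * Each template below is expanded into a full crypto_alg by
 * caam_alg_alloc() near the end of this file; .type picks between the
 * plain ablkcipher and the IV-generating givcipher front ends.
 */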
1880static struct caam_alg_template driver_algs[] = {
1881	/* ablkcipher descriptor */
1882	{
1883		.name = "cbc(aes)",
1884		.driver_name = "cbc-aes-caam",
1885		.blocksize = AES_BLOCK_SIZE,
1886		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
1887		.template_ablkcipher = {
1888			.setkey = ablkcipher_setkey,
1889			.encrypt = ablkcipher_encrypt,
1890			.decrypt = ablkcipher_decrypt,
1891			.givencrypt = ablkcipher_givencrypt,
1892			.geniv = "<built-in>",
1893			.min_keysize = AES_MIN_KEY_SIZE,
1894			.max_keysize = AES_MAX_KEY_SIZE,
1895			.ivsize = AES_BLOCK_SIZE,
1896			},
1897		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
1898	},
1899	{
1900		.name = "cbc(des3_ede)",
1901		.driver_name = "cbc-3des-caam",
1902		.blocksize = DES3_EDE_BLOCK_SIZE,
1903		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
1904		.template_ablkcipher = {
1905			.setkey = ablkcipher_setkey,
1906			.encrypt = ablkcipher_encrypt,
1907			.decrypt = ablkcipher_decrypt,
1908			.givencrypt = ablkcipher_givencrypt,
1909			.geniv = "<built-in>",
1910			.min_keysize = DES3_EDE_KEY_SIZE,
1911			.max_keysize = DES3_EDE_KEY_SIZE,
1912			.ivsize = DES3_EDE_BLOCK_SIZE,
1913			},
1914		.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
1915	},
1916	{
1917		.name = "cbc(des)",
1918		.driver_name = "cbc-des-caam",
1919		.blocksize = DES_BLOCK_SIZE,
1920		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
1921		.template_ablkcipher = {
1922			.setkey = ablkcipher_setkey,
1923			.encrypt = ablkcipher_encrypt,
1924			.decrypt = ablkcipher_decrypt,
1925			.givencrypt = ablkcipher_givencrypt,
1926			.geniv = "<built-in>",
1927			.min_keysize = DES_KEY_SIZE,
1928			.max_keysize = DES_KEY_SIZE,
1929			.ivsize = DES_BLOCK_SIZE,
1930			},
1931		.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
1932	},
1933	{
1934		.name = "ctr(aes)",
1935		.driver_name = "ctr-aes-caam",
1936		.blocksize = 1,
1937		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1938		.template_ablkcipher = {
1939			.setkey = ablkcipher_setkey,
1940			.encrypt = ablkcipher_encrypt,
1941			.decrypt = ablkcipher_decrypt,
1942			.geniv = "chainiv",
1943			.min_keysize = AES_MIN_KEY_SIZE,
1944			.max_keysize = AES_MAX_KEY_SIZE,
1945			.ivsize = AES_BLOCK_SIZE,
1946			},
1947		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
1948	},
1949	{
1950		.name = "rfc3686(ctr(aes))",
1951		.driver_name = "rfc3686-ctr-aes-caam",
1952		.blocksize = 1,
1953		.type = CRYPTO_ALG_TYPE_GIVCIPHER,
1954		.template_ablkcipher = {
1955			.setkey = ablkcipher_setkey,
1956			.encrypt = ablkcipher_encrypt,
1957			.decrypt = ablkcipher_decrypt,
1958			.givencrypt = ablkcipher_givencrypt,
1959			.geniv = "<built-in>",
1960			.min_keysize = AES_MIN_KEY_SIZE +
1961				       CTR_RFC3686_NONCE_SIZE,
1962			.max_keysize = AES_MAX_KEY_SIZE +
1963				       CTR_RFC3686_NONCE_SIZE,
1964			.ivsize = CTR_RFC3686_IV_SIZE,
1965			},
1966		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CTR_MOD128,
1967	},
1968	{
1969		.name = "xts(aes)",
1970		.driver_name = "xts-aes-caam",
1971		.blocksize = AES_BLOCK_SIZE,
1972		.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
1973		.template_ablkcipher = {
1974			.setkey = xts_ablkcipher_setkey,
1975			.encrypt = ablkcipher_encrypt,
1976			.decrypt = ablkcipher_decrypt,
1977			.geniv = "eseqiv",
1978			.min_keysize = 2 * AES_MIN_KEY_SIZE,
1979			.max_keysize = 2 * AES_MAX_KEY_SIZE,
1980			.ivsize = AES_BLOCK_SIZE,
1981			},
1982		.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_XTS,
1983	},
1984};
1985
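/*
 * AEAD transforms use the newer aead_alg interface; they are registered
 * and torn down separately from the legacy ablkcipher templates above
 * (see caam_algapi_init()/caam_algapi_exit() at the end of this file).
 */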
1986static struct caam_aead_alg driver_aeads[] = {
1987	{
1988		.aead = {
1989			.base = {
1990				.cra_name = "rfc4106(gcm(aes))",
1991				.cra_driver_name = "rfc4106-gcm-aes-caam",
1992				.cra_blocksize = 1,
1993			},
1994			.setkey = rfc4106_setkey,
1995			.setauthsize = rfc4106_setauthsize,
1996			.encrypt = ipsec_gcm_encrypt,
1997			.decrypt = ipsec_gcm_decrypt,
1998			.ivsize = GCM_RFC4106_IV_SIZE,
1999			.maxauthsize = AES_BLOCK_SIZE,
2000		},
2001		.caam = {
2002			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2003		},
2004	},
2005	{
2006		.aead = {
2007			.base = {
2008				.cra_name = "rfc4543(gcm(aes))",
2009				.cra_driver_name = "rfc4543-gcm-aes-caam",
2010				.cra_blocksize = 1,
2011			},
2012			.setkey = rfc4543_setkey,
2013			.setauthsize = rfc4543_setauthsize,
2014			.encrypt = ipsec_gcm_encrypt,
2015			.decrypt = ipsec_gcm_decrypt,
2016			.ivsize = GCM_RFC4543_IV_SIZE,
2017			.maxauthsize = AES_BLOCK_SIZE,
2018		},
2019		.caam = {
2020			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2021		},
2022	},
2023	/* Galois Counter Mode */
2024	{
2025		.aead = {
2026			.base = {
2027				.cra_name = "gcm(aes)",
2028				.cra_driver_name = "gcm-aes-caam",
2029				.cra_blocksize = 1,
2030			},
2031			.setkey = gcm_setkey,
2032			.setauthsize = gcm_setauthsize,
2033			.encrypt = gcm_encrypt,
2034			.decrypt = gcm_decrypt,
2035			.ivsize = GCM_AES_IV_SIZE,
2036			.maxauthsize = AES_BLOCK_SIZE,
2037		},
2038		.caam = {
2039			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
2040		},
2041	},
2042	/* single-pass ipsec_esp descriptor */
2043	{
2044		.aead = {
2045			.base = {
2046				.cra_name = "authenc(hmac(md5),"
2047					    "ecb(cipher_null))",
2048				.cra_driver_name = "authenc-hmac-md5-"
2049						   "ecb-cipher_null-caam",
2050				.cra_blocksize = NULL_BLOCK_SIZE,
2051			},
2052			.setkey = aead_setkey,
2053			.setauthsize = aead_setauthsize,
2054			.encrypt = aead_encrypt,
2055			.decrypt = aead_decrypt,
2056			.ivsize = NULL_IV_SIZE,
2057			.maxauthsize = MD5_DIGEST_SIZE,
2058		},
2059		.caam = {
2060			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2061					   OP_ALG_AAI_HMAC_PRECOMP,
2062		},
2063	},
2064	{
2065		.aead = {
2066			.base = {
2067				.cra_name = "authenc(hmac(sha1),"
2068					    "ecb(cipher_null))",
2069				.cra_driver_name = "authenc-hmac-sha1-"
2070						   "ecb-cipher_null-caam",
2071				.cra_blocksize = NULL_BLOCK_SIZE,
2072			},
2073			.setkey = aead_setkey,
2074			.setauthsize = aead_setauthsize,
2075			.encrypt = aead_encrypt,
2076			.decrypt = aead_decrypt,
2077			.ivsize = NULL_IV_SIZE,
2078			.maxauthsize = SHA1_DIGEST_SIZE,
2079		},
2080		.caam = {
2081			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2082					   OP_ALG_AAI_HMAC_PRECOMP,
2083		},
2084	},
2085	{
2086		.aead = {
2087			.base = {
2088				.cra_name = "authenc(hmac(sha224),"
2089					    "ecb(cipher_null))",
2090				.cra_driver_name = "authenc-hmac-sha224-"
2091						   "ecb-cipher_null-caam",
2092				.cra_blocksize = NULL_BLOCK_SIZE,
2093			},
2094			.setkey = aead_setkey,
2095			.setauthsize = aead_setauthsize,
2096			.encrypt = aead_encrypt,
2097			.decrypt = aead_decrypt,
2098			.ivsize = NULL_IV_SIZE,
2099			.maxauthsize = SHA224_DIGEST_SIZE,
2100		},
2101		.caam = {
2102			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2103					   OP_ALG_AAI_HMAC_PRECOMP,
2104		},
2105	},
2106	{
2107		.aead = {
2108			.base = {
2109				.cra_name = "authenc(hmac(sha256),"
2110					    "ecb(cipher_null))",
2111				.cra_driver_name = "authenc-hmac-sha256-"
2112						   "ecb-cipher_null-caam",
2113				.cra_blocksize = NULL_BLOCK_SIZE,
2114			},
2115			.setkey = aead_setkey,
2116			.setauthsize = aead_setauthsize,
2117			.encrypt = aead_encrypt,
2118			.decrypt = aead_decrypt,
2119			.ivsize = NULL_IV_SIZE,
2120			.maxauthsize = SHA256_DIGEST_SIZE,
2121		},
2122		.caam = {
2123			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2124					   OP_ALG_AAI_HMAC_PRECOMP,
2125		},
2126	},
2127	{
2128		.aead = {
2129			.base = {
2130				.cra_name = "authenc(hmac(sha384),"
2131					    "ecb(cipher_null))",
2132				.cra_driver_name = "authenc-hmac-sha384-"
2133						   "ecb-cipher_null-caam",
2134				.cra_blocksize = NULL_BLOCK_SIZE,
2135			},
2136			.setkey = aead_setkey,
2137			.setauthsize = aead_setauthsize,
2138			.encrypt = aead_encrypt,
2139			.decrypt = aead_decrypt,
2140			.ivsize = NULL_IV_SIZE,
2141			.maxauthsize = SHA384_DIGEST_SIZE,
2142		},
2143		.caam = {
2144			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2145					   OP_ALG_AAI_HMAC_PRECOMP,
2146		},
2147	},
2148	{
2149		.aead = {
2150			.base = {
2151				.cra_name = "authenc(hmac(sha512),"
2152					    "ecb(cipher_null))",
2153				.cra_driver_name = "authenc-hmac-sha512-"
2154						   "ecb-cipher_null-caam",
2155				.cra_blocksize = NULL_BLOCK_SIZE,
2156			},
2157			.setkey = aead_setkey,
2158			.setauthsize = aead_setauthsize,
2159			.encrypt = aead_encrypt,
2160			.decrypt = aead_decrypt,
2161			.ivsize = NULL_IV_SIZE,
2162			.maxauthsize = SHA512_DIGEST_SIZE,
2163		},
2164		.caam = {
2165			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2166					   OP_ALG_AAI_HMAC_PRECOMP,
2167		},
2168	},
2169	{
2170		.aead = {
2171			.base = {
2172				.cra_name = "authenc(hmac(md5),cbc(aes))",
2173				.cra_driver_name = "authenc-hmac-md5-"
2174						   "cbc-aes-caam",
2175				.cra_blocksize = AES_BLOCK_SIZE,
2176			},
2177			.setkey = aead_setkey,
2178			.setauthsize = aead_setauthsize,
2179			.encrypt = aead_encrypt,
2180			.decrypt = aead_decrypt,
2181			.ivsize = AES_BLOCK_SIZE,
2182			.maxauthsize = MD5_DIGEST_SIZE,
2183		},
2184		.caam = {
2185			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2186			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2187					   OP_ALG_AAI_HMAC_PRECOMP,
2188		},
2189	},
2190	{
2191		.aead = {
2192			.base = {
2193				.cra_name = "echainiv(authenc(hmac(md5),"
2194					    "cbc(aes)))",
2195				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2196						   "cbc-aes-caam",
2197				.cra_blocksize = AES_BLOCK_SIZE,
2198			},
2199			.setkey = aead_setkey,
2200			.setauthsize = aead_setauthsize,
2201			.encrypt = aead_encrypt,
2202			.decrypt = aead_decrypt,
2203			.ivsize = AES_BLOCK_SIZE,
2204			.maxauthsize = MD5_DIGEST_SIZE,
2205		},
2206		.caam = {
2207			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2208			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2209					   OP_ALG_AAI_HMAC_PRECOMP,
2210			.geniv = true,
2211		},
2212	},
2213	{
2214		.aead = {
2215			.base = {
2216				.cra_name = "authenc(hmac(sha1),cbc(aes))",
2217				.cra_driver_name = "authenc-hmac-sha1-"
2218						   "cbc-aes-caam",
2219				.cra_blocksize = AES_BLOCK_SIZE,
2220			},
2221			.setkey = aead_setkey,
2222			.setauthsize = aead_setauthsize,
2223			.encrypt = aead_encrypt,
2224			.decrypt = aead_decrypt,
2225			.ivsize = AES_BLOCK_SIZE,
2226			.maxauthsize = SHA1_DIGEST_SIZE,
2227		},
2228		.caam = {
2229			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2230			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2231					   OP_ALG_AAI_HMAC_PRECOMP,
2232		},
2233	},
2234	{
2235		.aead = {
2236			.base = {
2237				.cra_name = "echainiv(authenc(hmac(sha1),"
2238					    "cbc(aes)))",
2239				.cra_driver_name = "echainiv-authenc-"
2240						   "hmac-sha1-cbc-aes-caam",
2241				.cra_blocksize = AES_BLOCK_SIZE,
2242			},
2243			.setkey = aead_setkey,
2244			.setauthsize = aead_setauthsize,
2245			.encrypt = aead_encrypt,
2246			.decrypt = aead_decrypt,
2247			.ivsize = AES_BLOCK_SIZE,
2248			.maxauthsize = SHA1_DIGEST_SIZE,
2249		},
2250		.caam = {
2251			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2252			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2253					   OP_ALG_AAI_HMAC_PRECOMP,
2254			.geniv = true,
2255		},
2256	},
2257	{
2258		.aead = {
2259			.base = {
2260				.cra_name = "authenc(hmac(sha224),cbc(aes))",
2261				.cra_driver_name = "authenc-hmac-sha224-"
2262						   "cbc-aes-caam",
2263				.cra_blocksize = AES_BLOCK_SIZE,
2264			},
2265			.setkey = aead_setkey,
2266			.setauthsize = aead_setauthsize,
2267			.encrypt = aead_encrypt,
2268			.decrypt = aead_decrypt,
2269			.ivsize = AES_BLOCK_SIZE,
2270			.maxauthsize = SHA224_DIGEST_SIZE,
2271		},
2272		.caam = {
2273			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2274			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2275					   OP_ALG_AAI_HMAC_PRECOMP,
2276		},
2277	},
2278	{
2279		.aead = {
2280			.base = {
2281				.cra_name = "echainiv(authenc(hmac(sha224),"
2282					    "cbc(aes)))",
2283				.cra_driver_name = "echainiv-authenc-"
2284						   "hmac-sha224-cbc-aes-caam",
2285				.cra_blocksize = AES_BLOCK_SIZE,
2286			},
2287			.setkey = aead_setkey,
2288			.setauthsize = aead_setauthsize,
2289			.encrypt = aead_encrypt,
2290			.decrypt = aead_decrypt,
2291			.ivsize = AES_BLOCK_SIZE,
2292			.maxauthsize = SHA224_DIGEST_SIZE,
2293		},
2294		.caam = {
2295			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2296			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2297					   OP_ALG_AAI_HMAC_PRECOMP,
2298			.geniv = true,
2299		},
2300	},
2301	{
2302		.aead = {
2303			.base = {
2304				.cra_name = "authenc(hmac(sha256),cbc(aes))",
2305				.cra_driver_name = "authenc-hmac-sha256-"
2306						   "cbc-aes-caam",
2307				.cra_blocksize = AES_BLOCK_SIZE,
2308			},
2309			.setkey = aead_setkey,
2310			.setauthsize = aead_setauthsize,
2311			.encrypt = aead_encrypt,
2312			.decrypt = aead_decrypt,
2313			.ivsize = AES_BLOCK_SIZE,
2314			.maxauthsize = SHA256_DIGEST_SIZE,
2315		},
2316		.caam = {
2317			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2318			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2319					   OP_ALG_AAI_HMAC_PRECOMP,
2320		},
2321	},
2322	{
2323		.aead = {
2324			.base = {
2325				.cra_name = "echainiv(authenc(hmac(sha256),"
2326					    "cbc(aes)))",
2327				.cra_driver_name = "echainiv-authenc-"
2328						   "hmac-sha256-cbc-aes-caam",
2329				.cra_blocksize = AES_BLOCK_SIZE,
2330			},
2331			.setkey = aead_setkey,
2332			.setauthsize = aead_setauthsize,
2333			.encrypt = aead_encrypt,
2334			.decrypt = aead_decrypt,
2335			.ivsize = AES_BLOCK_SIZE,
2336			.maxauthsize = SHA256_DIGEST_SIZE,
2337		},
2338		.caam = {
2339			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2340			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2341					   OP_ALG_AAI_HMAC_PRECOMP,
2342			.geniv = true,
2343		},
2344	},
2345	{
2346		.aead = {
2347			.base = {
2348				.cra_name = "authenc(hmac(sha384),cbc(aes))",
2349				.cra_driver_name = "authenc-hmac-sha384-"
2350						   "cbc-aes-caam",
2351				.cra_blocksize = AES_BLOCK_SIZE,
2352			},
2353			.setkey = aead_setkey,
2354			.setauthsize = aead_setauthsize,
2355			.encrypt = aead_encrypt,
2356			.decrypt = aead_decrypt,
2357			.ivsize = AES_BLOCK_SIZE,
2358			.maxauthsize = SHA384_DIGEST_SIZE,
2359		},
2360		.caam = {
2361			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2362			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2363					   OP_ALG_AAI_HMAC_PRECOMP,
2364		},
2365	},
2366	{
2367		.aead = {
2368			.base = {
2369				.cra_name = "echainiv(authenc(hmac(sha384),"
2370					    "cbc(aes)))",
2371				.cra_driver_name = "echainiv-authenc-"
2372						   "hmac-sha384-cbc-aes-caam",
2373				.cra_blocksize = AES_BLOCK_SIZE,
2374			},
2375			.setkey = aead_setkey,
2376			.setauthsize = aead_setauthsize,
2377			.encrypt = aead_encrypt,
2378			.decrypt = aead_decrypt,
2379			.ivsize = AES_BLOCK_SIZE,
2380			.maxauthsize = SHA384_DIGEST_SIZE,
2381		},
2382		.caam = {
2383			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2384			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2385					   OP_ALG_AAI_HMAC_PRECOMP,
2386			.geniv = true,
2387		},
2388	},
2389	{
2390		.aead = {
2391			.base = {
2392				.cra_name = "authenc(hmac(sha512),cbc(aes))",
2393				.cra_driver_name = "authenc-hmac-sha512-"
2394						   "cbc-aes-caam",
2395				.cra_blocksize = AES_BLOCK_SIZE,
2396			},
2397			.setkey = aead_setkey,
2398			.setauthsize = aead_setauthsize,
2399			.encrypt = aead_encrypt,
2400			.decrypt = aead_decrypt,
2401			.ivsize = AES_BLOCK_SIZE,
2402			.maxauthsize = SHA512_DIGEST_SIZE,
2403		},
2404		.caam = {
2405			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2406			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2407					   OP_ALG_AAI_HMAC_PRECOMP,
2408		},
2409	},
2410	{
2411		.aead = {
2412			.base = {
2413				.cra_name = "echainiv(authenc(hmac(sha512),"
2414					    "cbc(aes)))",
2415				.cra_driver_name = "echainiv-authenc-"
2416						   "hmac-sha512-cbc-aes-caam",
2417				.cra_blocksize = AES_BLOCK_SIZE,
2418			},
2419			.setkey = aead_setkey,
2420			.setauthsize = aead_setauthsize,
2421			.encrypt = aead_encrypt,
2422			.decrypt = aead_decrypt,
2423			.ivsize = AES_BLOCK_SIZE,
2424			.maxauthsize = SHA512_DIGEST_SIZE,
2425		},
2426		.caam = {
2427			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_CBC,
2428			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2429					   OP_ALG_AAI_HMAC_PRECOMP,
2430			.geniv = true,
2431		},
2432	},
2433	{
2434		.aead = {
2435			.base = {
2436				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
2437				.cra_driver_name = "authenc-hmac-md5-"
2438						   "cbc-des3_ede-caam",
2439				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2440			},
2441			.setkey = aead_setkey,
2442			.setauthsize = aead_setauthsize,
2443			.encrypt = aead_encrypt,
2444			.decrypt = aead_decrypt,
2445			.ivsize = DES3_EDE_BLOCK_SIZE,
2446			.maxauthsize = MD5_DIGEST_SIZE,
2447		},
2448		.caam = {
2449			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2450			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2451					   OP_ALG_AAI_HMAC_PRECOMP,
2452		}
2453	},
2454	{
2455		.aead = {
2456			.base = {
2457				.cra_name = "echainiv(authenc(hmac(md5),"
2458					    "cbc(des3_ede)))",
2459				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2460						   "cbc-des3_ede-caam",
2461				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2462			},
2463			.setkey = aead_setkey,
2464			.setauthsize = aead_setauthsize,
2465			.encrypt = aead_encrypt,
2466			.decrypt = aead_decrypt,
2467			.ivsize = DES3_EDE_BLOCK_SIZE,
2468			.maxauthsize = MD5_DIGEST_SIZE,
2469		},
2470		.caam = {
2471			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2472			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2473					   OP_ALG_AAI_HMAC_PRECOMP,
2474			.geniv = true,
2475		}
2476	},
2477	{
2478		.aead = {
2479			.base = {
2480				.cra_name = "authenc(hmac(sha1),"
2481					    "cbc(des3_ede))",
2482				.cra_driver_name = "authenc-hmac-sha1-"
2483						   "cbc-des3_ede-caam",
2484				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2485			},
2486			.setkey = aead_setkey,
2487			.setauthsize = aead_setauthsize,
2488			.encrypt = aead_encrypt,
2489			.decrypt = aead_decrypt,
2490			.ivsize = DES3_EDE_BLOCK_SIZE,
2491			.maxauthsize = SHA1_DIGEST_SIZE,
2492		},
2493		.caam = {
2494			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2495			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2496					   OP_ALG_AAI_HMAC_PRECOMP,
2497		},
2498	},
2499	{
2500		.aead = {
2501			.base = {
2502				.cra_name = "echainiv(authenc(hmac(sha1),"
2503					    "cbc(des3_ede)))",
2504				.cra_driver_name = "echainiv-authenc-"
2505						   "hmac-sha1-"
2506						   "cbc-des3_ede-caam",
2507				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2508			},
2509			.setkey = aead_setkey,
2510			.setauthsize = aead_setauthsize,
2511			.encrypt = aead_encrypt,
2512			.decrypt = aead_decrypt,
2513			.ivsize = DES3_EDE_BLOCK_SIZE,
2514			.maxauthsize = SHA1_DIGEST_SIZE,
2515		},
2516		.caam = {
2517			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2518			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2519					   OP_ALG_AAI_HMAC_PRECOMP,
2520			.geniv = true,
2521		},
2522	},
2523	{
2524		.aead = {
2525			.base = {
2526				.cra_name = "authenc(hmac(sha224),"
2527					    "cbc(des3_ede))",
2528				.cra_driver_name = "authenc-hmac-sha224-"
2529						   "cbc-des3_ede-caam",
2530				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2531			},
2532			.setkey = aead_setkey,
2533			.setauthsize = aead_setauthsize,
2534			.encrypt = aead_encrypt,
2535			.decrypt = aead_decrypt,
2536			.ivsize = DES3_EDE_BLOCK_SIZE,
2537			.maxauthsize = SHA224_DIGEST_SIZE,
2538		},
2539		.caam = {
2540			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2541			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2542					   OP_ALG_AAI_HMAC_PRECOMP,
2543		},
2544	},
2545	{
2546		.aead = {
2547			.base = {
2548				.cra_name = "echainiv(authenc(hmac(sha224),"
2549					    "cbc(des3_ede)))",
2550				.cra_driver_name = "echainiv-authenc-"
2551						   "hmac-sha224-"
2552						   "cbc-des3_ede-caam",
2553				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2554			},
2555			.setkey = aead_setkey,
2556			.setauthsize = aead_setauthsize,
2557			.encrypt = aead_encrypt,
2558			.decrypt = aead_decrypt,
2559			.ivsize = DES3_EDE_BLOCK_SIZE,
2560			.maxauthsize = SHA224_DIGEST_SIZE,
2561		},
2562		.caam = {
2563			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2564			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2565					   OP_ALG_AAI_HMAC_PRECOMP,
2566			.geniv = true,
2567		},
2568	},
2569	{
2570		.aead = {
2571			.base = {
2572				.cra_name = "authenc(hmac(sha256),"
2573					    "cbc(des3_ede))",
2574				.cra_driver_name = "authenc-hmac-sha256-"
2575						   "cbc-des3_ede-caam",
2576				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2577			},
2578			.setkey = aead_setkey,
2579			.setauthsize = aead_setauthsize,
2580			.encrypt = aead_encrypt,
2581			.decrypt = aead_decrypt,
2582			.ivsize = DES3_EDE_BLOCK_SIZE,
2583			.maxauthsize = SHA256_DIGEST_SIZE,
2584		},
2585		.caam = {
2586			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2587			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2588					   OP_ALG_AAI_HMAC_PRECOMP,
2589		},
2590	},
2591	{
2592		.aead = {
2593			.base = {
2594				.cra_name = "echainiv(authenc(hmac(sha256),"
2595					    "cbc(des3_ede)))",
2596				.cra_driver_name = "echainiv-authenc-"
2597						   "hmac-sha256-"
2598						   "cbc-des3_ede-caam",
2599				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2600			},
2601			.setkey = aead_setkey,
2602			.setauthsize = aead_setauthsize,
2603			.encrypt = aead_encrypt,
2604			.decrypt = aead_decrypt,
2605			.ivsize = DES3_EDE_BLOCK_SIZE,
2606			.maxauthsize = SHA256_DIGEST_SIZE,
2607		},
2608		.caam = {
2609			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2610			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2611					   OP_ALG_AAI_HMAC_PRECOMP,
2612			.geniv = true,
2613		},
2614	},
2615	{
2616		.aead = {
2617			.base = {
2618				.cra_name = "authenc(hmac(sha384),"
2619					    "cbc(des3_ede))",
2620				.cra_driver_name = "authenc-hmac-sha384-"
2621						   "cbc-des3_ede-caam",
2622				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2623			},
2624			.setkey = aead_setkey,
2625			.setauthsize = aead_setauthsize,
2626			.encrypt = aead_encrypt,
2627			.decrypt = aead_decrypt,
2628			.ivsize = DES3_EDE_BLOCK_SIZE,
2629			.maxauthsize = SHA384_DIGEST_SIZE,
2630		},
2631		.caam = {
2632			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2633			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2634					   OP_ALG_AAI_HMAC_PRECOMP,
2635		},
2636	},
2637	{
2638		.aead = {
2639			.base = {
2640				.cra_name = "echainiv(authenc(hmac(sha384),"
2641					    "cbc(des3_ede)))",
2642				.cra_driver_name = "echainiv-authenc-"
2643						   "hmac-sha384-"
2644						   "cbc-des3_ede-caam",
2645				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2646			},
2647			.setkey = aead_setkey,
2648			.setauthsize = aead_setauthsize,
2649			.encrypt = aead_encrypt,
2650			.decrypt = aead_decrypt,
2651			.ivsize = DES3_EDE_BLOCK_SIZE,
2652			.maxauthsize = SHA384_DIGEST_SIZE,
2653		},
2654		.caam = {
2655			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2656			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2657					   OP_ALG_AAI_HMAC_PRECOMP,
2658			.geniv = true,
2659		},
2660	},
2661	{
2662		.aead = {
2663			.base = {
2664				.cra_name = "authenc(hmac(sha512),"
2665					    "cbc(des3_ede))",
2666				.cra_driver_name = "authenc-hmac-sha512-"
2667						   "cbc-des3_ede-caam",
2668				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2669			},
2670			.setkey = aead_setkey,
2671			.setauthsize = aead_setauthsize,
2672			.encrypt = aead_encrypt,
2673			.decrypt = aead_decrypt,
2674			.ivsize = DES3_EDE_BLOCK_SIZE,
2675			.maxauthsize = SHA512_DIGEST_SIZE,
2676		},
2677		.caam = {
2678			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2679			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2680					   OP_ALG_AAI_HMAC_PRECOMP,
2681		},
2682	},
2683	{
2684		.aead = {
2685			.base = {
2686				.cra_name = "echainiv(authenc(hmac(sha512),"
2687					    "cbc(des3_ede)))",
2688				.cra_driver_name = "echainiv-authenc-"
2689						   "hmac-sha512-"
2690						   "cbc-des3_ede-caam",
2691				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2692			},
2693			.setkey = aead_setkey,
2694			.setauthsize = aead_setauthsize,
2695			.encrypt = aead_encrypt,
2696			.decrypt = aead_decrypt,
2697			.ivsize = DES3_EDE_BLOCK_SIZE,
2698			.maxauthsize = SHA512_DIGEST_SIZE,
2699		},
2700		.caam = {
2701			.class1_alg_type = OP_ALG_ALGSEL_3DES | OP_ALG_AAI_CBC,
2702			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2703					   OP_ALG_AAI_HMAC_PRECOMP,
2704			.geniv = true,
2705		},
2706	},
2707	{
2708		.aead = {
2709			.base = {
2710				.cra_name = "authenc(hmac(md5),cbc(des))",
2711				.cra_driver_name = "authenc-hmac-md5-"
2712						   "cbc-des-caam",
2713				.cra_blocksize = DES_BLOCK_SIZE,
2714			},
2715			.setkey = aead_setkey,
2716			.setauthsize = aead_setauthsize,
2717			.encrypt = aead_encrypt,
2718			.decrypt = aead_decrypt,
2719			.ivsize = DES_BLOCK_SIZE,
2720			.maxauthsize = MD5_DIGEST_SIZE,
2721		},
2722		.caam = {
2723			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2724			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2725					   OP_ALG_AAI_HMAC_PRECOMP,
2726		},
2727	},
2728	{
2729		.aead = {
2730			.base = {
2731				.cra_name = "echainiv(authenc(hmac(md5),"
2732					    "cbc(des)))",
2733				.cra_driver_name = "echainiv-authenc-hmac-md5-"
2734						   "cbc-des-caam",
2735				.cra_blocksize = DES_BLOCK_SIZE,
2736			},
2737			.setkey = aead_setkey,
2738			.setauthsize = aead_setauthsize,
2739			.encrypt = aead_encrypt,
2740			.decrypt = aead_decrypt,
2741			.ivsize = DES_BLOCK_SIZE,
2742			.maxauthsize = MD5_DIGEST_SIZE,
2743		},
2744		.caam = {
2745			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2746			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2747					   OP_ALG_AAI_HMAC_PRECOMP,
2748			.geniv = true,
2749		},
2750	},
2751	{
2752		.aead = {
2753			.base = {
2754				.cra_name = "authenc(hmac(sha1),cbc(des))",
2755				.cra_driver_name = "authenc-hmac-sha1-"
2756						   "cbc-des-caam",
2757				.cra_blocksize = DES_BLOCK_SIZE,
2758			},
2759			.setkey = aead_setkey,
2760			.setauthsize = aead_setauthsize,
2761			.encrypt = aead_encrypt,
2762			.decrypt = aead_decrypt,
2763			.ivsize = DES_BLOCK_SIZE,
2764			.maxauthsize = SHA1_DIGEST_SIZE,
2765		},
2766		.caam = {
2767			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2768			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2769					   OP_ALG_AAI_HMAC_PRECOMP,
2770		},
2771	},
2772	{
2773		.aead = {
2774			.base = {
2775				.cra_name = "echainiv(authenc(hmac(sha1),"
2776					    "cbc(des)))",
2777				.cra_driver_name = "echainiv-authenc-"
2778						   "hmac-sha1-cbc-des-caam",
2779				.cra_blocksize = DES_BLOCK_SIZE,
2780			},
2781			.setkey = aead_setkey,
2782			.setauthsize = aead_setauthsize,
2783			.encrypt = aead_encrypt,
2784			.decrypt = aead_decrypt,
2785			.ivsize = DES_BLOCK_SIZE,
2786			.maxauthsize = SHA1_DIGEST_SIZE,
2787		},
2788		.caam = {
2789			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2790			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
2791					   OP_ALG_AAI_HMAC_PRECOMP,
2792			.geniv = true,
2793		},
2794	},
2795	{
2796		.aead = {
2797			.base = {
2798				.cra_name = "authenc(hmac(sha224),cbc(des))",
2799				.cra_driver_name = "authenc-hmac-sha224-"
2800						   "cbc-des-caam",
2801				.cra_blocksize = DES_BLOCK_SIZE,
2802			},
2803			.setkey = aead_setkey,
2804			.setauthsize = aead_setauthsize,
2805			.encrypt = aead_encrypt,
2806			.decrypt = aead_decrypt,
2807			.ivsize = DES_BLOCK_SIZE,
2808			.maxauthsize = SHA224_DIGEST_SIZE,
2809		},
2810		.caam = {
2811			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2812			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2813					   OP_ALG_AAI_HMAC_PRECOMP,
2814		},
2815	},
2816	{
2817		.aead = {
2818			.base = {
2819				.cra_name = "echainiv(authenc(hmac(sha224),"
2820					    "cbc(des)))",
2821				.cra_driver_name = "echainiv-authenc-"
2822						   "hmac-sha224-cbc-des-caam",
2823				.cra_blocksize = DES_BLOCK_SIZE,
2824			},
2825			.setkey = aead_setkey,
2826			.setauthsize = aead_setauthsize,
2827			.encrypt = aead_encrypt,
2828			.decrypt = aead_decrypt,
2829			.ivsize = DES_BLOCK_SIZE,
2830			.maxauthsize = SHA224_DIGEST_SIZE,
2831		},
2832		.caam = {
2833			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2834			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
2835					   OP_ALG_AAI_HMAC_PRECOMP,
2836			.geniv = true,
2837		},
2838	},
2839	{
2840		.aead = {
2841			.base = {
2842				.cra_name = "authenc(hmac(sha256),cbc(des))",
2843				.cra_driver_name = "authenc-hmac-sha256-"
2844						   "cbc-des-caam",
2845				.cra_blocksize = DES_BLOCK_SIZE,
2846			},
2847			.setkey = aead_setkey,
2848			.setauthsize = aead_setauthsize,
2849			.encrypt = aead_encrypt,
2850			.decrypt = aead_decrypt,
2851			.ivsize = DES_BLOCK_SIZE,
2852			.maxauthsize = SHA256_DIGEST_SIZE,
2853		},
2854		.caam = {
2855			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2856			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2857					   OP_ALG_AAI_HMAC_PRECOMP,
2858		},
2859	},
2860	{
2861		.aead = {
2862			.base = {
2863				.cra_name = "echainiv(authenc(hmac(sha256),"
2864					    "cbc(des)))",
2865				.cra_driver_name = "echainiv-authenc-"
2866						   "hmac-sha256-cbc-des-caam",
2867				.cra_blocksize = DES_BLOCK_SIZE,
2868			},
2869			.setkey = aead_setkey,
2870			.setauthsize = aead_setauthsize,
2871			.encrypt = aead_encrypt,
2872			.decrypt = aead_decrypt,
2873			.ivsize = DES_BLOCK_SIZE,
2874			.maxauthsize = SHA256_DIGEST_SIZE,
2875		},
2876		.caam = {
2877			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2878			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
2879					   OP_ALG_AAI_HMAC_PRECOMP,
2880			.geniv = true,
2881		},
2882	},
2883	{
2884		.aead = {
2885			.base = {
2886				.cra_name = "authenc(hmac(sha384),cbc(des))",
2887				.cra_driver_name = "authenc-hmac-sha384-"
2888						   "cbc-des-caam",
2889				.cra_blocksize = DES_BLOCK_SIZE,
2890			},
2891			.setkey = aead_setkey,
2892			.setauthsize = aead_setauthsize,
2893			.encrypt = aead_encrypt,
2894			.decrypt = aead_decrypt,
2895			.ivsize = DES_BLOCK_SIZE,
2896			.maxauthsize = SHA384_DIGEST_SIZE,
2897		},
2898		.caam = {
2899			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2900			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2901					   OP_ALG_AAI_HMAC_PRECOMP,
2902		},
2903	},
2904	{
2905		.aead = {
2906			.base = {
2907				.cra_name = "echainiv(authenc(hmac(sha384),"
2908					    "cbc(des)))",
2909				.cra_driver_name = "echainiv-authenc-"
2910						   "hmac-sha384-cbc-des-caam",
2911				.cra_blocksize = DES_BLOCK_SIZE,
2912			},
2913			.setkey = aead_setkey,
2914			.setauthsize = aead_setauthsize,
2915			.encrypt = aead_encrypt,
2916			.decrypt = aead_decrypt,
2917			.ivsize = DES_BLOCK_SIZE,
2918			.maxauthsize = SHA384_DIGEST_SIZE,
2919		},
2920		.caam = {
2921			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2922			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
2923					   OP_ALG_AAI_HMAC_PRECOMP,
2924			.geniv = true,
2925		},
2926	},
2927	{
2928		.aead = {
2929			.base = {
2930				.cra_name = "authenc(hmac(sha512),cbc(des))",
2931				.cra_driver_name = "authenc-hmac-sha512-"
2932						   "cbc-des-caam",
2933				.cra_blocksize = DES_BLOCK_SIZE,
2934			},
2935			.setkey = aead_setkey,
2936			.setauthsize = aead_setauthsize,
2937			.encrypt = aead_encrypt,
2938			.decrypt = aead_decrypt,
2939			.ivsize = DES_BLOCK_SIZE,
2940			.maxauthsize = SHA512_DIGEST_SIZE,
2941		},
2942		.caam = {
2943			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2944			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2945					   OP_ALG_AAI_HMAC_PRECOMP,
2946		},
2947	},
2948	{
2949		.aead = {
2950			.base = {
2951				.cra_name = "echainiv(authenc(hmac(sha512),"
2952					    "cbc(des)))",
2953				.cra_driver_name = "echainiv-authenc-"
2954						   "hmac-sha512-cbc-des-caam",
2955				.cra_blocksize = DES_BLOCK_SIZE,
2956			},
2957			.setkey = aead_setkey,
2958			.setauthsize = aead_setauthsize,
2959			.encrypt = aead_encrypt,
2960			.decrypt = aead_decrypt,
2961			.ivsize = DES_BLOCK_SIZE,
2962			.maxauthsize = SHA512_DIGEST_SIZE,
2963		},
2964		.caam = {
2965			.class1_alg_type = OP_ALG_ALGSEL_DES | OP_ALG_AAI_CBC,
2966			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
2967					   OP_ALG_AAI_HMAC_PRECOMP,
2968			.geniv = true,
2969		},
2970	},
2971	{
2972		.aead = {
2973			.base = {
2974				.cra_name = "authenc(hmac(md5),"
2975					    "rfc3686(ctr(aes)))",
2976				.cra_driver_name = "authenc-hmac-md5-"
2977						   "rfc3686-ctr-aes-caam",
2978				.cra_blocksize = 1,
2979			},
2980			.setkey = aead_setkey,
2981			.setauthsize = aead_setauthsize,
2982			.encrypt = aead_encrypt,
2983			.decrypt = aead_decrypt,
2984			.ivsize = CTR_RFC3686_IV_SIZE,
2985			.maxauthsize = MD5_DIGEST_SIZE,
2986		},
2987		.caam = {
2988			.class1_alg_type = OP_ALG_ALGSEL_AES |
2989					   OP_ALG_AAI_CTR_MOD128,
2990			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
2991					   OP_ALG_AAI_HMAC_PRECOMP,
2992			.rfc3686 = true,
2993		},
2994	},
2995	{
2996		.aead = {
2997			.base = {
2998				.cra_name = "seqiv(authenc("
2999					    "hmac(md5),rfc3686(ctr(aes))))",
3000				.cra_driver_name = "seqiv-authenc-hmac-md5-"
3001						   "rfc3686-ctr-aes-caam",
3002				.cra_blocksize = 1,
3003			},
3004			.setkey = aead_setkey,
3005			.setauthsize = aead_setauthsize,
3006			.encrypt = aead_encrypt,
3007			.decrypt = aead_decrypt,
3008			.ivsize = CTR_RFC3686_IV_SIZE,
3009			.maxauthsize = MD5_DIGEST_SIZE,
3010		},
3011		.caam = {
3012			.class1_alg_type = OP_ALG_ALGSEL_AES |
3013					   OP_ALG_AAI_CTR_MOD128,
3014			.class2_alg_type = OP_ALG_ALGSEL_MD5 |
3015					   OP_ALG_AAI_HMAC_PRECOMP,
3016			.rfc3686 = true,
3017			.geniv = true,
3018		},
3019	},
3020	{
3021		.aead = {
3022			.base = {
3023				.cra_name = "authenc(hmac(sha1),"
3024					    "rfc3686(ctr(aes)))",
3025				.cra_driver_name = "authenc-hmac-sha1-"
3026						   "rfc3686-ctr-aes-caam",
3027				.cra_blocksize = 1,
3028			},
3029			.setkey = aead_setkey,
3030			.setauthsize = aead_setauthsize,
3031			.encrypt = aead_encrypt,
3032			.decrypt = aead_decrypt,
3033			.ivsize = CTR_RFC3686_IV_SIZE,
3034			.maxauthsize = SHA1_DIGEST_SIZE,
3035		},
3036		.caam = {
3037			.class1_alg_type = OP_ALG_ALGSEL_AES |
3038					   OP_ALG_AAI_CTR_MOD128,
3039			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3040					   OP_ALG_AAI_HMAC_PRECOMP,
3041			.rfc3686 = true,
3042		},
3043	},
3044	{
3045		.aead = {
3046			.base = {
3047				.cra_name = "seqiv(authenc("
3048					    "hmac(sha1),rfc3686(ctr(aes))))",
3049				.cra_driver_name = "seqiv-authenc-hmac-sha1-"
3050						   "rfc3686-ctr-aes-caam",
3051				.cra_blocksize = 1,
3052			},
3053			.setkey = aead_setkey,
3054			.setauthsize = aead_setauthsize,
3055			.encrypt = aead_encrypt,
3056			.decrypt = aead_decrypt,
3057			.ivsize = CTR_RFC3686_IV_SIZE,
3058			.maxauthsize = SHA1_DIGEST_SIZE,
3059		},
3060		.caam = {
3061			.class1_alg_type = OP_ALG_ALGSEL_AES |
3062					   OP_ALG_AAI_CTR_MOD128,
3063			.class2_alg_type = OP_ALG_ALGSEL_SHA1 |
3064					   OP_ALG_AAI_HMAC_PRECOMP,
3065			.rfc3686 = true,
3066			.geniv = true,
3067		},
3068	},
3069	{
3070		.aead = {
3071			.base = {
3072				.cra_name = "authenc(hmac(sha224),"
3073					    "rfc3686(ctr(aes)))",
3074				.cra_driver_name = "authenc-hmac-sha224-"
3075						   "rfc3686-ctr-aes-caam",
3076				.cra_blocksize = 1,
3077			},
3078			.setkey = aead_setkey,
3079			.setauthsize = aead_setauthsize,
3080			.encrypt = aead_encrypt,
3081			.decrypt = aead_decrypt,
3082			.ivsize = CTR_RFC3686_IV_SIZE,
3083			.maxauthsize = SHA224_DIGEST_SIZE,
3084		},
3085		.caam = {
3086			.class1_alg_type = OP_ALG_ALGSEL_AES |
3087					   OP_ALG_AAI_CTR_MOD128,
3088			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3089					   OP_ALG_AAI_HMAC_PRECOMP,
3090			.rfc3686 = true,
3091		},
3092	},
3093	{
3094		.aead = {
3095			.base = {
3096				.cra_name = "seqiv(authenc("
3097					    "hmac(sha224),rfc3686(ctr(aes))))",
3098				.cra_driver_name = "seqiv-authenc-hmac-sha224-"
3099						   "rfc3686-ctr-aes-caam",
3100				.cra_blocksize = 1,
3101			},
3102			.setkey = aead_setkey,
3103			.setauthsize = aead_setauthsize,
3104			.encrypt = aead_encrypt,
3105			.decrypt = aead_decrypt,
3106			.ivsize = CTR_RFC3686_IV_SIZE,
3107			.maxauthsize = SHA224_DIGEST_SIZE,
3108		},
3109		.caam = {
3110			.class1_alg_type = OP_ALG_ALGSEL_AES |
3111					   OP_ALG_AAI_CTR_MOD128,
3112			.class2_alg_type = OP_ALG_ALGSEL_SHA224 |
3113					   OP_ALG_AAI_HMAC_PRECOMP,
3114			.rfc3686 = true,
3115			.geniv = true,
3116		},
3117	},
3118	{
3119		.aead = {
3120			.base = {
3121				.cra_name = "authenc(hmac(sha256),"
3122					    "rfc3686(ctr(aes)))",
3123				.cra_driver_name = "authenc-hmac-sha256-"
3124						   "rfc3686-ctr-aes-caam",
3125				.cra_blocksize = 1,
3126			},
3127			.setkey = aead_setkey,
3128			.setauthsize = aead_setauthsize,
3129			.encrypt = aead_encrypt,
3130			.decrypt = aead_decrypt,
3131			.ivsize = CTR_RFC3686_IV_SIZE,
3132			.maxauthsize = SHA256_DIGEST_SIZE,
3133		},
3134		.caam = {
3135			.class1_alg_type = OP_ALG_ALGSEL_AES |
3136					   OP_ALG_AAI_CTR_MOD128,
3137			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3138					   OP_ALG_AAI_HMAC_PRECOMP,
3139			.rfc3686 = true,
3140		},
3141	},
3142	{
3143		.aead = {
3144			.base = {
3145				.cra_name = "seqiv(authenc(hmac(sha256),"
3146					    "rfc3686(ctr(aes))))",
3147				.cra_driver_name = "seqiv-authenc-hmac-sha256-"
3148						   "rfc3686-ctr-aes-caam",
3149				.cra_blocksize = 1,
3150			},
3151			.setkey = aead_setkey,
3152			.setauthsize = aead_setauthsize,
3153			.encrypt = aead_encrypt,
3154			.decrypt = aead_decrypt,
3155			.ivsize = CTR_RFC3686_IV_SIZE,
3156			.maxauthsize = SHA256_DIGEST_SIZE,
3157		},
3158		.caam = {
3159			.class1_alg_type = OP_ALG_ALGSEL_AES |
3160					   OP_ALG_AAI_CTR_MOD128,
3161			.class2_alg_type = OP_ALG_ALGSEL_SHA256 |
3162					   OP_ALG_AAI_HMAC_PRECOMP,
3163			.rfc3686 = true,
3164			.geniv = true,
3165		},
3166	},
3167	{
3168		.aead = {
3169			.base = {
3170				.cra_name = "authenc(hmac(sha384),"
3171					    "rfc3686(ctr(aes)))",
3172				.cra_driver_name = "authenc-hmac-sha384-"
3173						   "rfc3686-ctr-aes-caam",
3174				.cra_blocksize = 1,
3175			},
3176			.setkey = aead_setkey,
3177			.setauthsize = aead_setauthsize,
3178			.encrypt = aead_encrypt,
3179			.decrypt = aead_decrypt,
3180			.ivsize = CTR_RFC3686_IV_SIZE,
3181			.maxauthsize = SHA384_DIGEST_SIZE,
3182		},
3183		.caam = {
3184			.class1_alg_type = OP_ALG_ALGSEL_AES |
3185					   OP_ALG_AAI_CTR_MOD128,
3186			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3187					   OP_ALG_AAI_HMAC_PRECOMP,
3188			.rfc3686 = true,
3189		},
3190	},
3191	{
3192		.aead = {
3193			.base = {
3194				.cra_name = "seqiv(authenc(hmac(sha384),"
3195					    "rfc3686(ctr(aes))))",
3196				.cra_driver_name = "seqiv-authenc-hmac-sha384-"
3197						   "rfc3686-ctr-aes-caam",
3198				.cra_blocksize = 1,
3199			},
3200			.setkey = aead_setkey,
3201			.setauthsize = aead_setauthsize,
3202			.encrypt = aead_encrypt,
3203			.decrypt = aead_decrypt,
3204			.ivsize = CTR_RFC3686_IV_SIZE,
3205			.maxauthsize = SHA384_DIGEST_SIZE,
3206		},
3207		.caam = {
3208			.class1_alg_type = OP_ALG_ALGSEL_AES |
3209					   OP_ALG_AAI_CTR_MOD128,
3210			.class2_alg_type = OP_ALG_ALGSEL_SHA384 |
3211					   OP_ALG_AAI_HMAC_PRECOMP,
3212			.rfc3686 = true,
3213			.geniv = true,
3214		},
3215	},
3216	{
3217		.aead = {
3218			.base = {
3219				.cra_name = "authenc(hmac(sha512),"
3220					    "rfc3686(ctr(aes)))",
3221				.cra_driver_name = "authenc-hmac-sha512-"
3222						   "rfc3686-ctr-aes-caam",
3223				.cra_blocksize = 1,
3224			},
3225			.setkey = aead_setkey,
3226			.setauthsize = aead_setauthsize,
3227			.encrypt = aead_encrypt,
3228			.decrypt = aead_decrypt,
3229			.ivsize = CTR_RFC3686_IV_SIZE,
3230			.maxauthsize = SHA512_DIGEST_SIZE,
3231		},
3232		.caam = {
3233			.class1_alg_type = OP_ALG_ALGSEL_AES |
3234					   OP_ALG_AAI_CTR_MOD128,
3235			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3236					   OP_ALG_AAI_HMAC_PRECOMP,
3237			.rfc3686 = true,
3238		},
3239	},
3240	{
3241		.aead = {
3242			.base = {
3243				.cra_name = "seqiv(authenc(hmac(sha512),"
3244					    "rfc3686(ctr(aes))))",
3245				.cra_driver_name = "seqiv-authenc-hmac-sha512-"
3246						   "rfc3686-ctr-aes-caam",
3247				.cra_blocksize = 1,
3248			},
3249			.setkey = aead_setkey,
3250			.setauthsize = aead_setauthsize,
3251			.encrypt = aead_encrypt,
3252			.decrypt = aead_decrypt,
3253			.ivsize = CTR_RFC3686_IV_SIZE,
3254			.maxauthsize = SHA512_DIGEST_SIZE,
3255		},
3256		.caam = {
3257			.class1_alg_type = OP_ALG_ALGSEL_AES |
3258					   OP_ALG_AAI_CTR_MOD128,
3259			.class2_alg_type = OP_ALG_ALGSEL_SHA512 |
3260					   OP_ALG_AAI_HMAC_PRECOMP,
3261			.rfc3686 = true,
3262			.geniv = true,
3263		},
3264	},
3265};
3266
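/*
 * Illustrative use (not part of this driver): a kernel consumer reaches
 * one of the transforms above through the generic crypto API, e.g.
 *
 *	struct crypto_aead *tfm = crypto_alloc_aead("gcm(aes)", 0, 0);
 *
 * and the CAAM implementation is chosen when CAAM_CRA_PRIORITY wins
 * over other registered providers.
 */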
3267struct caam_crypto_alg {
3268	struct crypto_alg crypto_alg;
3269	struct list_head entry;
3270	struct caam_alg_entry caam;
3271};
3272
3273static int caam_init_common(struct caam_ctx *ctx, struct caam_alg_entry *caam,
3274			    bool uses_dkp)
3275{
3276	dma_addr_t dma_addr;
3277	struct caam_drv_private *priv;
3278
3279	ctx->jrdev = caam_jr_alloc();
3280	if (IS_ERR(ctx->jrdev)) {
3281		pr_err("Job Ring Device allocation for transform failed\n");
3282		return PTR_ERR(ctx->jrdev);
3283	}
3284
3285	priv = dev_get_drvdata(ctx->jrdev->parent);
3286	if (priv->era >= 6 && uses_dkp)
3287		ctx->dir = DMA_BIDIRECTIONAL;
3288	else
3289		ctx->dir = DMA_TO_DEVICE;
3290
3291	dma_addr = dma_map_single_attrs(ctx->jrdev, ctx->sh_desc_enc,
3292					offsetof(struct caam_ctx,
3293						 sh_desc_enc_dma),
3294					ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3295	if (dma_mapping_error(ctx->jrdev, dma_addr)) {
3296		dev_err(ctx->jrdev, "unable to map key, shared descriptors\n");
3297		caam_jr_free(ctx->jrdev);
3298		return -ENOMEM;
3299	}
3300
3301	ctx->sh_desc_enc_dma = dma_addr;
3302	ctx->sh_desc_dec_dma = dma_addr + offsetof(struct caam_ctx,
3303						   sh_desc_dec);
3304	ctx->sh_desc_givenc_dma = dma_addr + offsetof(struct caam_ctx,
3305						      sh_desc_givenc);
3306	ctx->key_dma = dma_addr + offsetof(struct caam_ctx, key);
3307
3308	/* copy descriptor header template value */
3309	ctx->cdata.algtype = OP_TYPE_CLASS1_ALG | caam->class1_alg_type;
3310	ctx->adata.algtype = OP_TYPE_CLASS2_ALG | caam->class2_alg_type;
3311
3312	return 0;
3313}
3314
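/*
 * Note: the single dma_map_single_attrs() in caam_init_common() covers
 * the run of struct caam_ctx members from sh_desc_enc up to (but not
 * including) sh_desc_enc_dma; the dec/givenc descriptor handles and
 * key_dma are then simple offsetof() displacements into that one
 * mapping.
 */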
3315static int caam_cra_init(struct crypto_tfm *tfm)
3316{
3317	struct crypto_alg *alg = tfm->__crt_alg;
3318	struct caam_crypto_alg *caam_alg =
3319		 container_of(alg, struct caam_crypto_alg, crypto_alg);
3320	struct caam_ctx *ctx = crypto_tfm_ctx(tfm);
3321
3322	return caam_init_common(ctx, &caam_alg->caam, false);
3323}
3324
3325static int caam_aead_init(struct crypto_aead *tfm)
3326{
3327	struct aead_alg *alg = crypto_aead_alg(tfm);
3328	struct caam_aead_alg *caam_alg =
3329		 container_of(alg, struct caam_aead_alg, aead);
3330	struct caam_ctx *ctx = crypto_aead_ctx(tfm);
3331
3332	return caam_init_common(ctx, &caam_alg->caam,
3333				alg->setkey == aead_setkey);
3334}
3335
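/*
 * uses_dkp is true only for the authenc-style AEADs (setkey ==
 * aead_setkey); on era >= 6 parts their descriptors derive the split
 * key in place, so caam_init_common() must map the context
 * DMA_BIDIRECTIONAL for the write-back to be visible.
 */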
3336static void caam_exit_common(struct caam_ctx *ctx)
3337{
3338	dma_unmap_single_attrs(ctx->jrdev, ctx->sh_desc_enc_dma,
3339			       offsetof(struct caam_ctx, sh_desc_enc_dma),
3340			       ctx->dir, DMA_ATTR_SKIP_CPU_SYNC);
3341	caam_jr_free(ctx->jrdev);
3342}
3343
3344static void caam_cra_exit(struct crypto_tfm *tfm)
3345{
3346	caam_exit_common(crypto_tfm_ctx(tfm));
3347}
3348
3349static void caam_aead_exit(struct crypto_aead *tfm)
3350{
3351	caam_exit_common(crypto_aead_ctx(tfm));
3352}
3353
3354static void __exit caam_algapi_exit(void)
3355{
3356
3357	struct caam_crypto_alg *t_alg, *n;
3358	int i;
3359
3360	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
3361		struct caam_aead_alg *t_alg = driver_aeads + i;
3362
3363		if (t_alg->registered)
3364			crypto_unregister_aead(&t_alg->aead);
3365	}
3366
3367	if (!alg_list.next)
3368		return;
3369
3370	list_for_each_entry_safe(t_alg, n, &alg_list, entry) {
3371		crypto_unregister_alg(&t_alg->crypto_alg);
3372		list_del(&t_alg->entry);
3373		kfree(t_alg);
3374	}
3375}
3376
3377static struct caam_crypto_alg *caam_alg_alloc(struct caam_alg_template
3378					      *template)
3379{
3380	struct caam_crypto_alg *t_alg;
3381	struct crypto_alg *alg;
3382
3383	t_alg = kzalloc(sizeof(*t_alg), GFP_KERNEL);
3384	if (!t_alg) {
3385		pr_err("failed to allocate t_alg\n");
3386		return ERR_PTR(-ENOMEM);
3387	}
3388
3389	alg = &t_alg->crypto_alg;
3390
3391	snprintf(alg->cra_name, CRYPTO_MAX_ALG_NAME, "%s", template->name);
3392	snprintf(alg->cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
3393		 template->driver_name);
3394	alg->cra_module = THIS_MODULE;
3395	alg->cra_init = caam_cra_init;
3396	alg->cra_exit = caam_cra_exit;
3397	alg->cra_priority = CAAM_CRA_PRIORITY;
3398	alg->cra_blocksize = template->blocksize;
3399	alg->cra_alignmask = 0;
3400	alg->cra_ctxsize = sizeof(struct caam_ctx);
3401	alg->cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY |
3402			 template->type;
3403	switch (template->type) {
3404	case CRYPTO_ALG_TYPE_GIVCIPHER:
3405		alg->cra_type = &crypto_givcipher_type;
3406		alg->cra_ablkcipher = template->template_ablkcipher;
3407		break;
3408	case CRYPTO_ALG_TYPE_ABLKCIPHER:
3409		alg->cra_type = &crypto_ablkcipher_type;
3410		alg->cra_ablkcipher = template->template_ablkcipher;
3411		break;
3412	}
3413
3414	t_alg->caam.class1_alg_type = template->class1_alg_type;
3415	t_alg->caam.class2_alg_type = template->class2_alg_type;
3416
3417	return t_alg;
3418}

static void caam_aead_alg_init(struct caam_aead_alg *t_alg)
{
	struct aead_alg *alg = &t_alg->aead;

	alg->base.cra_module = THIS_MODULE;
	alg->base.cra_priority = CAAM_CRA_PRIORITY;
	alg->base.cra_ctxsize = sizeof(struct caam_ctx);
	alg->base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY;

	alg->init = caam_aead_init;
	alg->exit = caam_aead_exit;
}
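
/*
 * Unlike the cipher templates, driver_aeads entries are full aead_alg
 * definitions, so only the fields common to every algorithm are filled in
 * here. An illustrative, abridged entry (the setkey/encrypt/decrypt
 * callbacks named below stand in for the driver's actual handlers):
 *
 *	{
 *		.aead = {
 *			.base = {
 *				.cra_name = "gcm(aes)",
 *				.cra_driver_name = "gcm-aes-caam",
 *				.cra_blocksize = 1,
 *			},
 *			.setkey = gcm_setkey,
 *			.setauthsize = gcm_setauthsize,
 *			.encrypt = gcm_encrypt,
 *			.decrypt = gcm_decrypt,
 *			.ivsize = 12,
 *			.maxauthsize = AES_BLOCK_SIZE,
 *		},
 *		.caam = {
 *			.class1_alg_type = OP_ALG_ALGSEL_AES | OP_ALG_AAI_GCM,
 *		},
 *	},
 */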

static int __init caam_algapi_init(void)
{
	struct device_node *dev_node;
	struct platform_device *pdev;
	struct device *ctrldev;
	struct caam_drv_private *priv;
	int i = 0, err = 0;
	u32 cha_vid, cha_inst, des_inst, aes_inst, md_inst;
	unsigned int md_limit = SHA512_DIGEST_SIZE;
	bool registered = false;

	dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec-v4.0");
	if (!dev_node) {
		dev_node = of_find_compatible_node(NULL, NULL, "fsl,sec4.0");
		if (!dev_node)
			return -ENODEV;
	}

	pdev = of_find_device_by_node(dev_node);
	if (!pdev) {
		of_node_put(dev_node);
		return -ENODEV;
	}

	ctrldev = &pdev->dev;
	priv = dev_get_drvdata(ctrldev);
	of_node_put(dev_node);

	/*
	 * If priv is NULL, it's probably because the caam driver wasn't
	 * properly initialized (e.g. RNG4 init failed). Thus, bail out here.
	 */
	if (!priv)
		return -ENODEV;

	INIT_LIST_HEAD(&alg_list);

	/*
	 * Register crypto algorithms the device supports.
	 * First, detect presence and attributes of DES, AES, and MD blocks.
	 */
	cha_vid = rd_reg32(&priv->ctrl->perfmon.cha_id_ls);
	cha_inst = rd_reg32(&priv->ctrl->perfmon.cha_num_ls);
	des_inst = (cha_inst & CHA_ID_LS_DES_MASK) >> CHA_ID_LS_DES_SHIFT;
	aes_inst = (cha_inst & CHA_ID_LS_AES_MASK) >> CHA_ID_LS_AES_SHIFT;
	md_inst = (cha_inst & CHA_ID_LS_MD_MASK) >> CHA_ID_LS_MD_SHIFT;
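
	/*
	 * Each masked field in cha_num_ls counts how many instances of a
	 * given CHA (crypto hardware accelerator) type the SEC integrates,
	 * while cha_id_ls carries per-CHA version numbers; the version is
	 * what distinguishes the low-power (LP) blocks checked below.
	 */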

	/* If MD is present, limit digest size based on LP256 */
	if (md_inst && ((cha_vid & CHA_ID_LS_MD_MASK) == CHA_ID_LS_MD_LP256))
		md_limit = SHA256_DIGEST_SIZE;

	for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
		struct caam_crypto_alg *t_alg;
		struct caam_alg_template *alg = driver_algs + i;
		u32 alg_sel = alg->class1_alg_type & OP_ALG_ALGSEL_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES modes not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if ((alg->class1_alg_type & OP_ALG_AAI_MASK) ==
			     OP_ALG_AAI_XTS)
				continue;

		t_alg = caam_alg_alloc(alg);
		if (IS_ERR(t_alg)) {
			err = PTR_ERR(t_alg);
			pr_warn("%s alg allocation failed\n", alg->driver_name);
			continue;
		}

		err = crypto_register_alg(&t_alg->crypto_alg);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->crypto_alg.cra_driver_name);
			kfree(t_alg);
			continue;
		}

		list_add_tail(&t_alg->entry, &alg_list);
		registered = true;
	}
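
	/*
	 * In the AEAD loop below, md_limit is enforced through the
	 * maxauthsize check: on LP256 parts the MDHA block stops at
	 * SHA-256, so authenc algorithms that need SHA-384/512 digests
	 * are skipped.
	 */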

	for (i = 0; i < ARRAY_SIZE(driver_aeads); i++) {
		struct caam_aead_alg *t_alg = driver_aeads + i;
		u32 c1_alg_sel = t_alg->caam.class1_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 c2_alg_sel = t_alg->caam.class2_alg_type &
				 OP_ALG_ALGSEL_MASK;
		u32 alg_aai = t_alg->caam.class1_alg_type & OP_ALG_AAI_MASK;

		/* Skip DES algorithms if not supported by device */
		if (!des_inst &&
		    ((c1_alg_sel == OP_ALG_ALGSEL_3DES) ||
		     (c1_alg_sel == OP_ALG_ALGSEL_DES)))
			continue;

		/* Skip AES algorithms if not supported by device */
		if (!aes_inst && (c1_alg_sel == OP_ALG_ALGSEL_AES))
			continue;

		/*
		 * Check support for AES algorithms not available
		 * on LP devices.
		 */
		if ((cha_vid & CHA_ID_LS_AES_MASK) == CHA_ID_LS_AES_LP)
			if (alg_aai == OP_ALG_AAI_GCM)
				continue;

		/*
		 * Skip algorithms requiring message digests
		 * if MD or MD size is not supported by device.
		 */
		if (c2_alg_sel &&
		    (!md_inst || (t_alg->aead.maxauthsize > md_limit)))
			continue;

		caam_aead_alg_init(t_alg);

		err = crypto_register_aead(&t_alg->aead);
		if (err) {
			pr_warn("%s alg registration failed\n",
				t_alg->aead.base.cra_driver_name);
			continue;
		}

		t_alg->registered = true;
		registered = true;
	}

	if (registered)
		pr_info("caam algorithms registered in /proc/crypto\n");

	return err;
}
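
/*
 * Once the module is loaded, registration can be verified from userspace,
 * e.g. (illustrative shell session; exact output depends on the SEC
 * hardware present):
 *
 *	$ grep -B1 -A2 'caam' /proc/crypto
 */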

module_init(caam_algapi_init);
module_exit(caam_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("FSL CAAM support for crypto API");
MODULE_AUTHOR("Freescale Semiconductor - NMG/STC");