Linux Audio

Check our new training course

Loading...
v6.8
   1// SPDX-License-Identifier: GPL-2.0
   2/*
   3 * Copyright (C) 2017 Marvell
   4 *
   5 * Antoine Tenart <antoine.tenart@free-electrons.com>
   6 */
   7
   8#include <crypto/aes.h>
   9#include <crypto/hmac.h>
  10#include <crypto/md5.h>
  11#include <crypto/sha1.h>
  12#include <crypto/sha2.h>
  13#include <crypto/sha3.h>
  14#include <crypto/skcipher.h>
  15#include <crypto/sm3.h>
  16#include <crypto/internal/cipher.h>
  17#include <linux/device.h>
  18#include <linux/dma-mapping.h>
  19#include <linux/dmapool.h>
  20
  21#include "safexcel.h"
  22
  23struct safexcel_ahash_ctx {
  24	struct safexcel_context base;
  25
  26	u32 alg;
  27	u8  key_sz;
  28	bool cbcmac;
  29	bool do_fallback;
  30	bool fb_init_done;
  31	bool fb_do_setkey;
  32
  33	struct crypto_aes_ctx *aes;
  34	struct crypto_ahash *fback;
  35	struct crypto_shash *shpre;
  36	struct shash_desc *shdesc;
  37};
  38
  39struct safexcel_ahash_req {
  40	bool last_req;
  41	bool finish;
  42	bool hmac;
  43	bool needs_inv;
  44	bool hmac_zlen;
  45	bool len_is_le;
  46	bool not_first;
  47	bool xcbcmac;
  48
  49	int nents;
  50	dma_addr_t result_dma;
  51
  52	u32 digest;
  53
  54	u8 state_sz;    /* expected state size, only set once */
  55	u8 block_sz;    /* block size, only set once */
  56	u8 digest_sz;   /* output digest size, only set once */
  57	__le32 state[SHA3_512_BLOCK_SIZE /
  58		     sizeof(__le32)] __aligned(sizeof(__le32));
  59
  60	u64 len;
  61	u64 processed;
  62
  63	u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
  64	dma_addr_t cache_dma;
  65	unsigned int cache_sz;
  66
  67	u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
  68};
  69
  70static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
  71{
  72	return req->len - req->processed;
  73}
  74
  75static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
  76				u32 input_length, u32 result_length,
  77				bool cbcmac)
  78{
  79	struct safexcel_token *token =
  80		(struct safexcel_token *)cdesc->control_data.token;
  81
  82	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
  83	token[0].packet_length = input_length;
  84	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
  85
  86	input_length &= 15;
  87	if (unlikely(cbcmac && input_length)) {
  88		token[0].stat =  0;
  89		token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
  90		token[1].packet_length = 16 - input_length;
  91		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
  92		token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
  93	} else {
  94		token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
  95		eip197_noop_token(&token[1]);
  96	}
  97
  98	token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
  99	token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
 100			EIP197_TOKEN_STAT_LAST_PACKET;
 101	token[2].packet_length = result_length;
 102	token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
 103				EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
 104
 105	eip197_noop_token(&token[3]);
 106}
 107
 108static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
 109				     struct safexcel_ahash_req *req,
 110				     struct safexcel_command_desc *cdesc)
 111{
 112	struct safexcel_crypto_priv *priv = ctx->base.priv;
 113	u64 count = 0;
 114
 115	cdesc->control_data.control0 = ctx->alg;
 116	cdesc->control_data.control1 = 0;
 117
 118	/*
 119	 * Copy the input digest if needed, and setup the context
 120	 * fields. Do this now as we need it to setup the first command
 121	 * descriptor.
 122	 */
 123	if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
 124		if (req->xcbcmac)
 125			memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
 126		else
 127			memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
 128
 129		if (!req->finish && req->xcbcmac)
 130			cdesc->control_data.control0 |=
 131				CONTEXT_CONTROL_DIGEST_XCM |
 132				CONTEXT_CONTROL_TYPE_HASH_OUT  |
 133				CONTEXT_CONTROL_NO_FINISH_HASH |
 134				CONTEXT_CONTROL_SIZE(req->state_sz /
 135						     sizeof(u32));
 136		else
 137			cdesc->control_data.control0 |=
 138				CONTEXT_CONTROL_DIGEST_XCM |
 139				CONTEXT_CONTROL_TYPE_HASH_OUT  |
 140				CONTEXT_CONTROL_SIZE(req->state_sz /
 141						     sizeof(u32));
 142		return;
 143	} else if (!req->processed) {
 144		/* First - and possibly only - block of basic hash only */
 145		if (req->finish)
 146			cdesc->control_data.control0 |= req->digest |
 147				CONTEXT_CONTROL_TYPE_HASH_OUT |
 148				CONTEXT_CONTROL_RESTART_HASH  |
 149				/* ensure its not 0! */
 150				CONTEXT_CONTROL_SIZE(1);
 151		else
 152			cdesc->control_data.control0 |= req->digest |
 153				CONTEXT_CONTROL_TYPE_HASH_OUT  |
 154				CONTEXT_CONTROL_RESTART_HASH   |
 155				CONTEXT_CONTROL_NO_FINISH_HASH |
 156				/* ensure its not 0! */
 157				CONTEXT_CONTROL_SIZE(1);
 158		return;
 159	}
 160
 161	/* Hash continuation or HMAC, setup (inner) digest from state */
 162	memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
 163
 164	if (req->finish) {
 165		/* Compute digest count for hash/HMAC finish operations */
 166		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
 167		    req->hmac_zlen || (req->processed != req->block_sz)) {
 168			count = req->processed / EIP197_COUNTER_BLOCK_SIZE;
 169
 170			/* This is a hardware limitation, as the
 171			 * counter must fit into an u32. This represents
 172			 * a fairly big amount of input data, so we
 173			 * shouldn't see this.
 174			 */
 175			if (unlikely(count & 0xffffffff00000000ULL)) {
 176				dev_warn(priv->dev,
 177					 "Input data is too big\n");
 178				return;
 179			}
 180		}
 181
 182		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
 183		    /* Special case: zero length HMAC */
 184		    req->hmac_zlen ||
 185		    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
 186		    (req->processed != req->block_sz)) {
 187			/* Basic hash continue operation, need digest + cnt */
 188			cdesc->control_data.control0 |=
 189				CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
 190				CONTEXT_CONTROL_TYPE_HASH_OUT |
 191				CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
 192			/* For zero-len HMAC, don't finalize, already padded! */
 193			if (req->hmac_zlen)
 194				cdesc->control_data.control0 |=
 195					CONTEXT_CONTROL_NO_FINISH_HASH;
 196			cdesc->control_data.control1 |=
 197				CONTEXT_CONTROL_DIGEST_CNT;
 198			ctx->base.ctxr->data[req->state_sz >> 2] =
 199				cpu_to_le32(count);
 200			req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
 201
 202			/* Clear zero-length HMAC flag for next operation! */
 203			req->hmac_zlen = false;
 204		} else { /* HMAC */
 205			/* Need outer digest for HMAC finalization */
 206			memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
 207			       &ctx->base.opad, req->state_sz);
 208
 209			/* Single pass HMAC - no digest count */
 210			cdesc->control_data.control0 |=
 211				CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
 212				CONTEXT_CONTROL_TYPE_HASH_OUT |
 213				CONTEXT_CONTROL_DIGEST_HMAC;
 214		}
 215	} else { /* Hash continuation, do not finish yet */
 216		cdesc->control_data.control0 |=
 217			CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
 218			CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
 219			CONTEXT_CONTROL_TYPE_HASH_OUT |
 220			CONTEXT_CONTROL_NO_FINISH_HASH;
 221	}
 222}
 223
 224static int safexcel_ahash_enqueue(struct ahash_request *areq);
 225
 226static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
 227				      int ring,
 228				      struct crypto_async_request *async,
 229				      bool *should_complete, int *ret)
 230{
 231	struct safexcel_result_desc *rdesc;
 232	struct ahash_request *areq = ahash_request_cast(async);
 233	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
 234	struct safexcel_ahash_req *sreq = ahash_request_ctx_dma(areq);
 235	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
 236	u64 cache_len;
 237
 238	*ret = 0;
 239
 240	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
 241	if (IS_ERR(rdesc)) {
 242		dev_err(priv->dev,
 243			"hash: result: could not retrieve the result descriptor\n");
 244		*ret = PTR_ERR(rdesc);
 245	} else {
 246		*ret = safexcel_rdesc_check_errors(priv, rdesc);
 247	}
 248
 249	safexcel_complete(priv, ring);
 250
 251	if (sreq->nents) {
 252		dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
 253		sreq->nents = 0;
 254	}
 255
 256	if (sreq->result_dma) {
 257		dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
 258				 DMA_FROM_DEVICE);
 259		sreq->result_dma = 0;
 260	}
 261
 262	if (sreq->cache_dma) {
 263		dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
 264				 DMA_TO_DEVICE);
 265		sreq->cache_dma = 0;
 266		sreq->cache_sz = 0;
 267	}
 268
 269	if (sreq->finish) {
 270		if (sreq->hmac &&
 271		    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
 272			/* Faking HMAC using hash - need to do outer hash */
 273			memcpy(sreq->cache, sreq->state,
 274			       crypto_ahash_digestsize(ahash));
 275
 276			memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);
 277
 278			sreq->len = sreq->block_sz +
 279				    crypto_ahash_digestsize(ahash);
 280			sreq->processed = sreq->block_sz;
 281			sreq->hmac = 0;
 282
 283			if (priv->flags & EIP197_TRC_CACHE)
 284				ctx->base.needs_inv = true;
 285			areq->nbytes = 0;
 286			safexcel_ahash_enqueue(areq);
 287
 288			*should_complete = false; /* Not done yet */
 289			return 1;
 290		}
 291
 292		if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
 293			     ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
 294			/* Undo final XOR with 0xffffffff ...*/
 295			*(__le32 *)areq->result = ~sreq->state[0];
 296		} else {
 297			memcpy(areq->result, sreq->state,
 298			       crypto_ahash_digestsize(ahash));
 299		}
 300	}
 301
 302	cache_len = safexcel_queued_len(sreq);
 303	if (cache_len)
 304		memcpy(sreq->cache, sreq->cache_next, cache_len);
 305
 306	*should_complete = true;
 307
 308	return 1;
 309}
 310
 311static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
 312				   int *commands, int *results)
 313{
 314	struct ahash_request *areq = ahash_request_cast(async);
 315	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
 316	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
 317	struct safexcel_crypto_priv *priv = ctx->base.priv;
 318	struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
 319	struct safexcel_result_desc *rdesc;
 320	struct scatterlist *sg;
 321	struct safexcel_token *dmmy;
 322	int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
 323	u64 queued, len;
 324
 325	queued = safexcel_queued_len(req);
 326	if (queued <= HASH_CACHE_SIZE)
 327		cache_len = queued;
 328	else
 329		cache_len = queued - areq->nbytes;
 330
 331	if (!req->finish && !req->last_req) {
 332		/* If this is not the last request and the queued data does not
 333		 * fit into full cache blocks, cache it for the next send call.
 334		 */
 335		extra = queued & (HASH_CACHE_SIZE - 1);
 336
 337		/* If this is not the last request and the queued data
 338		 * is a multiple of a block, cache the last one for now.
 339		 */
 340		if (!extra)
 341			extra = HASH_CACHE_SIZE;
 342
 343		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
 344				   req->cache_next, extra,
 345				   areq->nbytes - extra);
 346
 347		queued -= extra;
 348
 349		if (!queued) {
 350			*commands = 0;
 351			*results = 0;
 352			return 0;
 353		}
 354
 355		extra = 0;
 356	}
 357
 358	if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
 359		if (unlikely(cache_len < AES_BLOCK_SIZE)) {
 360			/*
 361			 * Cache contains less than 1 full block, complete.
 362			 */
 363			extra = AES_BLOCK_SIZE - cache_len;
 364			if (queued > cache_len) {
 365				/* More data follows: borrow bytes */
 366				u64 tmp = queued - cache_len;
 367
 368				skip = min_t(u64, tmp, extra);
 369				sg_pcopy_to_buffer(areq->src,
 370					sg_nents(areq->src),
 371					req->cache + cache_len,
 372					skip, 0);
 373			}
 374			extra -= skip;
 375			memset(req->cache + cache_len + skip, 0, extra);
 376			if (!ctx->cbcmac && extra) {
 377				// 10- padding for XCBCMAC & CMAC
 378				req->cache[cache_len + skip] = 0x80;
 379				// HW will use K2 iso K3 - compensate!
 380				for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
 381					u32 *cache = (void *)req->cache;
 382					u32 *ipad = ctx->base.ipad.word;
 383					u32 x;
 384
 385					x = ipad[i] ^ ipad[i + 4];
 386					cache[i] ^= swab32(x);
 387				}
 388			}
 389			cache_len = AES_BLOCK_SIZE;
 390			queued = queued + extra;
 391		}
 392
 393		/* XCBC continue: XOR previous result into 1st word */
 394		crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
 395	}
 396
 397	len = queued;
 398	/* Add a command descriptor for the cached data, if any */
 399	if (cache_len) {
 400		req->cache_dma = dma_map_single(priv->dev, req->cache,
 401						cache_len, DMA_TO_DEVICE);
 402		if (dma_mapping_error(priv->dev, req->cache_dma))
 403			return -EINVAL;
 404
 405		req->cache_sz = cache_len;
 406		first_cdesc = safexcel_add_cdesc(priv, ring, 1,
 407						 (cache_len == len),
 408						 req->cache_dma, cache_len,
 409						 len, ctx->base.ctxr_dma,
 410						 &dmmy);
 411		if (IS_ERR(first_cdesc)) {
 412			ret = PTR_ERR(first_cdesc);
 413			goto unmap_cache;
 414		}
 415		n_cdesc++;
 416
 417		queued -= cache_len;
 418		if (!queued)
 419			goto send_command;
 420	}
 421
 422	/* Now handle the current ahash request buffer(s) */
 423	req->nents = dma_map_sg(priv->dev, areq->src,
 424				sg_nents_for_len(areq->src,
 425						 areq->nbytes),
 426				DMA_TO_DEVICE);
 427	if (!req->nents) {
 428		ret = -ENOMEM;
 429		goto cdesc_rollback;
 430	}
 431
 432	for_each_sg(areq->src, sg, req->nents, i) {
 433		int sglen = sg_dma_len(sg);
 434
 435		if (unlikely(sglen <= skip)) {
 436			skip -= sglen;
 437			continue;
 438		}
 439
 440		/* Do not overflow the request */
 441		if ((queued + skip) <= sglen)
 442			sglen = queued;
 443		else
 444			sglen -= skip;
 445
 446		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
 447					   !(queued - sglen),
 448					   sg_dma_address(sg) + skip, sglen,
 449					   len, ctx->base.ctxr_dma, &dmmy);
 450		if (IS_ERR(cdesc)) {
 451			ret = PTR_ERR(cdesc);
 452			goto unmap_sg;
 453		}
 454
 455		if (!n_cdesc)
 456			first_cdesc = cdesc;
 457		n_cdesc++;
 458
 459		queued -= sglen;
 460		if (!queued)
 461			break;
 462		skip = 0;
 463	}
 464
 465send_command:
 466	/* Setup the context options */
 467	safexcel_context_control(ctx, req, first_cdesc);
 468
 469	/* Add the token */
 470	safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);
 471
 472	req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
 473					 DMA_FROM_DEVICE);
 474	if (dma_mapping_error(priv->dev, req->result_dma)) {
 475		ret = -EINVAL;
 476		goto unmap_sg;
 477	}
 478
 479	/* Add a result descriptor */
 480	rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
 481				   req->digest_sz);
 482	if (IS_ERR(rdesc)) {
 483		ret = PTR_ERR(rdesc);
 484		goto unmap_result;
 485	}
 486
 487	safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);
 488
 489	req->processed += len - extra;
 490
 491	*commands = n_cdesc;
 492	*results = 1;
 493	return 0;
 494
 495unmap_result:
 496	dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
 497			 DMA_FROM_DEVICE);
 498unmap_sg:
 499	if (req->nents) {
 500		dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
 501		req->nents = 0;
 502	}
 503cdesc_rollback:
 504	for (i = 0; i < n_cdesc; i++)
 505		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
 506unmap_cache:
 507	if (req->cache_dma) {
 508		dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
 509				 DMA_TO_DEVICE);
 510		req->cache_dma = 0;
 511		req->cache_sz = 0;
 512	}
 513
 514	return ret;
 515}
 516
 517static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 518				      int ring,
 519				      struct crypto_async_request *async,
 520				      bool *should_complete, int *ret)
 521{
 522	struct safexcel_result_desc *rdesc;
 523	struct ahash_request *areq = ahash_request_cast(async);
 524	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
 525	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
 526	int enq_ret;
 527
 528	*ret = 0;
 529
 530	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
 531	if (IS_ERR(rdesc)) {
 532		dev_err(priv->dev,
 533			"hash: invalidate: could not retrieve the result descriptor\n");
 534		*ret = PTR_ERR(rdesc);
 535	} else {
 536		*ret = safexcel_rdesc_check_errors(priv, rdesc);
 537	}
 538
 539	safexcel_complete(priv, ring);
 540
 541	if (ctx->base.exit_inv) {
 542		dma_pool_free(priv->context_pool, ctx->base.ctxr,
 543			      ctx->base.ctxr_dma);
 544
 545		*should_complete = true;
 546		return 1;
 547	}
 548
 549	ring = safexcel_select_ring(priv);
 550	ctx->base.ring = ring;
 551
 552	spin_lock_bh(&priv->ring[ring].queue_lock);
 553	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
 554	spin_unlock_bh(&priv->ring[ring].queue_lock);
 555
 556	if (enq_ret != -EINPROGRESS)
 557		*ret = enq_ret;
 558
 559	queue_work(priv->ring[ring].workqueue,
 560		   &priv->ring[ring].work_data.work);
 561
 562	*should_complete = false;
 563
 564	return 1;
 565}
 566
 567static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
 568				  struct crypto_async_request *async,
 569				  bool *should_complete, int *ret)
 570{
 571	struct ahash_request *areq = ahash_request_cast(async);
 572	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
 573	int err;
 574
 575	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);
 576
 577	if (req->needs_inv) {
 578		req->needs_inv = false;
 579		err = safexcel_handle_inv_result(priv, ring, async,
 580						 should_complete, ret);
 581	} else {
 582		err = safexcel_handle_req_result(priv, ring, async,
 583						 should_complete, ret);
 584	}
 585
 586	return err;
 587}
 588
 589static int safexcel_ahash_send_inv(struct crypto_async_request *async,
 590				   int ring, int *commands, int *results)
 591{
 592	struct ahash_request *areq = ahash_request_cast(async);
 593	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
 594	int ret;
 595
 596	ret = safexcel_invalidate_cache(async, ctx->base.priv,
 597					ctx->base.ctxr_dma, ring);
 598	if (unlikely(ret))
 599		return ret;
 600
 601	*commands = 1;
 602	*results = 1;
 603
 604	return 0;
 605}
 606
 607static int safexcel_ahash_send(struct crypto_async_request *async,
 608			       int ring, int *commands, int *results)
 609{
 610	struct ahash_request *areq = ahash_request_cast(async);
 611	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
 612	int ret;
 613
 614	if (req->needs_inv)
 615		ret = safexcel_ahash_send_inv(async, ring, commands, results);
 616	else
 617		ret = safexcel_ahash_send_req(async, ring, commands, results);
 618
 619	return ret;
 620}
 621
 622static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
 623{
 624	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
 625	struct safexcel_crypto_priv *priv = ctx->base.priv;
 626	EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
 627	struct safexcel_ahash_req *rctx = ahash_request_ctx_dma(req);
 628	DECLARE_CRYPTO_WAIT(result);
 629	int ring = ctx->base.ring;
 630	int err;
 631
 632	memset(req, 0, EIP197_AHASH_REQ_SIZE);
 633
 634	/* create invalidation request */
 635	init_completion(&result.completion);
 636	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 637				   crypto_req_done, &result);
 638
 639	ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
 640	ctx = crypto_tfm_ctx(req->base.tfm);
 641	ctx->base.exit_inv = true;
 642	rctx->needs_inv = true;
 643
 644	spin_lock_bh(&priv->ring[ring].queue_lock);
 645	crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
 646	spin_unlock_bh(&priv->ring[ring].queue_lock);
 647
 648	queue_work(priv->ring[ring].workqueue,
 649		   &priv->ring[ring].work_data.work);
 650
 651	err = crypto_wait_req(-EINPROGRESS, &result);
 652
 653	if (err) {
 654		dev_warn(priv->dev, "hash: completion error (%d)\n", err);
 655		return err;
 
 656	}
 657
 658	return 0;
 659}
 660
 661/* safexcel_ahash_cache: cache data until at least one request can be sent to
 662 * the engine, aka. when there is at least 1 block size in the pipe.
 663 */
 664static int safexcel_ahash_cache(struct ahash_request *areq)
 665{
 666	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
 667	u64 cache_len;
 668
 669	/* cache_len: everything accepted by the driver but not sent yet,
 670	 * tot sz handled by update() - last req sz - tot sz handled by send()
 671	 */
 672	cache_len = safexcel_queued_len(req);
 673
 674	/*
 675	 * In case there isn't enough bytes to proceed (less than a
 676	 * block size), cache the data until we have enough.
 677	 */
 678	if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
 679		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
 680				   req->cache + cache_len,
 681				   areq->nbytes, 0);
 682		return 0;
 683	}
 684
 685	/* We couldn't cache all the data */
 686	return -E2BIG;
 687}
 688
 689static int safexcel_ahash_enqueue(struct ahash_request *areq)
 690{
 691	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
 692	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
 693	struct safexcel_crypto_priv *priv = ctx->base.priv;
 694	int ret, ring;
 695
 696	req->needs_inv = false;
 697
 698	if (ctx->base.ctxr) {
 699		if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
 700		     /* invalidate for *any* non-XCBC continuation */
 701		   ((req->not_first && !req->xcbcmac) ||
 702		     /* invalidate if (i)digest changed */
 703		     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
 704		     /* invalidate for HMAC finish with odigest changed */
 705		     (req->finish && req->hmac &&
 706		      memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
 707			     &ctx->base.opad, req->state_sz))))
 708			/*
 709			 * We're still setting needs_inv here, even though it is
 710			 * cleared right away, because the needs_inv flag can be
 711			 * set in other functions and we want to keep the same
 712			 * logic.
 713			 */
 714			ctx->base.needs_inv = true;
 715
 716		if (ctx->base.needs_inv) {
 717			ctx->base.needs_inv = false;
 718			req->needs_inv = true;
 719		}
 720	} else {
 721		ctx->base.ring = safexcel_select_ring(priv);
 722		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
 723						 EIP197_GFP_FLAGS(areq->base),
 724						 &ctx->base.ctxr_dma);
 725		if (!ctx->base.ctxr)
 726			return -ENOMEM;
 727	}
 728	req->not_first = true;
 729
 730	ring = ctx->base.ring;
 731
 732	spin_lock_bh(&priv->ring[ring].queue_lock);
 733	ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
 734	spin_unlock_bh(&priv->ring[ring].queue_lock);
 735
 736	queue_work(priv->ring[ring].workqueue,
 737		   &priv->ring[ring].work_data.work);
 738
 739	return ret;
 740}
 741
 742static int safexcel_ahash_update(struct ahash_request *areq)
 743{
 744	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
 745	int ret;
 746
 747	/* If the request is 0 length, do nothing */
 748	if (!areq->nbytes)
 749		return 0;
 750
 751	/* Add request to the cache if it fits */
 752	ret = safexcel_ahash_cache(areq);
 753
 754	/* Update total request length */
 755	req->len += areq->nbytes;
 756
 757	/* If not all data could fit into the cache, go process the excess.
 758	 * Also go process immediately for an HMAC IV precompute, which
 759	 * will never be finished at all, but needs to be processed anyway.
 760	 */
 761	if ((ret && !req->finish) || req->last_req)
 762		return safexcel_ahash_enqueue(areq);
 763
 764	return 0;
 765}
 766
 767static int safexcel_ahash_final(struct ahash_request *areq)
 768{
 769	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
 770	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
 771
 772	req->finish = true;
 773
 774	if (unlikely(!req->len && !areq->nbytes)) {
 775		/*
 776		 * If we have an overall 0 length *hash* request:
 777		 * The HW cannot do 0 length hash, so we provide the correct
 778		 * result directly here.
 779		 */
 780		if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
 781			memcpy(areq->result, md5_zero_message_hash,
 782			       MD5_DIGEST_SIZE);
 783		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
 784			memcpy(areq->result, sha1_zero_message_hash,
 785			       SHA1_DIGEST_SIZE);
 786		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
 787			memcpy(areq->result, sha224_zero_message_hash,
 788			       SHA224_DIGEST_SIZE);
 789		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
 790			memcpy(areq->result, sha256_zero_message_hash,
 791			       SHA256_DIGEST_SIZE);
 792		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
 793			memcpy(areq->result, sha384_zero_message_hash,
 794			       SHA384_DIGEST_SIZE);
 795		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
 796			memcpy(areq->result, sha512_zero_message_hash,
 797			       SHA512_DIGEST_SIZE);
 798		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
 799			memcpy(areq->result,
 800			       EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
 801		}
 802
 803		return 0;
 804	} else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
 805			    ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
 806			    req->len == sizeof(u32) && !areq->nbytes)) {
 807		/* Zero length CRC32 */
 808		memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
 809		return 0;
 810	} else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
 811			    !areq->nbytes)) {
 812		/* Zero length CBC MAC */
 813		memset(areq->result, 0, AES_BLOCK_SIZE);
 814		return 0;
 815	} else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
 816			    !areq->nbytes)) {
 817		/* Zero length (X)CBC/CMAC */
 818		int i;
 819
 820		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
 821			u32 *result = (void *)areq->result;
 822
 823			/* K3 */
 824			result[i] = swab32(ctx->base.ipad.word[i + 4]);
 825		}
 826		areq->result[0] ^= 0x80;			// 10- padding
 827		aes_encrypt(ctx->aes, areq->result, areq->result);
 828		return 0;
 829	} else if (unlikely(req->hmac &&
 830			    (req->len == req->block_sz) &&
 831			    !areq->nbytes)) {
 832		/*
 833		 * If we have an overall 0 length *HMAC* request:
 834		 * For HMAC, we need to finalize the inner digest
 835		 * and then perform the outer hash.
 836		 */
 837
 838		/* generate pad block in the cache */
 839		/* start with a hash block of all zeroes */
 840		memset(req->cache, 0, req->block_sz);
 841		/* set the first byte to 0x80 to 'append a 1 bit' */
 842		req->cache[0] = 0x80;
 843		/* add the length in bits in the last 2 bytes */
 844		if (req->len_is_le) {
 845			/* Little endian length word (e.g. MD5) */
 846			req->cache[req->block_sz-8] = (req->block_sz << 3) &
 847						      255;
 848			req->cache[req->block_sz-7] = (req->block_sz >> 5);
 849		} else {
 850			/* Big endian length word (e.g. any SHA) */
 851			req->cache[req->block_sz-2] = (req->block_sz >> 5);
 852			req->cache[req->block_sz-1] = (req->block_sz << 3) &
 853						      255;
 854		}
 855
 856		req->len += req->block_sz; /* plus 1 hash block */
 857
 858		/* Set special zero-length HMAC flag */
 859		req->hmac_zlen = true;
 860
 861		/* Finalize HMAC */
 862		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
 863	} else if (req->hmac) {
 864		/* Finalize HMAC */
 865		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
 866	}
 867
 868	return safexcel_ahash_enqueue(areq);
 869}
 870
 871static int safexcel_ahash_finup(struct ahash_request *areq)
 872{
 873	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
 874
 875	req->finish = true;
 876
 877	safexcel_ahash_update(areq);
 878	return safexcel_ahash_final(areq);
 879}
 880
 881static int safexcel_ahash_export(struct ahash_request *areq, void *out)
 882{
 883	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
 884	struct safexcel_ahash_export_state *export = out;
 885
 886	export->len = req->len;
 887	export->processed = req->processed;
 888
 889	export->digest = req->digest;
 890
 891	memcpy(export->state, req->state, req->state_sz);
 892	memcpy(export->cache, req->cache, HASH_CACHE_SIZE);
 893
 894	return 0;
 895}
 896
 897static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
 898{
 899	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
 900	const struct safexcel_ahash_export_state *export = in;
 901	int ret;
 902
 903	ret = crypto_ahash_init(areq);
 904	if (ret)
 905		return ret;
 906
 907	req->len = export->len;
 908	req->processed = export->processed;
 909
 910	req->digest = export->digest;
 911
 912	memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
 913	memcpy(req->state, export->state, req->state_sz);
 914
 915	return 0;
 916}
 917
 918static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
 919{
 920	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
 921	struct safexcel_alg_template *tmpl =
 922		container_of(__crypto_ahash_alg(tfm->__crt_alg),
 923			     struct safexcel_alg_template, alg.ahash);
 924
 925	ctx->base.priv = tmpl->priv;
 926	ctx->base.send = safexcel_ahash_send;
 927	ctx->base.handle_result = safexcel_handle_result;
 928	ctx->fb_do_setkey = false;
 929
 930	crypto_ahash_set_reqsize_dma(__crypto_ahash_cast(tfm),
 931				     sizeof(struct safexcel_ahash_req));
 932	return 0;
 933}
 934
 935static int safexcel_sha1_init(struct ahash_request *areq)
 936{
 937	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
 938	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
 939
 940	memset(req, 0, sizeof(*req));
 941
 942	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
 943	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
 944	req->state_sz = SHA1_DIGEST_SIZE;
 945	req->digest_sz = SHA1_DIGEST_SIZE;
 946	req->block_sz = SHA1_BLOCK_SIZE;
 947
 948	return 0;
 949}
 950
 951static int safexcel_sha1_digest(struct ahash_request *areq)
 952{
 953	int ret = safexcel_sha1_init(areq);
 954
 955	if (ret)
 956		return ret;
 957
 958	return safexcel_ahash_finup(areq);
 959}
 960
 961static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
 962{
 963	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
 964	struct safexcel_crypto_priv *priv = ctx->base.priv;
 965	int ret;
 966
 967	/* context not allocated, skip invalidation */
 968	if (!ctx->base.ctxr)
 969		return;
 970
 971	if (priv->flags & EIP197_TRC_CACHE) {
 972		ret = safexcel_ahash_exit_inv(tfm);
 973		if (ret)
 974			dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
 975	} else {
 976		dma_pool_free(priv->context_pool, ctx->base.ctxr,
 977			      ctx->base.ctxr_dma);
 978	}
 979}
 980
 981struct safexcel_alg_template safexcel_alg_sha1 = {
 982	.type = SAFEXCEL_ALG_TYPE_AHASH,
 983	.algo_mask = SAFEXCEL_ALG_SHA1,
 984	.alg.ahash = {
 985		.init = safexcel_sha1_init,
 986		.update = safexcel_ahash_update,
 987		.final = safexcel_ahash_final,
 988		.finup = safexcel_ahash_finup,
 989		.digest = safexcel_sha1_digest,
 990		.export = safexcel_ahash_export,
 991		.import = safexcel_ahash_import,
 992		.halg = {
 993			.digestsize = SHA1_DIGEST_SIZE,
 994			.statesize = sizeof(struct safexcel_ahash_export_state),
 995			.base = {
 996				.cra_name = "sha1",
 997				.cra_driver_name = "safexcel-sha1",
 998				.cra_priority = SAFEXCEL_CRA_PRIORITY,
 999				.cra_flags = CRYPTO_ALG_ASYNC |
1000					     CRYPTO_ALG_ALLOCATES_MEMORY |
1001					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1002				.cra_blocksize = SHA1_BLOCK_SIZE,
1003				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1004				.cra_init = safexcel_ahash_cra_init,
1005				.cra_exit = safexcel_ahash_cra_exit,
1006				.cra_module = THIS_MODULE,
1007			},
1008		},
1009	},
1010};
1011
1012static int safexcel_hmac_sha1_init(struct ahash_request *areq)
1013{
1014	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1015	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1016
1017	memset(req, 0, sizeof(*req));
1018
1019	/* Start from ipad precompute */
1020	memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
1021	/* Already processed the key^ipad part now! */
1022	req->len	= SHA1_BLOCK_SIZE;
1023	req->processed	= SHA1_BLOCK_SIZE;
1024
1025	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1026	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1027	req->state_sz = SHA1_DIGEST_SIZE;
1028	req->digest_sz = SHA1_DIGEST_SIZE;
1029	req->block_sz = SHA1_BLOCK_SIZE;
1030	req->hmac = true;
1031
1032	return 0;
1033}
1034
1035static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
1036{
1037	int ret = safexcel_hmac_sha1_init(areq);
1038
1039	if (ret)
1040		return ret;
1041
1042	return safexcel_ahash_finup(areq);
1043}
1044
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1045static int safexcel_hmac_init_pad(struct ahash_request *areq,
1046				  unsigned int blocksize, const u8 *key,
1047				  unsigned int keylen, u8 *ipad, u8 *opad)
1048{
1049	DECLARE_CRYPTO_WAIT(result);
1050	struct scatterlist sg;
1051	int ret, i;
1052	u8 *keydup;
1053
1054	if (keylen <= blocksize) {
1055		memcpy(ipad, key, keylen);
1056	} else {
1057		keydup = kmemdup(key, keylen, GFP_KERNEL);
1058		if (!keydup)
1059			return -ENOMEM;
1060
1061		ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1062					   crypto_req_done, &result);
1063		sg_init_one(&sg, keydup, keylen);
1064		ahash_request_set_crypt(areq, &sg, ipad, keylen);
 
1065
1066		ret = crypto_ahash_digest(areq);
1067		ret = crypto_wait_req(ret, &result);
 
 
 
1068
1069		/* Avoid leaking */
1070		kfree_sensitive(keydup);
1071
1072		if (ret)
1073			return ret;
1074
1075		keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
1076	}
1077
1078	memset(ipad + keylen, 0, blocksize - keylen);
1079	memcpy(opad, ipad, blocksize);
1080
1081	for (i = 0; i < blocksize; i++) {
1082		ipad[i] ^= HMAC_IPAD_VALUE;
1083		opad[i] ^= HMAC_OPAD_VALUE;
1084	}
1085
1086	return 0;
1087}
1088
1089static int safexcel_hmac_init_iv(struct ahash_request *areq,
1090				 unsigned int blocksize, u8 *pad, void *state)
1091{
 
1092	struct safexcel_ahash_req *req;
1093	DECLARE_CRYPTO_WAIT(result);
1094	struct scatterlist sg;
1095	int ret;
1096
1097	ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1098				   crypto_req_done, &result);
1099	sg_init_one(&sg, pad, blocksize);
1100	ahash_request_set_crypt(areq, &sg, pad, blocksize);
 
1101
1102	ret = crypto_ahash_init(areq);
1103	if (ret)
1104		return ret;
1105
1106	req = ahash_request_ctx_dma(areq);
1107	req->hmac = true;
1108	req->last_req = true;
1109
1110	ret = crypto_ahash_update(areq);
1111	ret = crypto_wait_req(ret, &result);
 
 
 
 
 
1112
1113	return ret ?: crypto_ahash_export(areq, state);
1114}
1115
1116static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
1117				  unsigned int keylen,
1118				  void *istate, void *ostate)
1119{
1120	struct ahash_request *areq;
1121	struct crypto_ahash *tfm;
1122	unsigned int blocksize;
1123	u8 *ipad, *opad;
1124	int ret;
1125
1126	tfm = crypto_alloc_ahash(alg, 0, 0);
1127	if (IS_ERR(tfm))
1128		return PTR_ERR(tfm);
1129
1130	areq = ahash_request_alloc(tfm, GFP_KERNEL);
1131	if (!areq) {
1132		ret = -ENOMEM;
1133		goto free_ahash;
1134	}
1135
1136	crypto_ahash_clear_flags(tfm, ~0);
1137	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
1138
1139	ipad = kcalloc(2, blocksize, GFP_KERNEL);
1140	if (!ipad) {
1141		ret = -ENOMEM;
1142		goto free_request;
1143	}
1144
1145	opad = ipad + blocksize;
1146
1147	ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
1148	if (ret)
1149		goto free_ipad;
1150
1151	ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
1152	if (ret)
1153		goto free_ipad;
1154
1155	ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);
1156
1157free_ipad:
1158	kfree(ipad);
1159free_request:
1160	ahash_request_free(areq);
1161free_ahash:
1162	crypto_free_ahash(tfm);
1163
1164	return ret;
1165}
1166
1167int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
1168			 unsigned int keylen, const char *alg,
1169			 unsigned int state_sz)
1170{
1171	struct safexcel_crypto_priv *priv = base->priv;
1172	struct safexcel_ahash_export_state istate, ostate;
1173	int ret;
1174
1175	ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
1176	if (ret)
1177		return ret;
1178
1179	if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
1180	    (memcmp(&base->ipad, istate.state, state_sz) ||
1181	     memcmp(&base->opad, ostate.state, state_sz)))
1182		base->needs_inv = true;
1183
1184	memcpy(&base->ipad, &istate.state, state_sz);
1185	memcpy(&base->opad, &ostate.state, state_sz);
1186
1187	return 0;
1188}
1189
1190static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
1191				    unsigned int keylen, const char *alg,
1192				    unsigned int state_sz)
1193{
1194	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
1195
1196	return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
1197}
1198
1199static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
1200				     unsigned int keylen)
1201{
1202	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
1203					SHA1_DIGEST_SIZE);
1204}
1205
1206struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
1207	.type = SAFEXCEL_ALG_TYPE_AHASH,
1208	.algo_mask = SAFEXCEL_ALG_SHA1,
1209	.alg.ahash = {
1210		.init = safexcel_hmac_sha1_init,
1211		.update = safexcel_ahash_update,
1212		.final = safexcel_ahash_final,
1213		.finup = safexcel_ahash_finup,
1214		.digest = safexcel_hmac_sha1_digest,
1215		.setkey = safexcel_hmac_sha1_setkey,
1216		.export = safexcel_ahash_export,
1217		.import = safexcel_ahash_import,
1218		.halg = {
1219			.digestsize = SHA1_DIGEST_SIZE,
1220			.statesize = sizeof(struct safexcel_ahash_export_state),
1221			.base = {
1222				.cra_name = "hmac(sha1)",
1223				.cra_driver_name = "safexcel-hmac-sha1",
1224				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1225				.cra_flags = CRYPTO_ALG_ASYNC |
1226					     CRYPTO_ALG_ALLOCATES_MEMORY |
1227					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1228				.cra_blocksize = SHA1_BLOCK_SIZE,
1229				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1230				.cra_init = safexcel_ahash_cra_init,
1231				.cra_exit = safexcel_ahash_cra_exit,
1232				.cra_module = THIS_MODULE,
1233			},
1234		},
1235	},
1236};
1237
1238static int safexcel_sha256_init(struct ahash_request *areq)
1239{
1240	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1241	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1242
1243	memset(req, 0, sizeof(*req));
1244
1245	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1246	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1247	req->state_sz = SHA256_DIGEST_SIZE;
1248	req->digest_sz = SHA256_DIGEST_SIZE;
1249	req->block_sz = SHA256_BLOCK_SIZE;
1250
1251	return 0;
1252}
1253
1254static int safexcel_sha256_digest(struct ahash_request *areq)
1255{
1256	int ret = safexcel_sha256_init(areq);
1257
1258	if (ret)
1259		return ret;
1260
1261	return safexcel_ahash_finup(areq);
1262}
1263
1264struct safexcel_alg_template safexcel_alg_sha256 = {
1265	.type = SAFEXCEL_ALG_TYPE_AHASH,
1266	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1267	.alg.ahash = {
1268		.init = safexcel_sha256_init,
1269		.update = safexcel_ahash_update,
1270		.final = safexcel_ahash_final,
1271		.finup = safexcel_ahash_finup,
1272		.digest = safexcel_sha256_digest,
1273		.export = safexcel_ahash_export,
1274		.import = safexcel_ahash_import,
1275		.halg = {
1276			.digestsize = SHA256_DIGEST_SIZE,
1277			.statesize = sizeof(struct safexcel_ahash_export_state),
1278			.base = {
1279				.cra_name = "sha256",
1280				.cra_driver_name = "safexcel-sha256",
1281				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1282				.cra_flags = CRYPTO_ALG_ASYNC |
1283					     CRYPTO_ALG_ALLOCATES_MEMORY |
1284					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1285				.cra_blocksize = SHA256_BLOCK_SIZE,
1286				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1287				.cra_init = safexcel_ahash_cra_init,
1288				.cra_exit = safexcel_ahash_cra_exit,
1289				.cra_module = THIS_MODULE,
1290			},
1291		},
1292	},
1293};
1294
1295static int safexcel_sha224_init(struct ahash_request *areq)
1296{
1297	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1298	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1299
1300	memset(req, 0, sizeof(*req));
1301
1302	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1303	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1304	req->state_sz = SHA256_DIGEST_SIZE;
1305	req->digest_sz = SHA256_DIGEST_SIZE;
1306	req->block_sz = SHA256_BLOCK_SIZE;
1307
1308	return 0;
1309}
1310
1311static int safexcel_sha224_digest(struct ahash_request *areq)
1312{
1313	int ret = safexcel_sha224_init(areq);
1314
1315	if (ret)
1316		return ret;
1317
1318	return safexcel_ahash_finup(areq);
1319}
1320
1321struct safexcel_alg_template safexcel_alg_sha224 = {
1322	.type = SAFEXCEL_ALG_TYPE_AHASH,
1323	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1324	.alg.ahash = {
1325		.init = safexcel_sha224_init,
1326		.update = safexcel_ahash_update,
1327		.final = safexcel_ahash_final,
1328		.finup = safexcel_ahash_finup,
1329		.digest = safexcel_sha224_digest,
1330		.export = safexcel_ahash_export,
1331		.import = safexcel_ahash_import,
1332		.halg = {
1333			.digestsize = SHA224_DIGEST_SIZE,
1334			.statesize = sizeof(struct safexcel_ahash_export_state),
1335			.base = {
1336				.cra_name = "sha224",
1337				.cra_driver_name = "safexcel-sha224",
1338				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1339				.cra_flags = CRYPTO_ALG_ASYNC |
1340					     CRYPTO_ALG_ALLOCATES_MEMORY |
1341					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1342				.cra_blocksize = SHA224_BLOCK_SIZE,
1343				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1344				.cra_init = safexcel_ahash_cra_init,
1345				.cra_exit = safexcel_ahash_cra_exit,
1346				.cra_module = THIS_MODULE,
1347			},
1348		},
1349	},
1350};
1351
1352static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
1353				       unsigned int keylen)
1354{
1355	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
1356					SHA256_DIGEST_SIZE);
1357}
1358
1359static int safexcel_hmac_sha224_init(struct ahash_request *areq)
1360{
1361	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1362	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1363
1364	memset(req, 0, sizeof(*req));
1365
1366	/* Start from ipad precompute */
1367	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1368	/* Already processed the key^ipad part now! */
1369	req->len	= SHA256_BLOCK_SIZE;
1370	req->processed	= SHA256_BLOCK_SIZE;
1371
1372	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1373	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1374	req->state_sz = SHA256_DIGEST_SIZE;
1375	req->digest_sz = SHA256_DIGEST_SIZE;
1376	req->block_sz = SHA256_BLOCK_SIZE;
1377	req->hmac = true;
1378
1379	return 0;
1380}
1381
1382static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
1383{
1384	int ret = safexcel_hmac_sha224_init(areq);
1385
1386	if (ret)
1387		return ret;
1388
1389	return safexcel_ahash_finup(areq);
1390}
1391
1392struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
1393	.type = SAFEXCEL_ALG_TYPE_AHASH,
1394	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1395	.alg.ahash = {
1396		.init = safexcel_hmac_sha224_init,
1397		.update = safexcel_ahash_update,
1398		.final = safexcel_ahash_final,
1399		.finup = safexcel_ahash_finup,
1400		.digest = safexcel_hmac_sha224_digest,
1401		.setkey = safexcel_hmac_sha224_setkey,
1402		.export = safexcel_ahash_export,
1403		.import = safexcel_ahash_import,
1404		.halg = {
1405			.digestsize = SHA224_DIGEST_SIZE,
1406			.statesize = sizeof(struct safexcel_ahash_export_state),
1407			.base = {
1408				.cra_name = "hmac(sha224)",
1409				.cra_driver_name = "safexcel-hmac-sha224",
1410				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1411				.cra_flags = CRYPTO_ALG_ASYNC |
1412					     CRYPTO_ALG_ALLOCATES_MEMORY |
1413					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1414				.cra_blocksize = SHA224_BLOCK_SIZE,
1415				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1416				.cra_init = safexcel_ahash_cra_init,
1417				.cra_exit = safexcel_ahash_cra_exit,
1418				.cra_module = THIS_MODULE,
1419			},
1420		},
1421	},
1422};
1423
1424static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
1425				     unsigned int keylen)
1426{
1427	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
1428					SHA256_DIGEST_SIZE);
1429}
1430
1431static int safexcel_hmac_sha256_init(struct ahash_request *areq)
1432{
1433	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1434	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1435
1436	memset(req, 0, sizeof(*req));
1437
1438	/* Start from ipad precompute */
1439	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1440	/* Already processed the key^ipad part now! */
1441	req->len	= SHA256_BLOCK_SIZE;
1442	req->processed	= SHA256_BLOCK_SIZE;
1443
1444	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1445	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1446	req->state_sz = SHA256_DIGEST_SIZE;
1447	req->digest_sz = SHA256_DIGEST_SIZE;
1448	req->block_sz = SHA256_BLOCK_SIZE;
1449	req->hmac = true;
1450
1451	return 0;
1452}
1453
1454static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
1455{
1456	int ret = safexcel_hmac_sha256_init(areq);
1457
1458	if (ret)
1459		return ret;
1460
1461	return safexcel_ahash_finup(areq);
1462}
1463
1464struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
1465	.type = SAFEXCEL_ALG_TYPE_AHASH,
1466	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1467	.alg.ahash = {
1468		.init = safexcel_hmac_sha256_init,
1469		.update = safexcel_ahash_update,
1470		.final = safexcel_ahash_final,
1471		.finup = safexcel_ahash_finup,
1472		.digest = safexcel_hmac_sha256_digest,
1473		.setkey = safexcel_hmac_sha256_setkey,
1474		.export = safexcel_ahash_export,
1475		.import = safexcel_ahash_import,
1476		.halg = {
1477			.digestsize = SHA256_DIGEST_SIZE,
1478			.statesize = sizeof(struct safexcel_ahash_export_state),
1479			.base = {
1480				.cra_name = "hmac(sha256)",
1481				.cra_driver_name = "safexcel-hmac-sha256",
1482				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1483				.cra_flags = CRYPTO_ALG_ASYNC |
1484					     CRYPTO_ALG_ALLOCATES_MEMORY |
1485					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1486				.cra_blocksize = SHA256_BLOCK_SIZE,
1487				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1488				.cra_init = safexcel_ahash_cra_init,
1489				.cra_exit = safexcel_ahash_cra_exit,
1490				.cra_module = THIS_MODULE,
1491			},
1492		},
1493	},
1494};
1495
1496static int safexcel_sha512_init(struct ahash_request *areq)
1497{
1498	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1499	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1500
1501	memset(req, 0, sizeof(*req));
1502
1503	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1504	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1505	req->state_sz = SHA512_DIGEST_SIZE;
1506	req->digest_sz = SHA512_DIGEST_SIZE;
1507	req->block_sz = SHA512_BLOCK_SIZE;
1508
1509	return 0;
1510}
1511
1512static int safexcel_sha512_digest(struct ahash_request *areq)
1513{
1514	int ret = safexcel_sha512_init(areq);
1515
1516	if (ret)
1517		return ret;
1518
1519	return safexcel_ahash_finup(areq);
1520}
1521
1522struct safexcel_alg_template safexcel_alg_sha512 = {
1523	.type = SAFEXCEL_ALG_TYPE_AHASH,
1524	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1525	.alg.ahash = {
1526		.init = safexcel_sha512_init,
1527		.update = safexcel_ahash_update,
1528		.final = safexcel_ahash_final,
1529		.finup = safexcel_ahash_finup,
1530		.digest = safexcel_sha512_digest,
1531		.export = safexcel_ahash_export,
1532		.import = safexcel_ahash_import,
1533		.halg = {
1534			.digestsize = SHA512_DIGEST_SIZE,
1535			.statesize = sizeof(struct safexcel_ahash_export_state),
1536			.base = {
1537				.cra_name = "sha512",
1538				.cra_driver_name = "safexcel-sha512",
1539				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1540				.cra_flags = CRYPTO_ALG_ASYNC |
1541					     CRYPTO_ALG_ALLOCATES_MEMORY |
1542					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1543				.cra_blocksize = SHA512_BLOCK_SIZE,
1544				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1545				.cra_init = safexcel_ahash_cra_init,
1546				.cra_exit = safexcel_ahash_cra_exit,
1547				.cra_module = THIS_MODULE,
1548			},
1549		},
1550	},
1551};
1552
1553static int safexcel_sha384_init(struct ahash_request *areq)
1554{
1555	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1556	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1557
1558	memset(req, 0, sizeof(*req));
1559
1560	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1561	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1562	req->state_sz = SHA512_DIGEST_SIZE;
1563	req->digest_sz = SHA512_DIGEST_SIZE;
1564	req->block_sz = SHA512_BLOCK_SIZE;
1565
1566	return 0;
1567}
1568
1569static int safexcel_sha384_digest(struct ahash_request *areq)
1570{
1571	int ret = safexcel_sha384_init(areq);
1572
1573	if (ret)
1574		return ret;
1575
1576	return safexcel_ahash_finup(areq);
1577}
1578
1579struct safexcel_alg_template safexcel_alg_sha384 = {
1580	.type = SAFEXCEL_ALG_TYPE_AHASH,
1581	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1582	.alg.ahash = {
1583		.init = safexcel_sha384_init,
1584		.update = safexcel_ahash_update,
1585		.final = safexcel_ahash_final,
1586		.finup = safexcel_ahash_finup,
1587		.digest = safexcel_sha384_digest,
1588		.export = safexcel_ahash_export,
1589		.import = safexcel_ahash_import,
1590		.halg = {
1591			.digestsize = SHA384_DIGEST_SIZE,
1592			.statesize = sizeof(struct safexcel_ahash_export_state),
1593			.base = {
1594				.cra_name = "sha384",
1595				.cra_driver_name = "safexcel-sha384",
1596				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1597				.cra_flags = CRYPTO_ALG_ASYNC |
1598					     CRYPTO_ALG_ALLOCATES_MEMORY |
1599					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1600				.cra_blocksize = SHA384_BLOCK_SIZE,
1601				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1602				.cra_init = safexcel_ahash_cra_init,
1603				.cra_exit = safexcel_ahash_cra_exit,
1604				.cra_module = THIS_MODULE,
1605			},
1606		},
1607	},
1608};
1609
1610static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
1611				       unsigned int keylen)
1612{
1613	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
1614					SHA512_DIGEST_SIZE);
1615}
1616
1617static int safexcel_hmac_sha512_init(struct ahash_request *areq)
1618{
1619	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1620	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1621
1622	memset(req, 0, sizeof(*req));
1623
1624	/* Start from ipad precompute */
1625	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1626	/* Already processed the key^ipad part now! */
1627	req->len	= SHA512_BLOCK_SIZE;
1628	req->processed	= SHA512_BLOCK_SIZE;
1629
1630	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1631	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1632	req->state_sz = SHA512_DIGEST_SIZE;
1633	req->digest_sz = SHA512_DIGEST_SIZE;
1634	req->block_sz = SHA512_BLOCK_SIZE;
1635	req->hmac = true;
1636
1637	return 0;
1638}
1639
1640static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
1641{
1642	int ret = safexcel_hmac_sha512_init(areq);
1643
1644	if (ret)
1645		return ret;
1646
1647	return safexcel_ahash_finup(areq);
1648}
1649
1650struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
1651	.type = SAFEXCEL_ALG_TYPE_AHASH,
1652	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1653	.alg.ahash = {
1654		.init = safexcel_hmac_sha512_init,
1655		.update = safexcel_ahash_update,
1656		.final = safexcel_ahash_final,
1657		.finup = safexcel_ahash_finup,
1658		.digest = safexcel_hmac_sha512_digest,
1659		.setkey = safexcel_hmac_sha512_setkey,
1660		.export = safexcel_ahash_export,
1661		.import = safexcel_ahash_import,
1662		.halg = {
1663			.digestsize = SHA512_DIGEST_SIZE,
1664			.statesize = sizeof(struct safexcel_ahash_export_state),
1665			.base = {
1666				.cra_name = "hmac(sha512)",
1667				.cra_driver_name = "safexcel-hmac-sha512",
1668				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1669				.cra_flags = CRYPTO_ALG_ASYNC |
1670					     CRYPTO_ALG_ALLOCATES_MEMORY |
1671					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1672				.cra_blocksize = SHA512_BLOCK_SIZE,
1673				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1674				.cra_init = safexcel_ahash_cra_init,
1675				.cra_exit = safexcel_ahash_cra_exit,
1676				.cra_module = THIS_MODULE,
1677			},
1678		},
1679	},
1680};
1681
1682static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
1683				       unsigned int keylen)
1684{
1685	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
1686					SHA512_DIGEST_SIZE);
1687}
1688
1689static int safexcel_hmac_sha384_init(struct ahash_request *areq)
1690{
1691	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1692	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1693
1694	memset(req, 0, sizeof(*req));
1695
1696	/* Start from ipad precompute */
1697	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1698	/* Already processed the key^ipad part now! */
1699	req->len	= SHA512_BLOCK_SIZE;
1700	req->processed	= SHA512_BLOCK_SIZE;
1701
1702	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1703	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1704	req->state_sz = SHA512_DIGEST_SIZE;
1705	req->digest_sz = SHA512_DIGEST_SIZE;
1706	req->block_sz = SHA512_BLOCK_SIZE;
1707	req->hmac = true;
1708
1709	return 0;
1710}
1711
1712static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
1713{
1714	int ret = safexcel_hmac_sha384_init(areq);
1715
1716	if (ret)
1717		return ret;
1718
1719	return safexcel_ahash_finup(areq);
1720}
1721
1722struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
1723	.type = SAFEXCEL_ALG_TYPE_AHASH,
1724	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1725	.alg.ahash = {
1726		.init = safexcel_hmac_sha384_init,
1727		.update = safexcel_ahash_update,
1728		.final = safexcel_ahash_final,
1729		.finup = safexcel_ahash_finup,
1730		.digest = safexcel_hmac_sha384_digest,
1731		.setkey = safexcel_hmac_sha384_setkey,
1732		.export = safexcel_ahash_export,
1733		.import = safexcel_ahash_import,
1734		.halg = {
1735			.digestsize = SHA384_DIGEST_SIZE,
1736			.statesize = sizeof(struct safexcel_ahash_export_state),
1737			.base = {
1738				.cra_name = "hmac(sha384)",
1739				.cra_driver_name = "safexcel-hmac-sha384",
1740				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1741				.cra_flags = CRYPTO_ALG_ASYNC |
1742					     CRYPTO_ALG_ALLOCATES_MEMORY |
1743					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1744				.cra_blocksize = SHA384_BLOCK_SIZE,
1745				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1746				.cra_init = safexcel_ahash_cra_init,
1747				.cra_exit = safexcel_ahash_cra_exit,
1748				.cra_module = THIS_MODULE,
1749			},
1750		},
1751	},
1752};
1753
1754static int safexcel_md5_init(struct ahash_request *areq)
1755{
1756	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1757	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1758
1759	memset(req, 0, sizeof(*req));
1760
1761	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1762	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1763	req->state_sz = MD5_DIGEST_SIZE;
1764	req->digest_sz = MD5_DIGEST_SIZE;
1765	req->block_sz = MD5_HMAC_BLOCK_SIZE;
1766
1767	return 0;
1768}
1769
1770static int safexcel_md5_digest(struct ahash_request *areq)
1771{
1772	int ret = safexcel_md5_init(areq);
1773
1774	if (ret)
1775		return ret;
1776
1777	return safexcel_ahash_finup(areq);
1778}
1779
1780struct safexcel_alg_template safexcel_alg_md5 = {
1781	.type = SAFEXCEL_ALG_TYPE_AHASH,
1782	.algo_mask = SAFEXCEL_ALG_MD5,
1783	.alg.ahash = {
1784		.init = safexcel_md5_init,
1785		.update = safexcel_ahash_update,
1786		.final = safexcel_ahash_final,
1787		.finup = safexcel_ahash_finup,
1788		.digest = safexcel_md5_digest,
1789		.export = safexcel_ahash_export,
1790		.import = safexcel_ahash_import,
1791		.halg = {
1792			.digestsize = MD5_DIGEST_SIZE,
1793			.statesize = sizeof(struct safexcel_ahash_export_state),
1794			.base = {
1795				.cra_name = "md5",
1796				.cra_driver_name = "safexcel-md5",
1797				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1798				.cra_flags = CRYPTO_ALG_ASYNC |
1799					     CRYPTO_ALG_ALLOCATES_MEMORY |
1800					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1801				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1802				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1803				.cra_init = safexcel_ahash_cra_init,
1804				.cra_exit = safexcel_ahash_cra_exit,
1805				.cra_module = THIS_MODULE,
1806			},
1807		},
1808	},
1809};
1810
1811static int safexcel_hmac_md5_init(struct ahash_request *areq)
1812{
1813	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1814	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1815
1816	memset(req, 0, sizeof(*req));
1817
1818	/* Start from ipad precompute */
1819	memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
1820	/* Already processed the key^ipad part now! */
1821	req->len	= MD5_HMAC_BLOCK_SIZE;
1822	req->processed	= MD5_HMAC_BLOCK_SIZE;
1823
1824	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1825	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1826	req->state_sz = MD5_DIGEST_SIZE;
1827	req->digest_sz = MD5_DIGEST_SIZE;
1828	req->block_sz = MD5_HMAC_BLOCK_SIZE;
1829	req->len_is_le = true; /* MD5 is little endian! ... */
1830	req->hmac = true;
1831
1832	return 0;
1833}
1834
1835static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
1836				     unsigned int keylen)
1837{
1838	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
1839					MD5_DIGEST_SIZE);
1840}
1841
1842static int safexcel_hmac_md5_digest(struct ahash_request *areq)
1843{
1844	int ret = safexcel_hmac_md5_init(areq);
1845
1846	if (ret)
1847		return ret;
1848
1849	return safexcel_ahash_finup(areq);
1850}
1851
1852struct safexcel_alg_template safexcel_alg_hmac_md5 = {
1853	.type = SAFEXCEL_ALG_TYPE_AHASH,
1854	.algo_mask = SAFEXCEL_ALG_MD5,
1855	.alg.ahash = {
1856		.init = safexcel_hmac_md5_init,
1857		.update = safexcel_ahash_update,
1858		.final = safexcel_ahash_final,
1859		.finup = safexcel_ahash_finup,
1860		.digest = safexcel_hmac_md5_digest,
1861		.setkey = safexcel_hmac_md5_setkey,
1862		.export = safexcel_ahash_export,
1863		.import = safexcel_ahash_import,
1864		.halg = {
1865			.digestsize = MD5_DIGEST_SIZE,
1866			.statesize = sizeof(struct safexcel_ahash_export_state),
1867			.base = {
1868				.cra_name = "hmac(md5)",
1869				.cra_driver_name = "safexcel-hmac-md5",
1870				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1871				.cra_flags = CRYPTO_ALG_ASYNC |
1872					     CRYPTO_ALG_ALLOCATES_MEMORY |
1873					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1874				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1875				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1876				.cra_init = safexcel_ahash_cra_init,
1877				.cra_exit = safexcel_ahash_cra_exit,
1878				.cra_module = THIS_MODULE,
1879			},
1880		},
1881	},
1882};
1883
1884static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
1885{
1886	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
1887	int ret = safexcel_ahash_cra_init(tfm);
1888
1889	/* Default 'key' is all zeroes */
1890	memset(&ctx->base.ipad, 0, sizeof(u32));
1891	return ret;
1892}
1893
1894static int safexcel_crc32_init(struct ahash_request *areq)
1895{
1896	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1897	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1898
1899	memset(req, 0, sizeof(*req));
1900
1901	/* Start from loaded key */
1902	req->state[0]	= cpu_to_le32(~ctx->base.ipad.word[0]);
1903	/* Set processed to non-zero to enable invalidation detection */
1904	req->len	= sizeof(u32);
1905	req->processed	= sizeof(u32);
1906
1907	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
1908	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1909	req->state_sz = sizeof(u32);
1910	req->digest_sz = sizeof(u32);
1911	req->block_sz = sizeof(u32);
1912
1913	return 0;
1914}
1915
1916static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
1917				 unsigned int keylen)
1918{
1919	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1920
1921	if (keylen != sizeof(u32))
1922		return -EINVAL;
1923
1924	memcpy(&ctx->base.ipad, key, sizeof(u32));
1925	return 0;
1926}
1927
1928static int safexcel_crc32_digest(struct ahash_request *areq)
1929{
1930	return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
1931}
1932
1933struct safexcel_alg_template safexcel_alg_crc32 = {
1934	.type = SAFEXCEL_ALG_TYPE_AHASH,
1935	.algo_mask = 0,
1936	.alg.ahash = {
1937		.init = safexcel_crc32_init,
1938		.update = safexcel_ahash_update,
1939		.final = safexcel_ahash_final,
1940		.finup = safexcel_ahash_finup,
1941		.digest = safexcel_crc32_digest,
1942		.setkey = safexcel_crc32_setkey,
1943		.export = safexcel_ahash_export,
1944		.import = safexcel_ahash_import,
1945		.halg = {
1946			.digestsize = sizeof(u32),
1947			.statesize = sizeof(struct safexcel_ahash_export_state),
1948			.base = {
1949				.cra_name = "crc32",
1950				.cra_driver_name = "safexcel-crc32",
1951				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1952				.cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
1953					     CRYPTO_ALG_ASYNC |
1954					     CRYPTO_ALG_ALLOCATES_MEMORY |
1955					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1956				.cra_blocksize = 1,
1957				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1958				.cra_init = safexcel_crc32_cra_init,
1959				.cra_exit = safexcel_ahash_cra_exit,
1960				.cra_module = THIS_MODULE,
1961			},
1962		},
1963	},
1964};
1965
1966static int safexcel_cbcmac_init(struct ahash_request *areq)
1967{
1968	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1969	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1970
1971	memset(req, 0, sizeof(*req));
1972
1973	/* Start from loaded keys */
1974	memcpy(req->state, &ctx->base.ipad, ctx->key_sz);
1975	/* Set processed to non-zero to enable invalidation detection */
1976	req->len	= AES_BLOCK_SIZE;
1977	req->processed	= AES_BLOCK_SIZE;
1978
1979	req->digest   = CONTEXT_CONTROL_DIGEST_XCM;
1980	req->state_sz = ctx->key_sz;
1981	req->digest_sz = AES_BLOCK_SIZE;
1982	req->block_sz = AES_BLOCK_SIZE;
1983	req->xcbcmac  = true;
1984
1985	return 0;
1986}
1987
1988static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
1989				 unsigned int len)
1990{
1991	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1992	struct crypto_aes_ctx aes;
1993	int ret, i;
1994
1995	ret = aes_expandkey(&aes, key, len);
1996	if (ret)
1997		return ret;
1998
1999	memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE);
2000	for (i = 0; i < len / sizeof(u32); i++)
2001		ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);
2002
2003	if (len == AES_KEYSIZE_192) {
2004		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2005		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2006	} else if (len == AES_KEYSIZE_256) {
2007		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2008		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2009	} else {
2010		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2011		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2012	}
2013	ctx->cbcmac  = true;
2014
2015	memzero_explicit(&aes, sizeof(aes));
2016	return 0;
2017}
2018
2019static int safexcel_cbcmac_digest(struct ahash_request *areq)
2020{
2021	return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
2022}
2023
2024struct safexcel_alg_template safexcel_alg_cbcmac = {
2025	.type = SAFEXCEL_ALG_TYPE_AHASH,
2026	.algo_mask = 0,
2027	.alg.ahash = {
2028		.init = safexcel_cbcmac_init,
2029		.update = safexcel_ahash_update,
2030		.final = safexcel_ahash_final,
2031		.finup = safexcel_ahash_finup,
2032		.digest = safexcel_cbcmac_digest,
2033		.setkey = safexcel_cbcmac_setkey,
2034		.export = safexcel_ahash_export,
2035		.import = safexcel_ahash_import,
2036		.halg = {
2037			.digestsize = AES_BLOCK_SIZE,
2038			.statesize = sizeof(struct safexcel_ahash_export_state),
2039			.base = {
2040				.cra_name = "cbcmac(aes)",
2041				.cra_driver_name = "safexcel-cbcmac-aes",
2042				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2043				.cra_flags = CRYPTO_ALG_ASYNC |
2044					     CRYPTO_ALG_ALLOCATES_MEMORY |
2045					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2046				.cra_blocksize = 1,
2047				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2048				.cra_init = safexcel_ahash_cra_init,
2049				.cra_exit = safexcel_ahash_cra_exit,
2050				.cra_module = THIS_MODULE,
2051			},
2052		},
2053	},
2054};
2055
2056static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2057				 unsigned int len)
2058{
2059	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
 
2060	u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
2061	int ret, i;
2062
2063	ret = aes_expandkey(ctx->aes, key, len);
2064	if (ret)
2065		return ret;
2066
2067	/* precompute the XCBC key material */
2068	aes_encrypt(ctx->aes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2069		    "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
2070	aes_encrypt(ctx->aes, (u8 *)key_tmp,
2071		    "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
2072	aes_encrypt(ctx->aes, (u8 *)key_tmp + AES_BLOCK_SIZE,
2073		    "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
 
 
 
 
 
 
 
2074	for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2075		ctx->base.ipad.word[i] = swab32(key_tmp[i]);
2076
2077	ret = aes_expandkey(ctx->aes,
2078			    (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2079			    AES_MIN_KEY_SIZE);
 
 
 
2080	if (ret)
2081		return ret;
2082
2083	ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2084	ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2085	ctx->cbcmac = false;
2086
 
2087	return 0;
2088}
2089
2090static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2091{
2092	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2093
2094	safexcel_ahash_cra_init(tfm);
2095	ctx->aes = kmalloc(sizeof(*ctx->aes), GFP_KERNEL);
2096	return PTR_ERR_OR_ZERO(ctx->aes);
2097}
2098
2099static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
2100{
2101	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2102
2103	kfree(ctx->aes);
2104	safexcel_ahash_cra_exit(tfm);
2105}
2106
2107struct safexcel_alg_template safexcel_alg_xcbcmac = {
2108	.type = SAFEXCEL_ALG_TYPE_AHASH,
2109	.algo_mask = 0,
2110	.alg.ahash = {
2111		.init = safexcel_cbcmac_init,
2112		.update = safexcel_ahash_update,
2113		.final = safexcel_ahash_final,
2114		.finup = safexcel_ahash_finup,
2115		.digest = safexcel_cbcmac_digest,
2116		.setkey = safexcel_xcbcmac_setkey,
2117		.export = safexcel_ahash_export,
2118		.import = safexcel_ahash_import,
2119		.halg = {
2120			.digestsize = AES_BLOCK_SIZE,
2121			.statesize = sizeof(struct safexcel_ahash_export_state),
2122			.base = {
2123				.cra_name = "xcbc(aes)",
2124				.cra_driver_name = "safexcel-xcbc-aes",
2125				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2126				.cra_flags = CRYPTO_ALG_ASYNC |
2127					     CRYPTO_ALG_ALLOCATES_MEMORY |
2128					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2129				.cra_blocksize = AES_BLOCK_SIZE,
2130				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2131				.cra_init = safexcel_xcbcmac_cra_init,
2132				.cra_exit = safexcel_xcbcmac_cra_exit,
2133				.cra_module = THIS_MODULE,
2134			},
2135		},
2136	},
2137};
2138
2139static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2140				unsigned int len)
2141{
2142	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
 
2143	__be64 consts[4];
2144	u64 _const[2];
2145	u8 msb_mask, gfmask;
2146	int ret, i;
2147
2148	/* precompute the CMAC key material */
2149	ret = aes_expandkey(ctx->aes, key, len);
2150	if (ret)
2151		return ret;
2152
2153	for (i = 0; i < len / sizeof(u32); i++)
2154		ctx->base.ipad.word[i + 8] = swab32(ctx->aes->key_enc[i]);
 
 
 
 
 
 
 
 
2155
2156	/* code below borrowed from crypto/cmac.c */
2157	/* encrypt the zero block */
2158	memset(consts, 0, AES_BLOCK_SIZE);
2159	aes_encrypt(ctx->aes, (u8 *)consts, (u8 *)consts);
2160
2161	gfmask = 0x87;
2162	_const[0] = be64_to_cpu(consts[1]);
2163	_const[1] = be64_to_cpu(consts[0]);
2164
2165	/* gf(2^128) multiply zero-ciphertext with u and u^2 */
2166	for (i = 0; i < 4; i += 2) {
2167		msb_mask = ((s64)_const[1] >> 63) & gfmask;
2168		_const[1] = (_const[1] << 1) | (_const[0] >> 63);
2169		_const[0] = (_const[0] << 1) ^ msb_mask;
2170
2171		consts[i + 0] = cpu_to_be64(_const[1]);
2172		consts[i + 1] = cpu_to_be64(_const[0]);
2173	}
2174	/* end of code borrowed from crypto/cmac.c */
2175
2176	for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2177		ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]);
2178
2179	if (len == AES_KEYSIZE_192) {
2180		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2181		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2182	} else if (len == AES_KEYSIZE_256) {
2183		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2184		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2185	} else {
2186		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2187		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2188	}
2189	ctx->cbcmac = false;
2190
 
2191	return 0;
2192}
2193
2194struct safexcel_alg_template safexcel_alg_cmac = {
2195	.type = SAFEXCEL_ALG_TYPE_AHASH,
2196	.algo_mask = 0,
2197	.alg.ahash = {
2198		.init = safexcel_cbcmac_init,
2199		.update = safexcel_ahash_update,
2200		.final = safexcel_ahash_final,
2201		.finup = safexcel_ahash_finup,
2202		.digest = safexcel_cbcmac_digest,
2203		.setkey = safexcel_cmac_setkey,
2204		.export = safexcel_ahash_export,
2205		.import = safexcel_ahash_import,
2206		.halg = {
2207			.digestsize = AES_BLOCK_SIZE,
2208			.statesize = sizeof(struct safexcel_ahash_export_state),
2209			.base = {
2210				.cra_name = "cmac(aes)",
2211				.cra_driver_name = "safexcel-cmac-aes",
2212				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2213				.cra_flags = CRYPTO_ALG_ASYNC |
2214					     CRYPTO_ALG_ALLOCATES_MEMORY |
2215					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2216				.cra_blocksize = AES_BLOCK_SIZE,
2217				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2218				.cra_init = safexcel_xcbcmac_cra_init,
2219				.cra_exit = safexcel_xcbcmac_cra_exit,
2220				.cra_module = THIS_MODULE,
2221			},
2222		},
2223	},
2224};
2225
2226static int safexcel_sm3_init(struct ahash_request *areq)
2227{
2228	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2229	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2230
2231	memset(req, 0, sizeof(*req));
2232
2233	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2234	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2235	req->state_sz = SM3_DIGEST_SIZE;
2236	req->digest_sz = SM3_DIGEST_SIZE;
2237	req->block_sz = SM3_BLOCK_SIZE;
2238
2239	return 0;
2240}
2241
2242static int safexcel_sm3_digest(struct ahash_request *areq)
2243{
2244	int ret = safexcel_sm3_init(areq);
2245
2246	if (ret)
2247		return ret;
2248
2249	return safexcel_ahash_finup(areq);
2250}
2251
2252struct safexcel_alg_template safexcel_alg_sm3 = {
2253	.type = SAFEXCEL_ALG_TYPE_AHASH,
2254	.algo_mask = SAFEXCEL_ALG_SM3,
2255	.alg.ahash = {
2256		.init = safexcel_sm3_init,
2257		.update = safexcel_ahash_update,
2258		.final = safexcel_ahash_final,
2259		.finup = safexcel_ahash_finup,
2260		.digest = safexcel_sm3_digest,
2261		.export = safexcel_ahash_export,
2262		.import = safexcel_ahash_import,
2263		.halg = {
2264			.digestsize = SM3_DIGEST_SIZE,
2265			.statesize = sizeof(struct safexcel_ahash_export_state),
2266			.base = {
2267				.cra_name = "sm3",
2268				.cra_driver_name = "safexcel-sm3",
2269				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2270				.cra_flags = CRYPTO_ALG_ASYNC |
2271					     CRYPTO_ALG_ALLOCATES_MEMORY |
2272					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2273				.cra_blocksize = SM3_BLOCK_SIZE,
2274				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2275				.cra_init = safexcel_ahash_cra_init,
2276				.cra_exit = safexcel_ahash_cra_exit,
2277				.cra_module = THIS_MODULE,
2278			},
2279		},
2280	},
2281};
2282
2283static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
2284				    unsigned int keylen)
2285{
2286	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
2287					SM3_DIGEST_SIZE);
2288}
2289
2290static int safexcel_hmac_sm3_init(struct ahash_request *areq)
2291{
2292	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2293	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2294
2295	memset(req, 0, sizeof(*req));
2296
2297	/* Start from ipad precompute */
2298	memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE);
2299	/* Already processed the key^ipad part now! */
2300	req->len	= SM3_BLOCK_SIZE;
2301	req->processed	= SM3_BLOCK_SIZE;
2302
2303	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2304	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2305	req->state_sz = SM3_DIGEST_SIZE;
2306	req->digest_sz = SM3_DIGEST_SIZE;
2307	req->block_sz = SM3_BLOCK_SIZE;
2308	req->hmac = true;
2309
2310	return 0;
2311}
2312
2313static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
2314{
2315	int ret = safexcel_hmac_sm3_init(areq);
2316
2317	if (ret)
2318		return ret;
2319
2320	return safexcel_ahash_finup(areq);
2321}
2322
2323struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
2324	.type = SAFEXCEL_ALG_TYPE_AHASH,
2325	.algo_mask = SAFEXCEL_ALG_SM3,
2326	.alg.ahash = {
2327		.init = safexcel_hmac_sm3_init,
2328		.update = safexcel_ahash_update,
2329		.final = safexcel_ahash_final,
2330		.finup = safexcel_ahash_finup,
2331		.digest = safexcel_hmac_sm3_digest,
2332		.setkey = safexcel_hmac_sm3_setkey,
2333		.export = safexcel_ahash_export,
2334		.import = safexcel_ahash_import,
2335		.halg = {
2336			.digestsize = SM3_DIGEST_SIZE,
2337			.statesize = sizeof(struct safexcel_ahash_export_state),
2338			.base = {
2339				.cra_name = "hmac(sm3)",
2340				.cra_driver_name = "safexcel-hmac-sm3",
2341				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2342				.cra_flags = CRYPTO_ALG_ASYNC |
2343					     CRYPTO_ALG_ALLOCATES_MEMORY |
2344					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2345				.cra_blocksize = SM3_BLOCK_SIZE,
2346				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2347				.cra_init = safexcel_ahash_cra_init,
2348				.cra_exit = safexcel_ahash_cra_exit,
2349				.cra_module = THIS_MODULE,
2350			},
2351		},
2352	},
2353};
2354
2355static int safexcel_sha3_224_init(struct ahash_request *areq)
2356{
2357	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2358	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2359	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2360
2361	memset(req, 0, sizeof(*req));
2362
2363	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2364	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2365	req->state_sz = SHA3_224_DIGEST_SIZE;
2366	req->digest_sz = SHA3_224_DIGEST_SIZE;
2367	req->block_sz = SHA3_224_BLOCK_SIZE;
2368	ctx->do_fallback = false;
2369	ctx->fb_init_done = false;
2370	return 0;
2371}
2372
2373static int safexcel_sha3_fbcheck(struct ahash_request *req)
2374{
2375	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2376	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2377	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2378	int ret = 0;
2379
2380	if (ctx->do_fallback) {
2381		ahash_request_set_tfm(subreq, ctx->fback);
2382		ahash_request_set_callback(subreq, req->base.flags,
2383					   req->base.complete, req->base.data);
2384		ahash_request_set_crypt(subreq, req->src, req->result,
2385					req->nbytes);
2386		if (!ctx->fb_init_done) {
2387			if (ctx->fb_do_setkey) {
2388				/* Set fallback cipher HMAC key */
2389				u8 key[SHA3_224_BLOCK_SIZE];
2390
2391				memcpy(key, &ctx->base.ipad,
2392				       crypto_ahash_blocksize(ctx->fback) / 2);
2393				memcpy(key +
2394				       crypto_ahash_blocksize(ctx->fback) / 2,
2395				       &ctx->base.opad,
2396				       crypto_ahash_blocksize(ctx->fback) / 2);
2397				ret = crypto_ahash_setkey(ctx->fback, key,
2398					crypto_ahash_blocksize(ctx->fback));
2399				memzero_explicit(key,
2400					crypto_ahash_blocksize(ctx->fback));
2401				ctx->fb_do_setkey = false;
2402			}
2403			ret = ret ?: crypto_ahash_init(subreq);
2404			ctx->fb_init_done = true;
2405		}
2406	}
2407	return ret;
2408}
2409
2410static int safexcel_sha3_update(struct ahash_request *req)
2411{
2412	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2413	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2414	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2415
2416	ctx->do_fallback = true;
2417	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
2418}
2419
2420static int safexcel_sha3_final(struct ahash_request *req)
2421{
2422	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2423	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2424	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2425
2426	ctx->do_fallback = true;
2427	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
2428}
2429
2430static int safexcel_sha3_finup(struct ahash_request *req)
2431{
2432	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2433	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2434	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2435
2436	ctx->do_fallback |= !req->nbytes;
2437	if (ctx->do_fallback)
2438		/* Update or ex/import happened or len 0, cannot use the HW */
2439		return safexcel_sha3_fbcheck(req) ?:
2440		       crypto_ahash_finup(subreq);
2441	else
2442		return safexcel_ahash_finup(req);
2443}
2444
2445static int safexcel_sha3_digest_fallback(struct ahash_request *req)
2446{
2447	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2448	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2449	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2450
2451	ctx->do_fallback = true;
2452	ctx->fb_init_done = false;
2453	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
2454}
2455
2456static int safexcel_sha3_224_digest(struct ahash_request *req)
2457{
2458	if (req->nbytes)
2459		return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
2460
2461	/* HW cannot do zero length hash, use fallback instead */
2462	return safexcel_sha3_digest_fallback(req);
2463}
2464
2465static int safexcel_sha3_export(struct ahash_request *req, void *out)
2466{
2467	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2468	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2469	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2470
2471	ctx->do_fallback = true;
2472	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
2473}
2474
2475static int safexcel_sha3_import(struct ahash_request *req, const void *in)
2476{
2477	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2478	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2479	struct ahash_request *subreq = ahash_request_ctx_dma(req);
2480
2481	ctx->do_fallback = true;
2482	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
2483	// return safexcel_ahash_import(req, in);
2484}
2485
2486static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
2487{
2488	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
2489	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2490
2491	safexcel_ahash_cra_init(tfm);
2492
2493	/* Allocate fallback implementation */
2494	ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
2495					CRYPTO_ALG_ASYNC |
2496					CRYPTO_ALG_NEED_FALLBACK);
2497	if (IS_ERR(ctx->fback))
2498		return PTR_ERR(ctx->fback);
2499
2500	/* Update statesize from fallback algorithm! */
2501	crypto_hash_alg_common(ahash)->statesize =
2502		crypto_ahash_statesize(ctx->fback);
2503	crypto_ahash_set_reqsize_dma(
2504		ahash, max(sizeof(struct safexcel_ahash_req),
2505			   sizeof(struct ahash_request) +
2506			   crypto_ahash_reqsize(ctx->fback)));
2507	return 0;
2508}
2509
2510static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
2511{
2512	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2513
2514	crypto_free_ahash(ctx->fback);
2515	safexcel_ahash_cra_exit(tfm);
2516}
2517
2518struct safexcel_alg_template safexcel_alg_sha3_224 = {
2519	.type = SAFEXCEL_ALG_TYPE_AHASH,
2520	.algo_mask = SAFEXCEL_ALG_SHA3,
2521	.alg.ahash = {
2522		.init = safexcel_sha3_224_init,
2523		.update = safexcel_sha3_update,
2524		.final = safexcel_sha3_final,
2525		.finup = safexcel_sha3_finup,
2526		.digest = safexcel_sha3_224_digest,
2527		.export = safexcel_sha3_export,
2528		.import = safexcel_sha3_import,
2529		.halg = {
2530			.digestsize = SHA3_224_DIGEST_SIZE,
2531			.statesize = sizeof(struct safexcel_ahash_export_state),
2532			.base = {
2533				.cra_name = "sha3-224",
2534				.cra_driver_name = "safexcel-sha3-224",
2535				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2536				.cra_flags = CRYPTO_ALG_ASYNC |
2537					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2538					     CRYPTO_ALG_NEED_FALLBACK,
2539				.cra_blocksize = SHA3_224_BLOCK_SIZE,
2540				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2541				.cra_init = safexcel_sha3_cra_init,
2542				.cra_exit = safexcel_sha3_cra_exit,
2543				.cra_module = THIS_MODULE,
2544			},
2545		},
2546	},
2547};
2548
2549static int safexcel_sha3_256_init(struct ahash_request *areq)
2550{
2551	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2552	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2553	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2554
2555	memset(req, 0, sizeof(*req));
2556
2557	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2558	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2559	req->state_sz = SHA3_256_DIGEST_SIZE;
2560	req->digest_sz = SHA3_256_DIGEST_SIZE;
2561	req->block_sz = SHA3_256_BLOCK_SIZE;
2562	ctx->do_fallback = false;
2563	ctx->fb_init_done = false;
2564	return 0;
2565}
2566
2567static int safexcel_sha3_256_digest(struct ahash_request *req)
2568{
2569	if (req->nbytes)
2570		return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
2571
2572	/* HW cannot do zero length hash, use fallback instead */
2573	return safexcel_sha3_digest_fallback(req);
2574}
2575
2576struct safexcel_alg_template safexcel_alg_sha3_256 = {
2577	.type = SAFEXCEL_ALG_TYPE_AHASH,
2578	.algo_mask = SAFEXCEL_ALG_SHA3,
2579	.alg.ahash = {
2580		.init = safexcel_sha3_256_init,
2581		.update = safexcel_sha3_update,
2582		.final = safexcel_sha3_final,
2583		.finup = safexcel_sha3_finup,
2584		.digest = safexcel_sha3_256_digest,
2585		.export = safexcel_sha3_export,
2586		.import = safexcel_sha3_import,
2587		.halg = {
2588			.digestsize = SHA3_256_DIGEST_SIZE,
2589			.statesize = sizeof(struct safexcel_ahash_export_state),
2590			.base = {
2591				.cra_name = "sha3-256",
2592				.cra_driver_name = "safexcel-sha3-256",
2593				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2594				.cra_flags = CRYPTO_ALG_ASYNC |
2595					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2596					     CRYPTO_ALG_NEED_FALLBACK,
2597				.cra_blocksize = SHA3_256_BLOCK_SIZE,
2598				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2599				.cra_init = safexcel_sha3_cra_init,
2600				.cra_exit = safexcel_sha3_cra_exit,
2601				.cra_module = THIS_MODULE,
2602			},
2603		},
2604	},
2605};
2606
2607static int safexcel_sha3_384_init(struct ahash_request *areq)
2608{
2609	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2610	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2611	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2612
2613	memset(req, 0, sizeof(*req));
2614
2615	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2616	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2617	req->state_sz = SHA3_384_DIGEST_SIZE;
2618	req->digest_sz = SHA3_384_DIGEST_SIZE;
2619	req->block_sz = SHA3_384_BLOCK_SIZE;
2620	ctx->do_fallback = false;
2621	ctx->fb_init_done = false;
2622	return 0;
2623}
2624
2625static int safexcel_sha3_384_digest(struct ahash_request *req)
2626{
2627	if (req->nbytes)
2628		return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
2629
2630	/* HW cannot do zero length hash, use fallback instead */
2631	return safexcel_sha3_digest_fallback(req);
2632}
2633
2634struct safexcel_alg_template safexcel_alg_sha3_384 = {
2635	.type = SAFEXCEL_ALG_TYPE_AHASH,
2636	.algo_mask = SAFEXCEL_ALG_SHA3,
2637	.alg.ahash = {
2638		.init = safexcel_sha3_384_init,
2639		.update = safexcel_sha3_update,
2640		.final = safexcel_sha3_final,
2641		.finup = safexcel_sha3_finup,
2642		.digest = safexcel_sha3_384_digest,
2643		.export = safexcel_sha3_export,
2644		.import = safexcel_sha3_import,
2645		.halg = {
2646			.digestsize = SHA3_384_DIGEST_SIZE,
2647			.statesize = sizeof(struct safexcel_ahash_export_state),
2648			.base = {
2649				.cra_name = "sha3-384",
2650				.cra_driver_name = "safexcel-sha3-384",
2651				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2652				.cra_flags = CRYPTO_ALG_ASYNC |
2653					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2654					     CRYPTO_ALG_NEED_FALLBACK,
2655				.cra_blocksize = SHA3_384_BLOCK_SIZE,
2656				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2657				.cra_init = safexcel_sha3_cra_init,
2658				.cra_exit = safexcel_sha3_cra_exit,
2659				.cra_module = THIS_MODULE,
2660			},
2661		},
2662	},
2663};
2664
2665static int safexcel_sha3_512_init(struct ahash_request *areq)
2666{
2667	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2668	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2669	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2670
2671	memset(req, 0, sizeof(*req));
2672
2673	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
2674	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2675	req->state_sz = SHA3_512_DIGEST_SIZE;
2676	req->digest_sz = SHA3_512_DIGEST_SIZE;
2677	req->block_sz = SHA3_512_BLOCK_SIZE;
2678	ctx->do_fallback = false;
2679	ctx->fb_init_done = false;
2680	return 0;
2681}
2682
2683static int safexcel_sha3_512_digest(struct ahash_request *req)
2684{
2685	if (req->nbytes)
2686		return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
2687
2688	/* HW cannot do zero length hash, use fallback instead */
2689	return safexcel_sha3_digest_fallback(req);
2690}
2691
2692struct safexcel_alg_template safexcel_alg_sha3_512 = {
2693	.type = SAFEXCEL_ALG_TYPE_AHASH,
2694	.algo_mask = SAFEXCEL_ALG_SHA3,
2695	.alg.ahash = {
2696		.init = safexcel_sha3_512_init,
2697		.update = safexcel_sha3_update,
2698		.final = safexcel_sha3_final,
2699		.finup = safexcel_sha3_finup,
2700		.digest = safexcel_sha3_512_digest,
2701		.export = safexcel_sha3_export,
2702		.import = safexcel_sha3_import,
2703		.halg = {
2704			.digestsize = SHA3_512_DIGEST_SIZE,
2705			.statesize = sizeof(struct safexcel_ahash_export_state),
2706			.base = {
2707				.cra_name = "sha3-512",
2708				.cra_driver_name = "safexcel-sha3-512",
2709				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2710				.cra_flags = CRYPTO_ALG_ASYNC |
2711					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2712					     CRYPTO_ALG_NEED_FALLBACK,
2713				.cra_blocksize = SHA3_512_BLOCK_SIZE,
2714				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2715				.cra_init = safexcel_sha3_cra_init,
2716				.cra_exit = safexcel_sha3_cra_exit,
2717				.cra_module = THIS_MODULE,
2718			},
2719		},
2720	},
2721};
2722
2723static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
2724{
2725	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2726	int ret;
2727
2728	ret = safexcel_sha3_cra_init(tfm);
2729	if (ret)
2730		return ret;
2731
2732	/* Allocate precalc basic digest implementation */
2733	ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
2734	if (IS_ERR(ctx->shpre))
2735		return PTR_ERR(ctx->shpre);
2736
2737	ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
2738			      crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
2739	if (!ctx->shdesc) {
2740		crypto_free_shash(ctx->shpre);
2741		return -ENOMEM;
2742	}
2743	ctx->shdesc->tfm = ctx->shpre;
2744	return 0;
2745}
2746
2747static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
2748{
2749	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2750
2751	crypto_free_ahash(ctx->fback);
2752	crypto_free_shash(ctx->shpre);
2753	kfree(ctx->shdesc);
2754	safexcel_ahash_cra_exit(tfm);
2755}
2756
2757static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
2758				     unsigned int keylen)
2759{
2760	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2761	int ret = 0;
2762
2763	if (keylen > crypto_ahash_blocksize(tfm)) {
2764		/*
2765		 * If the key is larger than the blocksize, then hash it
2766		 * first using our fallback cipher
2767		 */
2768		ret = crypto_shash_digest(ctx->shdesc, key, keylen,
2769					  ctx->base.ipad.byte);
2770		keylen = crypto_shash_digestsize(ctx->shpre);
2771
2772		/*
2773		 * If the digest is larger than half the blocksize, we need to
2774		 * move the rest to opad due to the way our HMAC infra works.
2775		 */
2776		if (keylen > crypto_ahash_blocksize(tfm) / 2)
2777			/* Buffers overlap, need to use memmove iso memcpy! */
2778			memmove(&ctx->base.opad,
2779				ctx->base.ipad.byte +
2780					crypto_ahash_blocksize(tfm) / 2,
2781				keylen - crypto_ahash_blocksize(tfm) / 2);
2782	} else {
2783		/*
2784		 * Copy the key to our ipad & opad buffers
2785		 * Note that ipad and opad each contain one half of the key,
2786		 * to match the existing HMAC driver infrastructure.
2787		 */
2788		if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2789			memcpy(&ctx->base.ipad, key, keylen);
2790		} else {
2791			memcpy(&ctx->base.ipad, key,
2792			       crypto_ahash_blocksize(tfm) / 2);
2793			memcpy(&ctx->base.opad,
2794			       key + crypto_ahash_blocksize(tfm) / 2,
2795			       keylen - crypto_ahash_blocksize(tfm) / 2);
2796		}
2797	}
2798
2799	/* Pad key with zeroes */
2800	if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2801		memset(ctx->base.ipad.byte + keylen, 0,
2802		       crypto_ahash_blocksize(tfm) / 2 - keylen);
2803		memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
2804	} else {
2805		memset(ctx->base.opad.byte + keylen -
2806		       crypto_ahash_blocksize(tfm) / 2, 0,
2807		       crypto_ahash_blocksize(tfm) - keylen);
2808	}
2809
2810	/* If doing fallback, still need to set the new key! */
2811	ctx->fb_do_setkey = true;
2812	return ret;
2813}
2814
2815static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
2816{
2817	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2818	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2819	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2820
2821	memset(req, 0, sizeof(*req));
2822
2823	/* Copy (half of) the key */
2824	memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
2825	/* Start of HMAC should have len == processed == blocksize */
2826	req->len	= SHA3_224_BLOCK_SIZE;
2827	req->processed	= SHA3_224_BLOCK_SIZE;
2828	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2829	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2830	req->state_sz = SHA3_224_BLOCK_SIZE / 2;
2831	req->digest_sz = SHA3_224_DIGEST_SIZE;
2832	req->block_sz = SHA3_224_BLOCK_SIZE;
2833	req->hmac = true;
2834	ctx->do_fallback = false;
2835	ctx->fb_init_done = false;
2836	return 0;
2837}
2838
2839static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
2840{
2841	if (req->nbytes)
2842		return safexcel_hmac_sha3_224_init(req) ?:
2843		       safexcel_ahash_finup(req);
2844
2845	/* HW cannot do zero length HMAC, use fallback instead */
2846	return safexcel_sha3_digest_fallback(req);
2847}
2848
2849static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
2850{
2851	return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
2852}
2853
2854struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
2855	.type = SAFEXCEL_ALG_TYPE_AHASH,
2856	.algo_mask = SAFEXCEL_ALG_SHA3,
2857	.alg.ahash = {
2858		.init = safexcel_hmac_sha3_224_init,
2859		.update = safexcel_sha3_update,
2860		.final = safexcel_sha3_final,
2861		.finup = safexcel_sha3_finup,
2862		.digest = safexcel_hmac_sha3_224_digest,
2863		.setkey = safexcel_hmac_sha3_setkey,
2864		.export = safexcel_sha3_export,
2865		.import = safexcel_sha3_import,
2866		.halg = {
2867			.digestsize = SHA3_224_DIGEST_SIZE,
2868			.statesize = sizeof(struct safexcel_ahash_export_state),
2869			.base = {
2870				.cra_name = "hmac(sha3-224)",
2871				.cra_driver_name = "safexcel-hmac-sha3-224",
2872				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2873				.cra_flags = CRYPTO_ALG_ASYNC |
2874					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2875					     CRYPTO_ALG_NEED_FALLBACK,
2876				.cra_blocksize = SHA3_224_BLOCK_SIZE,
2877				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2878				.cra_init = safexcel_hmac_sha3_224_cra_init,
2879				.cra_exit = safexcel_hmac_sha3_cra_exit,
2880				.cra_module = THIS_MODULE,
2881			},
2882		},
2883	},
2884};
2885
2886static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
2887{
2888	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2889	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2890	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2891
2892	memset(req, 0, sizeof(*req));
2893
2894	/* Copy (half of) the key */
2895	memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
2896	/* Start of HMAC should have len == processed == blocksize */
2897	req->len	= SHA3_256_BLOCK_SIZE;
2898	req->processed	= SHA3_256_BLOCK_SIZE;
2899	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2900	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2901	req->state_sz = SHA3_256_BLOCK_SIZE / 2;
2902	req->digest_sz = SHA3_256_DIGEST_SIZE;
2903	req->block_sz = SHA3_256_BLOCK_SIZE;
2904	req->hmac = true;
2905	ctx->do_fallback = false;
2906	ctx->fb_init_done = false;
2907	return 0;
2908}
2909
2910static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
2911{
2912	if (req->nbytes)
2913		return safexcel_hmac_sha3_256_init(req) ?:
2914		       safexcel_ahash_finup(req);
2915
2916	/* HW cannot do zero length HMAC, use fallback instead */
2917	return safexcel_sha3_digest_fallback(req);
2918}
2919
2920static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
2921{
2922	return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
2923}
2924
2925struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
2926	.type = SAFEXCEL_ALG_TYPE_AHASH,
2927	.algo_mask = SAFEXCEL_ALG_SHA3,
2928	.alg.ahash = {
2929		.init = safexcel_hmac_sha3_256_init,
2930		.update = safexcel_sha3_update,
2931		.final = safexcel_sha3_final,
2932		.finup = safexcel_sha3_finup,
2933		.digest = safexcel_hmac_sha3_256_digest,
2934		.setkey = safexcel_hmac_sha3_setkey,
2935		.export = safexcel_sha3_export,
2936		.import = safexcel_sha3_import,
2937		.halg = {
2938			.digestsize = SHA3_256_DIGEST_SIZE,
2939			.statesize = sizeof(struct safexcel_ahash_export_state),
2940			.base = {
2941				.cra_name = "hmac(sha3-256)",
2942				.cra_driver_name = "safexcel-hmac-sha3-256",
2943				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2944				.cra_flags = CRYPTO_ALG_ASYNC |
2945					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2946					     CRYPTO_ALG_NEED_FALLBACK,
2947				.cra_blocksize = SHA3_256_BLOCK_SIZE,
2948				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2949				.cra_init = safexcel_hmac_sha3_256_cra_init,
2950				.cra_exit = safexcel_hmac_sha3_cra_exit,
2951				.cra_module = THIS_MODULE,
2952			},
2953		},
2954	},
2955};
2956
2957static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
2958{
2959	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2960	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2961	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2962
2963	memset(req, 0, sizeof(*req));
2964
2965	/* Copy (half of) the key */
2966	memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
2967	/* Start of HMAC should have len == processed == blocksize */
2968	req->len	= SHA3_384_BLOCK_SIZE;
2969	req->processed	= SHA3_384_BLOCK_SIZE;
2970	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2971	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2972	req->state_sz = SHA3_384_BLOCK_SIZE / 2;
2973	req->digest_sz = SHA3_384_DIGEST_SIZE;
2974	req->block_sz = SHA3_384_BLOCK_SIZE;
2975	req->hmac = true;
2976	ctx->do_fallback = false;
2977	ctx->fb_init_done = false;
2978	return 0;
2979}
2980
2981static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
2982{
2983	if (req->nbytes)
2984		return safexcel_hmac_sha3_384_init(req) ?:
2985		       safexcel_ahash_finup(req);
2986
2987	/* HW cannot do zero length HMAC, use fallback instead */
2988	return safexcel_sha3_digest_fallback(req);
2989}
2990
2991static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
2992{
2993	return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
2994}
2995
2996struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
2997	.type = SAFEXCEL_ALG_TYPE_AHASH,
2998	.algo_mask = SAFEXCEL_ALG_SHA3,
2999	.alg.ahash = {
3000		.init = safexcel_hmac_sha3_384_init,
3001		.update = safexcel_sha3_update,
3002		.final = safexcel_sha3_final,
3003		.finup = safexcel_sha3_finup,
3004		.digest = safexcel_hmac_sha3_384_digest,
3005		.setkey = safexcel_hmac_sha3_setkey,
3006		.export = safexcel_sha3_export,
3007		.import = safexcel_sha3_import,
3008		.halg = {
3009			.digestsize = SHA3_384_DIGEST_SIZE,
3010			.statesize = sizeof(struct safexcel_ahash_export_state),
3011			.base = {
3012				.cra_name = "hmac(sha3-384)",
3013				.cra_driver_name = "safexcel-hmac-sha3-384",
3014				.cra_priority = SAFEXCEL_CRA_PRIORITY,
3015				.cra_flags = CRYPTO_ALG_ASYNC |
3016					     CRYPTO_ALG_KERN_DRIVER_ONLY |
3017					     CRYPTO_ALG_NEED_FALLBACK,
3018				.cra_blocksize = SHA3_384_BLOCK_SIZE,
3019				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3020				.cra_init = safexcel_hmac_sha3_384_cra_init,
3021				.cra_exit = safexcel_hmac_sha3_cra_exit,
3022				.cra_module = THIS_MODULE,
3023			},
3024		},
3025	},
3026};
3027
3028static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
3029{
3030	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
3031	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
3032	struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
3033
3034	memset(req, 0, sizeof(*req));
3035
3036	/* Copy (half of) the key */
3037	memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
3038	/* Start of HMAC should have len == processed == blocksize */
3039	req->len	= SHA3_512_BLOCK_SIZE;
3040	req->processed	= SHA3_512_BLOCK_SIZE;
3041	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
3042	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3043	req->state_sz = SHA3_512_BLOCK_SIZE / 2;
3044	req->digest_sz = SHA3_512_DIGEST_SIZE;
3045	req->block_sz = SHA3_512_BLOCK_SIZE;
3046	req->hmac = true;
3047	ctx->do_fallback = false;
3048	ctx->fb_init_done = false;
3049	return 0;
3050}
3051
3052static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
3053{
3054	if (req->nbytes)
3055		return safexcel_hmac_sha3_512_init(req) ?:
3056		       safexcel_ahash_finup(req);
3057
3058	/* HW cannot do zero length HMAC, use fallback instead */
3059	return safexcel_sha3_digest_fallback(req);
3060}
3061
3062static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
3063{
3064	return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
3065}
3066struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
3067	.type = SAFEXCEL_ALG_TYPE_AHASH,
3068	.algo_mask = SAFEXCEL_ALG_SHA3,
3069	.alg.ahash = {
3070		.init = safexcel_hmac_sha3_512_init,
3071		.update = safexcel_sha3_update,
3072		.final = safexcel_sha3_final,
3073		.finup = safexcel_sha3_finup,
3074		.digest = safexcel_hmac_sha3_512_digest,
3075		.setkey = safexcel_hmac_sha3_setkey,
3076		.export = safexcel_sha3_export,
3077		.import = safexcel_sha3_import,
3078		.halg = {
3079			.digestsize = SHA3_512_DIGEST_SIZE,
3080			.statesize = sizeof(struct safexcel_ahash_export_state),
3081			.base = {
3082				.cra_name = "hmac(sha3-512)",
3083				.cra_driver_name = "safexcel-hmac-sha3-512",
3084				.cra_priority = SAFEXCEL_CRA_PRIORITY,
3085				.cra_flags = CRYPTO_ALG_ASYNC |
3086					     CRYPTO_ALG_KERN_DRIVER_ONLY |
3087					     CRYPTO_ALG_NEED_FALLBACK,
3088				.cra_blocksize = SHA3_512_BLOCK_SIZE,
3089				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3090				.cra_init = safexcel_hmac_sha3_512_cra_init,
3091				.cra_exit = safexcel_hmac_sha3_cra_exit,
3092				.cra_module = THIS_MODULE,
3093			},
3094		},
3095	},
3096};
v5.14.15
   1// SPDX-License-Identifier: GPL-2.0
   2/*
   3 * Copyright (C) 2017 Marvell
   4 *
   5 * Antoine Tenart <antoine.tenart@free-electrons.com>
   6 */
   7
   8#include <crypto/aes.h>
   9#include <crypto/hmac.h>
  10#include <crypto/md5.h>
  11#include <crypto/sha1.h>
  12#include <crypto/sha2.h>
  13#include <crypto/sha3.h>
  14#include <crypto/skcipher.h>
  15#include <crypto/sm3.h>
  16#include <crypto/internal/cipher.h>
  17#include <linux/device.h>
  18#include <linux/dma-mapping.h>
  19#include <linux/dmapool.h>
  20
  21#include "safexcel.h"
  22
  23struct safexcel_ahash_ctx {
  24	struct safexcel_context base;
  25
  26	u32 alg;
  27	u8  key_sz;
  28	bool cbcmac;
  29	bool do_fallback;
  30	bool fb_init_done;
  31	bool fb_do_setkey;
  32
  33	struct crypto_cipher *kaes;
  34	struct crypto_ahash *fback;
  35	struct crypto_shash *shpre;
  36	struct shash_desc *shdesc;
  37};
  38
  39struct safexcel_ahash_req {
  40	bool last_req;
  41	bool finish;
  42	bool hmac;
  43	bool needs_inv;
  44	bool hmac_zlen;
  45	bool len_is_le;
  46	bool not_first;
  47	bool xcbcmac;
  48
  49	int nents;
  50	dma_addr_t result_dma;
  51
  52	u32 digest;
  53
  54	u8 state_sz;    /* expected state size, only set once */
  55	u8 block_sz;    /* block size, only set once */
  56	u8 digest_sz;   /* output digest size, only set once */
  57	__le32 state[SHA3_512_BLOCK_SIZE /
  58		     sizeof(__le32)] __aligned(sizeof(__le32));
  59
  60	u64 len;
  61	u64 processed;
  62
  63	u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
  64	dma_addr_t cache_dma;
  65	unsigned int cache_sz;
  66
  67	u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
  68};
  69
  70static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
  71{
  72	return req->len - req->processed;
  73}
  74
  75static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
  76				u32 input_length, u32 result_length,
  77				bool cbcmac)
  78{
  79	struct safexcel_token *token =
  80		(struct safexcel_token *)cdesc->control_data.token;
  81
  82	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
  83	token[0].packet_length = input_length;
  84	token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
  85
  86	input_length &= 15;
  87	if (unlikely(cbcmac && input_length)) {
  88		token[0].stat =  0;
  89		token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
  90		token[1].packet_length = 16 - input_length;
  91		token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
  92		token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
  93	} else {
  94		token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
  95		eip197_noop_token(&token[1]);
  96	}
  97
  98	token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
  99	token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
 100			EIP197_TOKEN_STAT_LAST_PACKET;
 101	token[2].packet_length = result_length;
 102	token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
 103				EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
 104
 105	eip197_noop_token(&token[3]);
 106}
 107
 108static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
 109				     struct safexcel_ahash_req *req,
 110				     struct safexcel_command_desc *cdesc)
 111{
 112	struct safexcel_crypto_priv *priv = ctx->base.priv;
 113	u64 count = 0;
 114
 115	cdesc->control_data.control0 = ctx->alg;
 116	cdesc->control_data.control1 = 0;
 117
 118	/*
 119	 * Copy the input digest if needed, and setup the context
 120	 * fields. Do this now as we need it to setup the first command
 121	 * descriptor.
 122	 */
 123	if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
 124		if (req->xcbcmac)
 125			memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
 126		else
 127			memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
 128
 129		if (!req->finish && req->xcbcmac)
 130			cdesc->control_data.control0 |=
 131				CONTEXT_CONTROL_DIGEST_XCM |
 132				CONTEXT_CONTROL_TYPE_HASH_OUT  |
 133				CONTEXT_CONTROL_NO_FINISH_HASH |
 134				CONTEXT_CONTROL_SIZE(req->state_sz /
 135						     sizeof(u32));
 136		else
 137			cdesc->control_data.control0 |=
 138				CONTEXT_CONTROL_DIGEST_XCM |
 139				CONTEXT_CONTROL_TYPE_HASH_OUT  |
 140				CONTEXT_CONTROL_SIZE(req->state_sz /
 141						     sizeof(u32));
 142		return;
 143	} else if (!req->processed) {
 144		/* First - and possibly only - block of basic hash only */
 145		if (req->finish)
 146			cdesc->control_data.control0 |= req->digest |
 147				CONTEXT_CONTROL_TYPE_HASH_OUT |
 148				CONTEXT_CONTROL_RESTART_HASH  |
 149				/* ensure its not 0! */
 150				CONTEXT_CONTROL_SIZE(1);
 151		else
 152			cdesc->control_data.control0 |= req->digest |
 153				CONTEXT_CONTROL_TYPE_HASH_OUT  |
 154				CONTEXT_CONTROL_RESTART_HASH   |
 155				CONTEXT_CONTROL_NO_FINISH_HASH |
 156				/* ensure its not 0! */
 157				CONTEXT_CONTROL_SIZE(1);
 158		return;
 159	}
 160
 161	/* Hash continuation or HMAC, setup (inner) digest from state */
 162	memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
 163
 164	if (req->finish) {
 165		/* Compute digest count for hash/HMAC finish operations */
 166		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
 167		    req->hmac_zlen || (req->processed != req->block_sz)) {
 168			count = req->processed / EIP197_COUNTER_BLOCK_SIZE;
 169
 170			/* This is a hardware limitation, as the
 171			 * counter must fit into an u32. This represents
 172			 * a fairly big amount of input data, so we
 173			 * shouldn't see this.
 174			 */
 175			if (unlikely(count & 0xffffffff00000000ULL)) {
 176				dev_warn(priv->dev,
 177					 "Input data is too big\n");
 178				return;
 179			}
 180		}
 181
 182		if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
 183		    /* Special case: zero length HMAC */
 184		    req->hmac_zlen ||
 185		    /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
 186		    (req->processed != req->block_sz)) {
 187			/* Basic hash continue operation, need digest + cnt */
 188			cdesc->control_data.control0 |=
 189				CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
 190				CONTEXT_CONTROL_TYPE_HASH_OUT |
 191				CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
 192			/* For zero-len HMAC, don't finalize, already padded! */
 193			if (req->hmac_zlen)
 194				cdesc->control_data.control0 |=
 195					CONTEXT_CONTROL_NO_FINISH_HASH;
 196			cdesc->control_data.control1 |=
 197				CONTEXT_CONTROL_DIGEST_CNT;
 198			ctx->base.ctxr->data[req->state_sz >> 2] =
 199				cpu_to_le32(count);
 200			req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
 201
 202			/* Clear zero-length HMAC flag for next operation! */
 203			req->hmac_zlen = false;
 204		} else { /* HMAC */
 205			/* Need outer digest for HMAC finalization */
 206			memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
 207			       &ctx->base.opad, req->state_sz);
 208
 209			/* Single pass HMAC - no digest count */
 210			cdesc->control_data.control0 |=
 211				CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
 212				CONTEXT_CONTROL_TYPE_HASH_OUT |
 213				CONTEXT_CONTROL_DIGEST_HMAC;
 214		}
 215	} else { /* Hash continuation, do not finish yet */
 216		cdesc->control_data.control0 |=
 217			CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
 218			CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
 219			CONTEXT_CONTROL_TYPE_HASH_OUT |
 220			CONTEXT_CONTROL_NO_FINISH_HASH;
 221	}
 222}
 223
 224static int safexcel_ahash_enqueue(struct ahash_request *areq);
 225
 226static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
 227				      int ring,
 228				      struct crypto_async_request *async,
 229				      bool *should_complete, int *ret)
 230{
 231	struct safexcel_result_desc *rdesc;
 232	struct ahash_request *areq = ahash_request_cast(async);
 233	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
 234	struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
 235	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
 236	u64 cache_len;
 237
 238	*ret = 0;
 239
 240	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
 241	if (IS_ERR(rdesc)) {
 242		dev_err(priv->dev,
 243			"hash: result: could not retrieve the result descriptor\n");
 244		*ret = PTR_ERR(rdesc);
 245	} else {
 246		*ret = safexcel_rdesc_check_errors(priv, rdesc);
 247	}
 248
 249	safexcel_complete(priv, ring);
 250
 251	if (sreq->nents) {
 252		dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
 253		sreq->nents = 0;
 254	}
 255
 256	if (sreq->result_dma) {
 257		dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
 258				 DMA_FROM_DEVICE);
 259		sreq->result_dma = 0;
 260	}
 261
 262	if (sreq->cache_dma) {
 263		dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
 264				 DMA_TO_DEVICE);
 265		sreq->cache_dma = 0;
 266		sreq->cache_sz = 0;
 267	}
 268
 269	if (sreq->finish) {
 270		if (sreq->hmac &&
 271		    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
 272			/* Faking HMAC using hash - need to do outer hash */
 273			memcpy(sreq->cache, sreq->state,
 274			       crypto_ahash_digestsize(ahash));
 275
 276			memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);
 277
 278			sreq->len = sreq->block_sz +
 279				    crypto_ahash_digestsize(ahash);
 280			sreq->processed = sreq->block_sz;
 281			sreq->hmac = 0;
 282
 283			if (priv->flags & EIP197_TRC_CACHE)
 284				ctx->base.needs_inv = true;
 285			areq->nbytes = 0;
 286			safexcel_ahash_enqueue(areq);
 287
 288			*should_complete = false; /* Not done yet */
 289			return 1;
 290		}
 291
 292		if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
 293			     ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
 294			/* Undo final XOR with 0xffffffff ...*/
 295			*(__le32 *)areq->result = ~sreq->state[0];
 296		} else {
 297			memcpy(areq->result, sreq->state,
 298			       crypto_ahash_digestsize(ahash));
 299		}
 300	}
 301
 302	cache_len = safexcel_queued_len(sreq);
 303	if (cache_len)
 304		memcpy(sreq->cache, sreq->cache_next, cache_len);
 305
 306	*should_complete = true;
 307
 308	return 1;
 309}
 310
 311static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
 312				   int *commands, int *results)
 313{
 314	struct ahash_request *areq = ahash_request_cast(async);
 315	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 316	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
 317	struct safexcel_crypto_priv *priv = ctx->base.priv;
 318	struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
 319	struct safexcel_result_desc *rdesc;
 320	struct scatterlist *sg;
 321	struct safexcel_token *dmmy;
 322	int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
 323	u64 queued, len;
 324
 325	queued = safexcel_queued_len(req);
 326	if (queued <= HASH_CACHE_SIZE)
 327		cache_len = queued;
 328	else
 329		cache_len = queued - areq->nbytes;
 330
 331	if (!req->finish && !req->last_req) {
 332		/* If this is not the last request and the queued data does not
 333		 * fit into full cache blocks, cache it for the next send call.
 334		 */
 335		extra = queued & (HASH_CACHE_SIZE - 1);
 336
 337		/* If this is not the last request and the queued data
 338		 * is a multiple of a block, cache the last one for now.
 339		 */
 340		if (!extra)
 341			extra = HASH_CACHE_SIZE;
 342
 343		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
 344				   req->cache_next, extra,
 345				   areq->nbytes - extra);
 346
 347		queued -= extra;
 348
 349		if (!queued) {
 350			*commands = 0;
 351			*results = 0;
 352			return 0;
 353		}
 354
 355		extra = 0;
 356	}
 357
 358	if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
 359		if (unlikely(cache_len < AES_BLOCK_SIZE)) {
 360			/*
 361			 * Cache contains less than 1 full block, complete.
 362			 */
 363			extra = AES_BLOCK_SIZE - cache_len;
 364			if (queued > cache_len) {
 365				/* More data follows: borrow bytes */
 366				u64 tmp = queued - cache_len;
 367
 368				skip = min_t(u64, tmp, extra);
 369				sg_pcopy_to_buffer(areq->src,
 370					sg_nents(areq->src),
 371					req->cache + cache_len,
 372					skip, 0);
 373			}
 374			extra -= skip;
 375			memset(req->cache + cache_len + skip, 0, extra);
 376			if (!ctx->cbcmac && extra) {
 377				// 10- padding for XCBCMAC & CMAC
 378				req->cache[cache_len + skip] = 0x80;
 379				// HW will use K2 iso K3 - compensate!
 380				for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
 381					u32 *cache = (void *)req->cache;
 382					u32 *ipad = ctx->base.ipad.word;
 383					u32 x;
 384
 385					x = ipad[i] ^ ipad[i + 4];
 386					cache[i] ^= swab(x);
 387				}
 388			}
 389			cache_len = AES_BLOCK_SIZE;
 390			queued = queued + extra;
 391		}
 392
 393		/* XCBC continue: XOR previous result into 1st word */
 394		crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
 395	}
 396
 397	len = queued;
 398	/* Add a command descriptor for the cached data, if any */
 399	if (cache_len) {
 400		req->cache_dma = dma_map_single(priv->dev, req->cache,
 401						cache_len, DMA_TO_DEVICE);
 402		if (dma_mapping_error(priv->dev, req->cache_dma))
 403			return -EINVAL;
 404
 405		req->cache_sz = cache_len;
 406		first_cdesc = safexcel_add_cdesc(priv, ring, 1,
 407						 (cache_len == len),
 408						 req->cache_dma, cache_len,
 409						 len, ctx->base.ctxr_dma,
 410						 &dmmy);
 411		if (IS_ERR(first_cdesc)) {
 412			ret = PTR_ERR(first_cdesc);
 413			goto unmap_cache;
 414		}
 415		n_cdesc++;
 416
 417		queued -= cache_len;
 418		if (!queued)
 419			goto send_command;
 420	}
 421
 422	/* Now handle the current ahash request buffer(s) */
 423	req->nents = dma_map_sg(priv->dev, areq->src,
 424				sg_nents_for_len(areq->src,
 425						 areq->nbytes),
 426				DMA_TO_DEVICE);
 427	if (!req->nents) {
 428		ret = -ENOMEM;
 429		goto cdesc_rollback;
 430	}
 431
 432	for_each_sg(areq->src, sg, req->nents, i) {
 433		int sglen = sg_dma_len(sg);
 434
 435		if (unlikely(sglen <= skip)) {
 436			skip -= sglen;
 437			continue;
 438		}
 439
 440		/* Do not overflow the request */
 441		if ((queued + skip) <= sglen)
 442			sglen = queued;
 443		else
 444			sglen -= skip;
 445
 446		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
 447					   !(queued - sglen),
 448					   sg_dma_address(sg) + skip, sglen,
 449					   len, ctx->base.ctxr_dma, &dmmy);
 450		if (IS_ERR(cdesc)) {
 451			ret = PTR_ERR(cdesc);
 452			goto unmap_sg;
 453		}
 454
 455		if (!n_cdesc)
 456			first_cdesc = cdesc;
 457		n_cdesc++;
 458
 459		queued -= sglen;
 460		if (!queued)
 461			break;
 462		skip = 0;
 463	}
 464
 465send_command:
 466	/* Setup the context options */
 467	safexcel_context_control(ctx, req, first_cdesc);
 468
 469	/* Add the token */
 470	safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);
 471
 472	req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
 473					 DMA_FROM_DEVICE);
 474	if (dma_mapping_error(priv->dev, req->result_dma)) {
 475		ret = -EINVAL;
 476		goto unmap_sg;
 477	}
 478
 479	/* Add a result descriptor */
 480	rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
 481				   req->digest_sz);
 482	if (IS_ERR(rdesc)) {
 483		ret = PTR_ERR(rdesc);
 484		goto unmap_result;
 485	}
 486
 487	safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);
 488
 489	req->processed += len - extra;
 490
 491	*commands = n_cdesc;
 492	*results = 1;
 493	return 0;
 494
 495unmap_result:
 496	dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
 497			 DMA_FROM_DEVICE);
 498unmap_sg:
 499	if (req->nents) {
 500		dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
 501		req->nents = 0;
 502	}
 503cdesc_rollback:
 504	for (i = 0; i < n_cdesc; i++)
 505		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
 506unmap_cache:
 507	if (req->cache_dma) {
 508		dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
 509				 DMA_TO_DEVICE);
 510		req->cache_dma = 0;
 511		req->cache_sz = 0;
 512	}
 513
 514	return ret;
 515}
 516
 517static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 518				      int ring,
 519				      struct crypto_async_request *async,
 520				      bool *should_complete, int *ret)
 521{
 522	struct safexcel_result_desc *rdesc;
 523	struct ahash_request *areq = ahash_request_cast(async);
 524	struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
 525	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
 526	int enq_ret;
 527
 528	*ret = 0;
 529
 530	rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
 531	if (IS_ERR(rdesc)) {
 532		dev_err(priv->dev,
 533			"hash: invalidate: could not retrieve the result descriptor\n");
 534		*ret = PTR_ERR(rdesc);
 535	} else {
 536		*ret = safexcel_rdesc_check_errors(priv, rdesc);
 537	}
 538
 539	safexcel_complete(priv, ring);
 540
 541	if (ctx->base.exit_inv) {
 542		dma_pool_free(priv->context_pool, ctx->base.ctxr,
 543			      ctx->base.ctxr_dma);
 544
 545		*should_complete = true;
 546		return 1;
 547	}
 548
 549	ring = safexcel_select_ring(priv);
 550	ctx->base.ring = ring;
 551
 552	spin_lock_bh(&priv->ring[ring].queue_lock);
 553	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
 554	spin_unlock_bh(&priv->ring[ring].queue_lock);
 555
 556	if (enq_ret != -EINPROGRESS)
 557		*ret = enq_ret;
 558
 559	queue_work(priv->ring[ring].workqueue,
 560		   &priv->ring[ring].work_data.work);
 561
 562	*should_complete = false;
 563
 564	return 1;
 565}
 566
 567static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
 568				  struct crypto_async_request *async,
 569				  bool *should_complete, int *ret)
 570{
 571	struct ahash_request *areq = ahash_request_cast(async);
 572	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 573	int err;
 574
 575	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);
 576
 577	if (req->needs_inv) {
 578		req->needs_inv = false;
 579		err = safexcel_handle_inv_result(priv, ring, async,
 580						 should_complete, ret);
 581	} else {
 582		err = safexcel_handle_req_result(priv, ring, async,
 583						 should_complete, ret);
 584	}
 585
 586	return err;
 587}
 588
 589static int safexcel_ahash_send_inv(struct crypto_async_request *async,
 590				   int ring, int *commands, int *results)
 591{
 592	struct ahash_request *areq = ahash_request_cast(async);
 593	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
 594	int ret;
 595
 596	ret = safexcel_invalidate_cache(async, ctx->base.priv,
 597					ctx->base.ctxr_dma, ring);
 598	if (unlikely(ret))
 599		return ret;
 600
 601	*commands = 1;
 602	*results = 1;
 603
 604	return 0;
 605}
 606
 607static int safexcel_ahash_send(struct crypto_async_request *async,
 608			       int ring, int *commands, int *results)
 609{
 610	struct ahash_request *areq = ahash_request_cast(async);
 611	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 612	int ret;
 613
 614	if (req->needs_inv)
 615		ret = safexcel_ahash_send_inv(async, ring, commands, results);
 616	else
 617		ret = safexcel_ahash_send_req(async, ring, commands, results);
 618
 619	return ret;
 620}
 621
 622static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
 623{
 624	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
 625	struct safexcel_crypto_priv *priv = ctx->base.priv;
 626	EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
 627	struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
 628	struct safexcel_inv_result result = {};
 629	int ring = ctx->base.ring;
 
 630
 631	memset(req, 0, EIP197_AHASH_REQ_SIZE);
 632
 633	/* create invalidation request */
 634	init_completion(&result.completion);
 635	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 636				   safexcel_inv_complete, &result);
 637
 638	ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
 639	ctx = crypto_tfm_ctx(req->base.tfm);
 640	ctx->base.exit_inv = true;
 641	rctx->needs_inv = true;
 642
 643	spin_lock_bh(&priv->ring[ring].queue_lock);
 644	crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
 645	spin_unlock_bh(&priv->ring[ring].queue_lock);
 646
 647	queue_work(priv->ring[ring].workqueue,
 648		   &priv->ring[ring].work_data.work);
 649
 650	wait_for_completion(&result.completion);
 651
 652	if (result.error) {
 653		dev_warn(priv->dev, "hash: completion error (%d)\n",
 654			 result.error);
 655		return result.error;
 656	}
 657
 658	return 0;
 659}
 660
 661/* safexcel_ahash_cache: cache data until at least one request can be sent to
 662 * the engine, aka. when there is at least 1 block size in the pipe.
 663 */
 664static int safexcel_ahash_cache(struct ahash_request *areq)
 665{
 666	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 667	u64 cache_len;
 668
 669	/* cache_len: everything accepted by the driver but not sent yet,
 670	 * tot sz handled by update() - last req sz - tot sz handled by send()
 671	 */
 672	cache_len = safexcel_queued_len(req);
 673
 674	/*
 675	 * In case there isn't enough bytes to proceed (less than a
 676	 * block size), cache the data until we have enough.
 677	 */
 678	if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
 679		sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
 680				   req->cache + cache_len,
 681				   areq->nbytes, 0);
 682		return 0;
 683	}
 684
 685	/* We couldn't cache all the data */
 686	return -E2BIG;
 687}
 688
 689static int safexcel_ahash_enqueue(struct ahash_request *areq)
 690{
 691	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
 692	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 693	struct safexcel_crypto_priv *priv = ctx->base.priv;
 694	int ret, ring;
 695
 696	req->needs_inv = false;
 697
 698	if (ctx->base.ctxr) {
 699		if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
 700		     /* invalidate for *any* non-XCBC continuation */
 701		   ((req->not_first && !req->xcbcmac) ||
 702		     /* invalidate if (i)digest changed */
 703		     memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
 704		     /* invalidate for HMAC finish with odigest changed */
 705		     (req->finish && req->hmac &&
 706		      memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
 707			     &ctx->base.opad, req->state_sz))))
 708			/*
 709			 * We're still setting needs_inv here, even though it is
 710			 * cleared right away, because the needs_inv flag can be
 711			 * set in other functions and we want to keep the same
 712			 * logic.
 713			 */
 714			ctx->base.needs_inv = true;
 715
 716		if (ctx->base.needs_inv) {
 717			ctx->base.needs_inv = false;
 718			req->needs_inv = true;
 719		}
 720	} else {
 721		ctx->base.ring = safexcel_select_ring(priv);
 722		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
 723						 EIP197_GFP_FLAGS(areq->base),
 724						 &ctx->base.ctxr_dma);
 725		if (!ctx->base.ctxr)
 726			return -ENOMEM;
 727	}
 728	req->not_first = true;
 729
 730	ring = ctx->base.ring;
 731
 732	spin_lock_bh(&priv->ring[ring].queue_lock);
 733	ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
 734	spin_unlock_bh(&priv->ring[ring].queue_lock);
 735
 736	queue_work(priv->ring[ring].workqueue,
 737		   &priv->ring[ring].work_data.work);
 738
 739	return ret;
 740}
 741
 742static int safexcel_ahash_update(struct ahash_request *areq)
 743{
 744	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 745	int ret;
 746
 747	/* If the request is 0 length, do nothing */
 748	if (!areq->nbytes)
 749		return 0;
 750
 751	/* Add request to the cache if it fits */
 752	ret = safexcel_ahash_cache(areq);
 753
 754	/* Update total request length */
 755	req->len += areq->nbytes;
 756
 757	/* If not all data could fit into the cache, go process the excess.
 758	 * Also go process immediately for an HMAC IV precompute, which
 759	 * will never be finished at all, but needs to be processed anyway.
 760	 */
 761	if ((ret && !req->finish) || req->last_req)
 762		return safexcel_ahash_enqueue(areq);
 763
 764	return 0;
 765}
 766
 767static int safexcel_ahash_final(struct ahash_request *areq)
 768{
 769	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 770	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
 771
 772	req->finish = true;
 773
 774	if (unlikely(!req->len && !areq->nbytes)) {
 775		/*
 776		 * If we have an overall 0 length *hash* request:
 777		 * The HW cannot do 0 length hash, so we provide the correct
 778		 * result directly here.
 779		 */
 780		if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
 781			memcpy(areq->result, md5_zero_message_hash,
 782			       MD5_DIGEST_SIZE);
 783		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
 784			memcpy(areq->result, sha1_zero_message_hash,
 785			       SHA1_DIGEST_SIZE);
 786		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
 787			memcpy(areq->result, sha224_zero_message_hash,
 788			       SHA224_DIGEST_SIZE);
 789		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
 790			memcpy(areq->result, sha256_zero_message_hash,
 791			       SHA256_DIGEST_SIZE);
 792		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
 793			memcpy(areq->result, sha384_zero_message_hash,
 794			       SHA384_DIGEST_SIZE);
 795		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
 796			memcpy(areq->result, sha512_zero_message_hash,
 797			       SHA512_DIGEST_SIZE);
 798		else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
 799			memcpy(areq->result,
 800			       EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
 801		}
 802
 803		return 0;
 804	} else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
 805			    ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
 806			    req->len == sizeof(u32) && !areq->nbytes)) {
 807		/* Zero length CRC32 */
 808		memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
 809		return 0;
 810	} else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
 811			    !areq->nbytes)) {
 812		/* Zero length CBC MAC */
 813		memset(areq->result, 0, AES_BLOCK_SIZE);
 814		return 0;
 815	} else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
 816			    !areq->nbytes)) {
 817		/* Zero length (X)CBC/CMAC */
 818		int i;
 819
 820		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
 821			u32 *result = (void *)areq->result;
 822
 823			/* K3 */
 824			result[i] = swab(ctx->base.ipad.word[i + 4]);
 825		}
 826		areq->result[0] ^= 0x80;			// 10- padding
 827		crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
 828		return 0;
 829	} else if (unlikely(req->hmac &&
 830			    (req->len == req->block_sz) &&
 831			    !areq->nbytes)) {
 832		/*
 833		 * If we have an overall 0 length *HMAC* request:
 834		 * For HMAC, we need to finalize the inner digest
 835		 * and then perform the outer hash.
 836		 */
 837
 838		/* generate pad block in the cache */
 839		/* start with a hash block of all zeroes */
 840		memset(req->cache, 0, req->block_sz);
 841		/* set the first byte to 0x80 to 'append a 1 bit' */
 842		req->cache[0] = 0x80;
 843		/* add the length in bits in the last 2 bytes */
 844		if (req->len_is_le) {
 845			/* Little endian length word (e.g. MD5) */
 846			req->cache[req->block_sz-8] = (req->block_sz << 3) &
 847						      255;
 848			req->cache[req->block_sz-7] = (req->block_sz >> 5);
 849		} else {
 850			/* Big endian length word (e.g. any SHA) */
 851			req->cache[req->block_sz-2] = (req->block_sz >> 5);
 852			req->cache[req->block_sz-1] = (req->block_sz << 3) &
 853						      255;
 854		}
 855
 856		req->len += req->block_sz; /* plus 1 hash block */
 857
 858		/* Set special zero-length HMAC flag */
 859		req->hmac_zlen = true;
 860
 861		/* Finalize HMAC */
 862		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
 863	} else if (req->hmac) {
 864		/* Finalize HMAC */
 865		req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
 866	}
 867
 868	return safexcel_ahash_enqueue(areq);
 869}
 870
 871static int safexcel_ahash_finup(struct ahash_request *areq)
 872{
 873	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 874
 875	req->finish = true;
 876
 877	safexcel_ahash_update(areq);
 878	return safexcel_ahash_final(areq);
 879}
 880
 881static int safexcel_ahash_export(struct ahash_request *areq, void *out)
 882{
 883	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 884	struct safexcel_ahash_export_state *export = out;
 885
 886	export->len = req->len;
 887	export->processed = req->processed;
 888
 889	export->digest = req->digest;
 890
 891	memcpy(export->state, req->state, req->state_sz);
 892	memcpy(export->cache, req->cache, HASH_CACHE_SIZE);
 893
 894	return 0;
 895}
 896
 897static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
 898{
 899	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 900	const struct safexcel_ahash_export_state *export = in;
 901	int ret;
 902
 903	ret = crypto_ahash_init(areq);
 904	if (ret)
 905		return ret;
 906
 907	req->len = export->len;
 908	req->processed = export->processed;
 909
 910	req->digest = export->digest;
 911
 912	memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
 913	memcpy(req->state, export->state, req->state_sz);
 914
 915	return 0;
 916}
 917
 918static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
 919{
 920	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
 921	struct safexcel_alg_template *tmpl =
 922		container_of(__crypto_ahash_alg(tfm->__crt_alg),
 923			     struct safexcel_alg_template, alg.ahash);
 924
 925	ctx->base.priv = tmpl->priv;
 926	ctx->base.send = safexcel_ahash_send;
 927	ctx->base.handle_result = safexcel_handle_result;
 928	ctx->fb_do_setkey = false;
 929
 930	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
 931				 sizeof(struct safexcel_ahash_req));
 932	return 0;
 933}
 934
 935static int safexcel_sha1_init(struct ahash_request *areq)
 936{
 937	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
 938	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
 939
 940	memset(req, 0, sizeof(*req));
 941
 942	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
 943	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
 944	req->state_sz = SHA1_DIGEST_SIZE;
 945	req->digest_sz = SHA1_DIGEST_SIZE;
 946	req->block_sz = SHA1_BLOCK_SIZE;
 947
 948	return 0;
 949}
 950
 951static int safexcel_sha1_digest(struct ahash_request *areq)
 952{
 953	int ret = safexcel_sha1_init(areq);
 954
 955	if (ret)
 956		return ret;
 957
 958	return safexcel_ahash_finup(areq);
 959}
 960
 961static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
 962{
 963	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
 964	struct safexcel_crypto_priv *priv = ctx->base.priv;
 965	int ret;
 966
 967	/* context not allocated, skip invalidation */
 968	if (!ctx->base.ctxr)
 969		return;
 970
 971	if (priv->flags & EIP197_TRC_CACHE) {
 972		ret = safexcel_ahash_exit_inv(tfm);
 973		if (ret)
 974			dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
 975	} else {
 976		dma_pool_free(priv->context_pool, ctx->base.ctxr,
 977			      ctx->base.ctxr_dma);
 978	}
 979}
 980
 981struct safexcel_alg_template safexcel_alg_sha1 = {
 982	.type = SAFEXCEL_ALG_TYPE_AHASH,
 983	.algo_mask = SAFEXCEL_ALG_SHA1,
 984	.alg.ahash = {
 985		.init = safexcel_sha1_init,
 986		.update = safexcel_ahash_update,
 987		.final = safexcel_ahash_final,
 988		.finup = safexcel_ahash_finup,
 989		.digest = safexcel_sha1_digest,
 990		.export = safexcel_ahash_export,
 991		.import = safexcel_ahash_import,
 992		.halg = {
 993			.digestsize = SHA1_DIGEST_SIZE,
 994			.statesize = sizeof(struct safexcel_ahash_export_state),
 995			.base = {
 996				.cra_name = "sha1",
 997				.cra_driver_name = "safexcel-sha1",
 998				.cra_priority = SAFEXCEL_CRA_PRIORITY,
 999				.cra_flags = CRYPTO_ALG_ASYNC |
1000					     CRYPTO_ALG_ALLOCATES_MEMORY |
1001					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1002				.cra_blocksize = SHA1_BLOCK_SIZE,
1003				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1004				.cra_init = safexcel_ahash_cra_init,
1005				.cra_exit = safexcel_ahash_cra_exit,
1006				.cra_module = THIS_MODULE,
1007			},
1008		},
1009	},
1010};
1011
1012static int safexcel_hmac_sha1_init(struct ahash_request *areq)
1013{
1014	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1015	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1016
1017	memset(req, 0, sizeof(*req));
1018
1019	/* Start from ipad precompute */
1020	memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
1021	/* Already processed the key^ipad part now! */
1022	req->len	= SHA1_BLOCK_SIZE;
1023	req->processed	= SHA1_BLOCK_SIZE;
1024
1025	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1026	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1027	req->state_sz = SHA1_DIGEST_SIZE;
1028	req->digest_sz = SHA1_DIGEST_SIZE;
1029	req->block_sz = SHA1_BLOCK_SIZE;
1030	req->hmac = true;
1031
1032	return 0;
1033}
1034
1035static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
1036{
1037	int ret = safexcel_hmac_sha1_init(areq);
1038
1039	if (ret)
1040		return ret;
1041
1042	return safexcel_ahash_finup(areq);
1043}
1044
1045struct safexcel_ahash_result {
1046	struct completion completion;
1047	int error;
1048};
1049
1050static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
1051{
1052	struct safexcel_ahash_result *result = req->data;
1053
1054	if (error == -EINPROGRESS)
1055		return;
1056
1057	result->error = error;
1058	complete(&result->completion);
1059}
1060
1061static int safexcel_hmac_init_pad(struct ahash_request *areq,
1062				  unsigned int blocksize, const u8 *key,
1063				  unsigned int keylen, u8 *ipad, u8 *opad)
1064{
1065	struct safexcel_ahash_result result;
1066	struct scatterlist sg;
1067	int ret, i;
1068	u8 *keydup;
1069
1070	if (keylen <= blocksize) {
1071		memcpy(ipad, key, keylen);
1072	} else {
1073		keydup = kmemdup(key, keylen, GFP_KERNEL);
1074		if (!keydup)
1075			return -ENOMEM;
1076
1077		ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1078					   safexcel_ahash_complete, &result);
1079		sg_init_one(&sg, keydup, keylen);
1080		ahash_request_set_crypt(areq, &sg, ipad, keylen);
1081		init_completion(&result.completion);
1082
1083		ret = crypto_ahash_digest(areq);
1084		if (ret == -EINPROGRESS || ret == -EBUSY) {
1085			wait_for_completion_interruptible(&result.completion);
1086			ret = result.error;
1087		}
1088
1089		/* Avoid leaking */
1090		kfree_sensitive(keydup);
1091
1092		if (ret)
1093			return ret;
1094
1095		keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
1096	}
1097
1098	memset(ipad + keylen, 0, blocksize - keylen);
1099	memcpy(opad, ipad, blocksize);
1100
1101	for (i = 0; i < blocksize; i++) {
1102		ipad[i] ^= HMAC_IPAD_VALUE;
1103		opad[i] ^= HMAC_OPAD_VALUE;
1104	}
1105
1106	return 0;
1107}
1108
1109static int safexcel_hmac_init_iv(struct ahash_request *areq,
1110				 unsigned int blocksize, u8 *pad, void *state)
1111{
1112	struct safexcel_ahash_result result;
1113	struct safexcel_ahash_req *req;
 
1114	struct scatterlist sg;
1115	int ret;
1116
1117	ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1118				   safexcel_ahash_complete, &result);
1119	sg_init_one(&sg, pad, blocksize);
1120	ahash_request_set_crypt(areq, &sg, pad, blocksize);
1121	init_completion(&result.completion);
1122
1123	ret = crypto_ahash_init(areq);
1124	if (ret)
1125		return ret;
1126
1127	req = ahash_request_ctx(areq);
1128	req->hmac = true;
1129	req->last_req = true;
1130
1131	ret = crypto_ahash_update(areq);
1132	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1133		return ret;
1134
1135	wait_for_completion_interruptible(&result.completion);
1136	if (result.error)
1137		return result.error;
1138
1139	return crypto_ahash_export(areq, state);
1140}
1141
1142static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
1143				  unsigned int keylen,
1144				  void *istate, void *ostate)
1145{
1146	struct ahash_request *areq;
1147	struct crypto_ahash *tfm;
1148	unsigned int blocksize;
1149	u8 *ipad, *opad;
1150	int ret;
1151
1152	tfm = crypto_alloc_ahash(alg, 0, 0);
1153	if (IS_ERR(tfm))
1154		return PTR_ERR(tfm);
1155
1156	areq = ahash_request_alloc(tfm, GFP_KERNEL);
1157	if (!areq) {
1158		ret = -ENOMEM;
1159		goto free_ahash;
1160	}
1161
1162	crypto_ahash_clear_flags(tfm, ~0);
1163	blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
1164
1165	ipad = kcalloc(2, blocksize, GFP_KERNEL);
1166	if (!ipad) {
1167		ret = -ENOMEM;
1168		goto free_request;
1169	}
1170
1171	opad = ipad + blocksize;
1172
1173	ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
1174	if (ret)
1175		goto free_ipad;
1176
1177	ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
1178	if (ret)
1179		goto free_ipad;
1180
1181	ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);
1182
1183free_ipad:
1184	kfree(ipad);
1185free_request:
1186	ahash_request_free(areq);
1187free_ahash:
1188	crypto_free_ahash(tfm);
1189
1190	return ret;
1191}
1192
1193int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
1194			 unsigned int keylen, const char *alg,
1195			 unsigned int state_sz)
1196{
1197	struct safexcel_crypto_priv *priv = base->priv;
1198	struct safexcel_ahash_export_state istate, ostate;
1199	int ret;
1200
1201	ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
1202	if (ret)
1203		return ret;
1204
1205	if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
1206	    (memcmp(&base->ipad, istate.state, state_sz) ||
1207	     memcmp(&base->opad, ostate.state, state_sz)))
1208		base->needs_inv = true;
1209
1210	memcpy(&base->ipad, &istate.state, state_sz);
1211	memcpy(&base->opad, &ostate.state, state_sz);
1212
1213	return 0;
1214}
1215
1216static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
1217				    unsigned int keylen, const char *alg,
1218				    unsigned int state_sz)
1219{
1220	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
1221
1222	return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
1223}
1224
1225static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
1226				     unsigned int keylen)
1227{
1228	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
1229					SHA1_DIGEST_SIZE);
1230}
1231
1232struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
1233	.type = SAFEXCEL_ALG_TYPE_AHASH,
1234	.algo_mask = SAFEXCEL_ALG_SHA1,
1235	.alg.ahash = {
1236		.init = safexcel_hmac_sha1_init,
1237		.update = safexcel_ahash_update,
1238		.final = safexcel_ahash_final,
1239		.finup = safexcel_ahash_finup,
1240		.digest = safexcel_hmac_sha1_digest,
1241		.setkey = safexcel_hmac_sha1_setkey,
1242		.export = safexcel_ahash_export,
1243		.import = safexcel_ahash_import,
1244		.halg = {
1245			.digestsize = SHA1_DIGEST_SIZE,
1246			.statesize = sizeof(struct safexcel_ahash_export_state),
1247			.base = {
1248				.cra_name = "hmac(sha1)",
1249				.cra_driver_name = "safexcel-hmac-sha1",
1250				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1251				.cra_flags = CRYPTO_ALG_ASYNC |
1252					     CRYPTO_ALG_ALLOCATES_MEMORY |
1253					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1254				.cra_blocksize = SHA1_BLOCK_SIZE,
1255				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1256				.cra_init = safexcel_ahash_cra_init,
1257				.cra_exit = safexcel_ahash_cra_exit,
1258				.cra_module = THIS_MODULE,
1259			},
1260		},
1261	},
1262};
1263
1264static int safexcel_sha256_init(struct ahash_request *areq)
1265{
1266	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1267	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1268
1269	memset(req, 0, sizeof(*req));
1270
1271	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1272	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1273	req->state_sz = SHA256_DIGEST_SIZE;
1274	req->digest_sz = SHA256_DIGEST_SIZE;
1275	req->block_sz = SHA256_BLOCK_SIZE;
1276
1277	return 0;
1278}
1279
1280static int safexcel_sha256_digest(struct ahash_request *areq)
1281{
1282	int ret = safexcel_sha256_init(areq);
1283
1284	if (ret)
1285		return ret;
1286
1287	return safexcel_ahash_finup(areq);
1288}
1289
1290struct safexcel_alg_template safexcel_alg_sha256 = {
1291	.type = SAFEXCEL_ALG_TYPE_AHASH,
1292	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1293	.alg.ahash = {
1294		.init = safexcel_sha256_init,
1295		.update = safexcel_ahash_update,
1296		.final = safexcel_ahash_final,
1297		.finup = safexcel_ahash_finup,
1298		.digest = safexcel_sha256_digest,
1299		.export = safexcel_ahash_export,
1300		.import = safexcel_ahash_import,
1301		.halg = {
1302			.digestsize = SHA256_DIGEST_SIZE,
1303			.statesize = sizeof(struct safexcel_ahash_export_state),
1304			.base = {
1305				.cra_name = "sha256",
1306				.cra_driver_name = "safexcel-sha256",
1307				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1308				.cra_flags = CRYPTO_ALG_ASYNC |
1309					     CRYPTO_ALG_ALLOCATES_MEMORY |
1310					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1311				.cra_blocksize = SHA256_BLOCK_SIZE,
1312				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1313				.cra_init = safexcel_ahash_cra_init,
1314				.cra_exit = safexcel_ahash_cra_exit,
1315				.cra_module = THIS_MODULE,
1316			},
1317		},
1318	},
1319};
1320
1321static int safexcel_sha224_init(struct ahash_request *areq)
1322{
1323	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1324	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1325
1326	memset(req, 0, sizeof(*req));
1327
1328	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1329	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1330	req->state_sz = SHA256_DIGEST_SIZE;
1331	req->digest_sz = SHA256_DIGEST_SIZE;
1332	req->block_sz = SHA256_BLOCK_SIZE;
1333
1334	return 0;
1335}
1336
1337static int safexcel_sha224_digest(struct ahash_request *areq)
1338{
1339	int ret = safexcel_sha224_init(areq);
1340
1341	if (ret)
1342		return ret;
1343
1344	return safexcel_ahash_finup(areq);
1345}
1346
1347struct safexcel_alg_template safexcel_alg_sha224 = {
1348	.type = SAFEXCEL_ALG_TYPE_AHASH,
1349	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1350	.alg.ahash = {
1351		.init = safexcel_sha224_init,
1352		.update = safexcel_ahash_update,
1353		.final = safexcel_ahash_final,
1354		.finup = safexcel_ahash_finup,
1355		.digest = safexcel_sha224_digest,
1356		.export = safexcel_ahash_export,
1357		.import = safexcel_ahash_import,
1358		.halg = {
1359			.digestsize = SHA224_DIGEST_SIZE,
1360			.statesize = sizeof(struct safexcel_ahash_export_state),
1361			.base = {
1362				.cra_name = "sha224",
1363				.cra_driver_name = "safexcel-sha224",
1364				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1365				.cra_flags = CRYPTO_ALG_ASYNC |
1366					     CRYPTO_ALG_ALLOCATES_MEMORY |
1367					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1368				.cra_blocksize = SHA224_BLOCK_SIZE,
1369				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1370				.cra_init = safexcel_ahash_cra_init,
1371				.cra_exit = safexcel_ahash_cra_exit,
1372				.cra_module = THIS_MODULE,
1373			},
1374		},
1375	},
1376};
1377
1378static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
1379				       unsigned int keylen)
1380{
1381	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
1382					SHA256_DIGEST_SIZE);
1383}
1384
1385static int safexcel_hmac_sha224_init(struct ahash_request *areq)
1386{
1387	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1388	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1389
1390	memset(req, 0, sizeof(*req));
1391
1392	/* Start from ipad precompute */
1393	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1394	/* Already processed the key^ipad part now! */
1395	req->len	= SHA256_BLOCK_SIZE;
1396	req->processed	= SHA256_BLOCK_SIZE;
1397
1398	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1399	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1400	req->state_sz = SHA256_DIGEST_SIZE;
1401	req->digest_sz = SHA256_DIGEST_SIZE;
1402	req->block_sz = SHA256_BLOCK_SIZE;
1403	req->hmac = true;
1404
1405	return 0;
1406}
1407
1408static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
1409{
1410	int ret = safexcel_hmac_sha224_init(areq);
1411
1412	if (ret)
1413		return ret;
1414
1415	return safexcel_ahash_finup(areq);
1416}
1417
1418struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
1419	.type = SAFEXCEL_ALG_TYPE_AHASH,
1420	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1421	.alg.ahash = {
1422		.init = safexcel_hmac_sha224_init,
1423		.update = safexcel_ahash_update,
1424		.final = safexcel_ahash_final,
1425		.finup = safexcel_ahash_finup,
1426		.digest = safexcel_hmac_sha224_digest,
1427		.setkey = safexcel_hmac_sha224_setkey,
1428		.export = safexcel_ahash_export,
1429		.import = safexcel_ahash_import,
1430		.halg = {
1431			.digestsize = SHA224_DIGEST_SIZE,
1432			.statesize = sizeof(struct safexcel_ahash_export_state),
1433			.base = {
1434				.cra_name = "hmac(sha224)",
1435				.cra_driver_name = "safexcel-hmac-sha224",
1436				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1437				.cra_flags = CRYPTO_ALG_ASYNC |
1438					     CRYPTO_ALG_ALLOCATES_MEMORY |
1439					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1440				.cra_blocksize = SHA224_BLOCK_SIZE,
1441				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1442				.cra_init = safexcel_ahash_cra_init,
1443				.cra_exit = safexcel_ahash_cra_exit,
1444				.cra_module = THIS_MODULE,
1445			},
1446		},
1447	},
1448};
1449
1450static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
1451				     unsigned int keylen)
1452{
1453	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
1454					SHA256_DIGEST_SIZE);
1455}
1456
1457static int safexcel_hmac_sha256_init(struct ahash_request *areq)
1458{
1459	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1460	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1461
1462	memset(req, 0, sizeof(*req));
1463
1464	/* Start from ipad precompute */
1465	memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1466	/* Already processed the key^ipad part now! */
1467	req->len	= SHA256_BLOCK_SIZE;
1468	req->processed	= SHA256_BLOCK_SIZE;
1469
1470	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1471	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1472	req->state_sz = SHA256_DIGEST_SIZE;
1473	req->digest_sz = SHA256_DIGEST_SIZE;
1474	req->block_sz = SHA256_BLOCK_SIZE;
1475	req->hmac = true;
1476
1477	return 0;
1478}
1479
1480static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
1481{
1482	int ret = safexcel_hmac_sha256_init(areq);
1483
1484	if (ret)
1485		return ret;
1486
1487	return safexcel_ahash_finup(areq);
1488}
1489
1490struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
1491	.type = SAFEXCEL_ALG_TYPE_AHASH,
1492	.algo_mask = SAFEXCEL_ALG_SHA2_256,
1493	.alg.ahash = {
1494		.init = safexcel_hmac_sha256_init,
1495		.update = safexcel_ahash_update,
1496		.final = safexcel_ahash_final,
1497		.finup = safexcel_ahash_finup,
1498		.digest = safexcel_hmac_sha256_digest,
1499		.setkey = safexcel_hmac_sha256_setkey,
1500		.export = safexcel_ahash_export,
1501		.import = safexcel_ahash_import,
1502		.halg = {
1503			.digestsize = SHA256_DIGEST_SIZE,
1504			.statesize = sizeof(struct safexcel_ahash_export_state),
1505			.base = {
1506				.cra_name = "hmac(sha256)",
1507				.cra_driver_name = "safexcel-hmac-sha256",
1508				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1509				.cra_flags = CRYPTO_ALG_ASYNC |
1510					     CRYPTO_ALG_ALLOCATES_MEMORY |
1511					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1512				.cra_blocksize = SHA256_BLOCK_SIZE,
1513				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1514				.cra_init = safexcel_ahash_cra_init,
1515				.cra_exit = safexcel_ahash_cra_exit,
1516				.cra_module = THIS_MODULE,
1517			},
1518		},
1519	},
1520};
1521
1522static int safexcel_sha512_init(struct ahash_request *areq)
1523{
1524	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1525	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1526
1527	memset(req, 0, sizeof(*req));
1528
1529	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1530	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1531	req->state_sz = SHA512_DIGEST_SIZE;
1532	req->digest_sz = SHA512_DIGEST_SIZE;
1533	req->block_sz = SHA512_BLOCK_SIZE;
1534
1535	return 0;
1536}
1537
1538static int safexcel_sha512_digest(struct ahash_request *areq)
1539{
1540	int ret = safexcel_sha512_init(areq);
1541
1542	if (ret)
1543		return ret;
1544
1545	return safexcel_ahash_finup(areq);
1546}
1547
1548struct safexcel_alg_template safexcel_alg_sha512 = {
1549	.type = SAFEXCEL_ALG_TYPE_AHASH,
1550	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1551	.alg.ahash = {
1552		.init = safexcel_sha512_init,
1553		.update = safexcel_ahash_update,
1554		.final = safexcel_ahash_final,
1555		.finup = safexcel_ahash_finup,
1556		.digest = safexcel_sha512_digest,
1557		.export = safexcel_ahash_export,
1558		.import = safexcel_ahash_import,
1559		.halg = {
1560			.digestsize = SHA512_DIGEST_SIZE,
1561			.statesize = sizeof(struct safexcel_ahash_export_state),
1562			.base = {
1563				.cra_name = "sha512",
1564				.cra_driver_name = "safexcel-sha512",
1565				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1566				.cra_flags = CRYPTO_ALG_ASYNC |
1567					     CRYPTO_ALG_ALLOCATES_MEMORY |
1568					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1569				.cra_blocksize = SHA512_BLOCK_SIZE,
1570				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1571				.cra_init = safexcel_ahash_cra_init,
1572				.cra_exit = safexcel_ahash_cra_exit,
1573				.cra_module = THIS_MODULE,
1574			},
1575		},
1576	},
1577};
1578
1579static int safexcel_sha384_init(struct ahash_request *areq)
1580{
1581	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1582	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1583
1584	memset(req, 0, sizeof(*req));
1585
1586	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1587	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1588	req->state_sz = SHA512_DIGEST_SIZE;
1589	req->digest_sz = SHA512_DIGEST_SIZE;
1590	req->block_sz = SHA512_BLOCK_SIZE;
1591
1592	return 0;
1593}
1594
1595static int safexcel_sha384_digest(struct ahash_request *areq)
1596{
1597	int ret = safexcel_sha384_init(areq);
1598
1599	if (ret)
1600		return ret;
1601
1602	return safexcel_ahash_finup(areq);
1603}
1604
1605struct safexcel_alg_template safexcel_alg_sha384 = {
1606	.type = SAFEXCEL_ALG_TYPE_AHASH,
1607	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1608	.alg.ahash = {
1609		.init = safexcel_sha384_init,
1610		.update = safexcel_ahash_update,
1611		.final = safexcel_ahash_final,
1612		.finup = safexcel_ahash_finup,
1613		.digest = safexcel_sha384_digest,
1614		.export = safexcel_ahash_export,
1615		.import = safexcel_ahash_import,
1616		.halg = {
1617			.digestsize = SHA384_DIGEST_SIZE,
1618			.statesize = sizeof(struct safexcel_ahash_export_state),
1619			.base = {
1620				.cra_name = "sha384",
1621				.cra_driver_name = "safexcel-sha384",
1622				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1623				.cra_flags = CRYPTO_ALG_ASYNC |
1624					     CRYPTO_ALG_ALLOCATES_MEMORY |
1625					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1626				.cra_blocksize = SHA384_BLOCK_SIZE,
1627				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1628				.cra_init = safexcel_ahash_cra_init,
1629				.cra_exit = safexcel_ahash_cra_exit,
1630				.cra_module = THIS_MODULE,
1631			},
1632		},
1633	},
1634};
1635
1636static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
1637				       unsigned int keylen)
1638{
1639	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
1640					SHA512_DIGEST_SIZE);
1641}
1642
1643static int safexcel_hmac_sha512_init(struct ahash_request *areq)
1644{
1645	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1646	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1647
1648	memset(req, 0, sizeof(*req));
1649
1650	/* Start from ipad precompute */
1651	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1652	/* Already processed the key^ipad part now! */
1653	req->len	= SHA512_BLOCK_SIZE;
1654	req->processed	= SHA512_BLOCK_SIZE;
1655
1656	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1657	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1658	req->state_sz = SHA512_DIGEST_SIZE;
1659	req->digest_sz = SHA512_DIGEST_SIZE;
1660	req->block_sz = SHA512_BLOCK_SIZE;
1661	req->hmac = true;
1662
1663	return 0;
1664}
1665
1666static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
1667{
1668	int ret = safexcel_hmac_sha512_init(areq);
1669
1670	if (ret)
1671		return ret;
1672
1673	return safexcel_ahash_finup(areq);
1674}
1675
1676struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
1677	.type = SAFEXCEL_ALG_TYPE_AHASH,
1678	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1679	.alg.ahash = {
1680		.init = safexcel_hmac_sha512_init,
1681		.update = safexcel_ahash_update,
1682		.final = safexcel_ahash_final,
1683		.finup = safexcel_ahash_finup,
1684		.digest = safexcel_hmac_sha512_digest,
1685		.setkey = safexcel_hmac_sha512_setkey,
1686		.export = safexcel_ahash_export,
1687		.import = safexcel_ahash_import,
1688		.halg = {
1689			.digestsize = SHA512_DIGEST_SIZE,
1690			.statesize = sizeof(struct safexcel_ahash_export_state),
1691			.base = {
1692				.cra_name = "hmac(sha512)",
1693				.cra_driver_name = "safexcel-hmac-sha512",
1694				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1695				.cra_flags = CRYPTO_ALG_ASYNC |
1696					     CRYPTO_ALG_ALLOCATES_MEMORY |
1697					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1698				.cra_blocksize = SHA512_BLOCK_SIZE,
1699				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1700				.cra_init = safexcel_ahash_cra_init,
1701				.cra_exit = safexcel_ahash_cra_exit,
1702				.cra_module = THIS_MODULE,
1703			},
1704		},
1705	},
1706};
1707
1708static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
1709				       unsigned int keylen)
1710{
1711	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
1712					SHA512_DIGEST_SIZE);
1713}
1714
1715static int safexcel_hmac_sha384_init(struct ahash_request *areq)
1716{
1717	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1718	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1719
1720	memset(req, 0, sizeof(*req));
1721
1722	/* Start from ipad precompute */
1723	memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1724	/* Already processed the key^ipad part now! */
1725	req->len	= SHA512_BLOCK_SIZE;
1726	req->processed	= SHA512_BLOCK_SIZE;
1727
1728	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1729	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1730	req->state_sz = SHA512_DIGEST_SIZE;
1731	req->digest_sz = SHA512_DIGEST_SIZE;
1732	req->block_sz = SHA512_BLOCK_SIZE;
1733	req->hmac = true;
1734
1735	return 0;
1736}
1737
1738static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
1739{
1740	int ret = safexcel_hmac_sha384_init(areq);
1741
1742	if (ret)
1743		return ret;
1744
1745	return safexcel_ahash_finup(areq);
1746}
1747
1748struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
1749	.type = SAFEXCEL_ALG_TYPE_AHASH,
1750	.algo_mask = SAFEXCEL_ALG_SHA2_512,
1751	.alg.ahash = {
1752		.init = safexcel_hmac_sha384_init,
1753		.update = safexcel_ahash_update,
1754		.final = safexcel_ahash_final,
1755		.finup = safexcel_ahash_finup,
1756		.digest = safexcel_hmac_sha384_digest,
1757		.setkey = safexcel_hmac_sha384_setkey,
1758		.export = safexcel_ahash_export,
1759		.import = safexcel_ahash_import,
1760		.halg = {
1761			.digestsize = SHA384_DIGEST_SIZE,
1762			.statesize = sizeof(struct safexcel_ahash_export_state),
1763			.base = {
1764				.cra_name = "hmac(sha384)",
1765				.cra_driver_name = "safexcel-hmac-sha384",
1766				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1767				.cra_flags = CRYPTO_ALG_ASYNC |
1768					     CRYPTO_ALG_ALLOCATES_MEMORY |
1769					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1770				.cra_blocksize = SHA384_BLOCK_SIZE,
1771				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1772				.cra_init = safexcel_ahash_cra_init,
1773				.cra_exit = safexcel_ahash_cra_exit,
1774				.cra_module = THIS_MODULE,
1775			},
1776		},
1777	},
1778};
1779
1780static int safexcel_md5_init(struct ahash_request *areq)
1781{
1782	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1783	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1784
1785	memset(req, 0, sizeof(*req));
1786
1787	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1788	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1789	req->state_sz = MD5_DIGEST_SIZE;
1790	req->digest_sz = MD5_DIGEST_SIZE;
1791	req->block_sz = MD5_HMAC_BLOCK_SIZE;
1792
1793	return 0;
1794}
1795
1796static int safexcel_md5_digest(struct ahash_request *areq)
1797{
1798	int ret = safexcel_md5_init(areq);
1799
1800	if (ret)
1801		return ret;
1802
1803	return safexcel_ahash_finup(areq);
1804}
1805
1806struct safexcel_alg_template safexcel_alg_md5 = {
1807	.type = SAFEXCEL_ALG_TYPE_AHASH,
1808	.algo_mask = SAFEXCEL_ALG_MD5,
1809	.alg.ahash = {
1810		.init = safexcel_md5_init,
1811		.update = safexcel_ahash_update,
1812		.final = safexcel_ahash_final,
1813		.finup = safexcel_ahash_finup,
1814		.digest = safexcel_md5_digest,
1815		.export = safexcel_ahash_export,
1816		.import = safexcel_ahash_import,
1817		.halg = {
1818			.digestsize = MD5_DIGEST_SIZE,
1819			.statesize = sizeof(struct safexcel_ahash_export_state),
1820			.base = {
1821				.cra_name = "md5",
1822				.cra_driver_name = "safexcel-md5",
1823				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1824				.cra_flags = CRYPTO_ALG_ASYNC |
1825					     CRYPTO_ALG_ALLOCATES_MEMORY |
1826					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1827				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1828				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1829				.cra_init = safexcel_ahash_cra_init,
1830				.cra_exit = safexcel_ahash_cra_exit,
1831				.cra_module = THIS_MODULE,
1832			},
1833		},
1834	},
1835};
1836
1837static int safexcel_hmac_md5_init(struct ahash_request *areq)
1838{
1839	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1840	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1841
1842	memset(req, 0, sizeof(*req));
1843
1844	/* Start from ipad precompute */
1845	memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
1846	/* Already processed the key^ipad part now! */
1847	req->len	= MD5_HMAC_BLOCK_SIZE;
1848	req->processed	= MD5_HMAC_BLOCK_SIZE;
1849
1850	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1851	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1852	req->state_sz = MD5_DIGEST_SIZE;
1853	req->digest_sz = MD5_DIGEST_SIZE;
1854	req->block_sz = MD5_HMAC_BLOCK_SIZE;
1855	req->len_is_le = true; /* MD5 is little endian! ... */
1856	req->hmac = true;
1857
1858	return 0;
1859}
1860
1861static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
1862				     unsigned int keylen)
1863{
1864	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
1865					MD5_DIGEST_SIZE);
1866}
1867
1868static int safexcel_hmac_md5_digest(struct ahash_request *areq)
1869{
1870	int ret = safexcel_hmac_md5_init(areq);
1871
1872	if (ret)
1873		return ret;
1874
1875	return safexcel_ahash_finup(areq);
1876}
1877
1878struct safexcel_alg_template safexcel_alg_hmac_md5 = {
1879	.type = SAFEXCEL_ALG_TYPE_AHASH,
1880	.algo_mask = SAFEXCEL_ALG_MD5,
1881	.alg.ahash = {
1882		.init = safexcel_hmac_md5_init,
1883		.update = safexcel_ahash_update,
1884		.final = safexcel_ahash_final,
1885		.finup = safexcel_ahash_finup,
1886		.digest = safexcel_hmac_md5_digest,
1887		.setkey = safexcel_hmac_md5_setkey,
1888		.export = safexcel_ahash_export,
1889		.import = safexcel_ahash_import,
1890		.halg = {
1891			.digestsize = MD5_DIGEST_SIZE,
1892			.statesize = sizeof(struct safexcel_ahash_export_state),
1893			.base = {
1894				.cra_name = "hmac(md5)",
1895				.cra_driver_name = "safexcel-hmac-md5",
1896				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1897				.cra_flags = CRYPTO_ALG_ASYNC |
1898					     CRYPTO_ALG_ALLOCATES_MEMORY |
1899					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1900				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1901				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1902				.cra_init = safexcel_ahash_cra_init,
1903				.cra_exit = safexcel_ahash_cra_exit,
1904				.cra_module = THIS_MODULE,
1905			},
1906		},
1907	},
1908};
1909
1910static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
1911{
1912	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
1913	int ret = safexcel_ahash_cra_init(tfm);
1914
1915	/* Default 'key' is all zeroes */
1916	memset(&ctx->base.ipad, 0, sizeof(u32));
1917	return ret;
1918}
1919
1920static int safexcel_crc32_init(struct ahash_request *areq)
1921{
1922	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1923	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1924
1925	memset(req, 0, sizeof(*req));
1926
1927	/* Start from loaded key */
1928	req->state[0]	= cpu_to_le32(~ctx->base.ipad.word[0]);
1929	/* Set processed to non-zero to enable invalidation detection */
1930	req->len	= sizeof(u32);
1931	req->processed	= sizeof(u32);
1932
1933	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
1934	req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1935	req->state_sz = sizeof(u32);
1936	req->digest_sz = sizeof(u32);
1937	req->block_sz = sizeof(u32);
1938
1939	return 0;
1940}
1941
1942static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
1943				 unsigned int keylen)
1944{
1945	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1946
1947	if (keylen != sizeof(u32))
1948		return -EINVAL;
1949
1950	memcpy(&ctx->base.ipad, key, sizeof(u32));
1951	return 0;
1952}
1953
1954static int safexcel_crc32_digest(struct ahash_request *areq)
1955{
1956	return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
1957}
1958
1959struct safexcel_alg_template safexcel_alg_crc32 = {
1960	.type = SAFEXCEL_ALG_TYPE_AHASH,
1961	.algo_mask = 0,
1962	.alg.ahash = {
1963		.init = safexcel_crc32_init,
1964		.update = safexcel_ahash_update,
1965		.final = safexcel_ahash_final,
1966		.finup = safexcel_ahash_finup,
1967		.digest = safexcel_crc32_digest,
1968		.setkey = safexcel_crc32_setkey,
1969		.export = safexcel_ahash_export,
1970		.import = safexcel_ahash_import,
1971		.halg = {
1972			.digestsize = sizeof(u32),
1973			.statesize = sizeof(struct safexcel_ahash_export_state),
1974			.base = {
1975				.cra_name = "crc32",
1976				.cra_driver_name = "safexcel-crc32",
1977				.cra_priority = SAFEXCEL_CRA_PRIORITY,
1978				.cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
1979					     CRYPTO_ALG_ASYNC |
1980					     CRYPTO_ALG_ALLOCATES_MEMORY |
1981					     CRYPTO_ALG_KERN_DRIVER_ONLY,
1982				.cra_blocksize = 1,
1983				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1984				.cra_init = safexcel_crc32_cra_init,
1985				.cra_exit = safexcel_ahash_cra_exit,
1986				.cra_module = THIS_MODULE,
1987			},
1988		},
1989	},
1990};
1991
1992static int safexcel_cbcmac_init(struct ahash_request *areq)
1993{
1994	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1995	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1996
1997	memset(req, 0, sizeof(*req));
1998
1999	/* Start from loaded keys */
2000	memcpy(req->state, &ctx->base.ipad, ctx->key_sz);
2001	/* Set processed to non-zero to enable invalidation detection */
2002	req->len	= AES_BLOCK_SIZE;
2003	req->processed	= AES_BLOCK_SIZE;
2004
2005	req->digest   = CONTEXT_CONTROL_DIGEST_XCM;
2006	req->state_sz = ctx->key_sz;
2007	req->digest_sz = AES_BLOCK_SIZE;
2008	req->block_sz = AES_BLOCK_SIZE;
2009	req->xcbcmac  = true;
2010
2011	return 0;
2012}
2013
2014static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2015				 unsigned int len)
2016{
2017	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2018	struct crypto_aes_ctx aes;
2019	int ret, i;
2020
2021	ret = aes_expandkey(&aes, key, len);
2022	if (ret)
2023		return ret;
2024
2025	memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE);
2026	for (i = 0; i < len / sizeof(u32); i++)
2027		ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);
2028
2029	if (len == AES_KEYSIZE_192) {
2030		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2031		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2032	} else if (len == AES_KEYSIZE_256) {
2033		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2034		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2035	} else {
2036		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2037		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2038	}
2039	ctx->cbcmac  = true;
2040
2041	memzero_explicit(&aes, sizeof(aes));
2042	return 0;
2043}
2044
2045static int safexcel_cbcmac_digest(struct ahash_request *areq)
2046{
2047	return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
2048}
2049
2050struct safexcel_alg_template safexcel_alg_cbcmac = {
2051	.type = SAFEXCEL_ALG_TYPE_AHASH,
2052	.algo_mask = 0,
2053	.alg.ahash = {
2054		.init = safexcel_cbcmac_init,
2055		.update = safexcel_ahash_update,
2056		.final = safexcel_ahash_final,
2057		.finup = safexcel_ahash_finup,
2058		.digest = safexcel_cbcmac_digest,
2059		.setkey = safexcel_cbcmac_setkey,
2060		.export = safexcel_ahash_export,
2061		.import = safexcel_ahash_import,
2062		.halg = {
2063			.digestsize = AES_BLOCK_SIZE,
2064			.statesize = sizeof(struct safexcel_ahash_export_state),
2065			.base = {
2066				.cra_name = "cbcmac(aes)",
2067				.cra_driver_name = "safexcel-cbcmac-aes",
2068				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2069				.cra_flags = CRYPTO_ALG_ASYNC |
2070					     CRYPTO_ALG_ALLOCATES_MEMORY |
2071					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2072				.cra_blocksize = 1,
2073				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2074				.cra_init = safexcel_ahash_cra_init,
2075				.cra_exit = safexcel_ahash_cra_exit,
2076				.cra_module = THIS_MODULE,
2077			},
2078		},
2079	},
2080};
2081
2082static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2083				 unsigned int len)
2084{
2085	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2086	struct crypto_aes_ctx aes;
2087	u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
2088	int ret, i;
2089
2090	ret = aes_expandkey(&aes, key, len);
2091	if (ret)
2092		return ret;
2093
2094	/* precompute the XCBC key material */
2095	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2096	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2097				CRYPTO_TFM_REQ_MASK);
2098	ret = crypto_cipher_setkey(ctx->kaes, key, len);
2099	if (ret)
2100		return ret;
2101
2102	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2103		"\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
2104	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
2105		"\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
2106	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
2107		"\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
2108	for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2109		ctx->base.ipad.word[i] = swab(key_tmp[i]);
2110
2111	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2112	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2113				CRYPTO_TFM_REQ_MASK);
2114	ret = crypto_cipher_setkey(ctx->kaes,
2115				   (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2116				   AES_MIN_KEY_SIZE);
2117	if (ret)
2118		return ret;
2119
2120	ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2121	ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2122	ctx->cbcmac = false;
2123
2124	memzero_explicit(&aes, sizeof(aes));
2125	return 0;
2126}
2127
2128static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2129{
2130	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2131
2132	safexcel_ahash_cra_init(tfm);
2133	ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
2134	return PTR_ERR_OR_ZERO(ctx->kaes);
2135}
2136
2137static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
2138{
2139	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2140
2141	crypto_free_cipher(ctx->kaes);
2142	safexcel_ahash_cra_exit(tfm);
2143}
2144
2145struct safexcel_alg_template safexcel_alg_xcbcmac = {
2146	.type = SAFEXCEL_ALG_TYPE_AHASH,
2147	.algo_mask = 0,
2148	.alg.ahash = {
2149		.init = safexcel_cbcmac_init,
2150		.update = safexcel_ahash_update,
2151		.final = safexcel_ahash_final,
2152		.finup = safexcel_ahash_finup,
2153		.digest = safexcel_cbcmac_digest,
2154		.setkey = safexcel_xcbcmac_setkey,
2155		.export = safexcel_ahash_export,
2156		.import = safexcel_ahash_import,
2157		.halg = {
2158			.digestsize = AES_BLOCK_SIZE,
2159			.statesize = sizeof(struct safexcel_ahash_export_state),
2160			.base = {
2161				.cra_name = "xcbc(aes)",
2162				.cra_driver_name = "safexcel-xcbc-aes",
2163				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2164				.cra_flags = CRYPTO_ALG_ASYNC |
2165					     CRYPTO_ALG_ALLOCATES_MEMORY |
2166					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2167				.cra_blocksize = AES_BLOCK_SIZE,
2168				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2169				.cra_init = safexcel_xcbcmac_cra_init,
2170				.cra_exit = safexcel_xcbcmac_cra_exit,
2171				.cra_module = THIS_MODULE,
2172			},
2173		},
2174	},
2175};
2176
2177static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2178				unsigned int len)
2179{
2180	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2181	struct crypto_aes_ctx aes;
2182	__be64 consts[4];
2183	u64 _const[2];
2184	u8 msb_mask, gfmask;
2185	int ret, i;
2186
2187	ret = aes_expandkey(&aes, key, len);
 
2188	if (ret)
2189		return ret;
2190
2191	for (i = 0; i < len / sizeof(u32); i++)
2192		ctx->base.ipad.word[i + 8] = swab(aes.key_enc[i]);
2193
2194	/* precompute the CMAC key material */
2195	crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2196	crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2197				CRYPTO_TFM_REQ_MASK);
2198	ret = crypto_cipher_setkey(ctx->kaes, key, len);
2199	if (ret)
2200		return ret;
2201
2202	/* code below borrowed from crypto/cmac.c */
2203	/* encrypt the zero block */
2204	memset(consts, 0, AES_BLOCK_SIZE);
2205	crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
2206
2207	gfmask = 0x87;
2208	_const[0] = be64_to_cpu(consts[1]);
2209	_const[1] = be64_to_cpu(consts[0]);
2210
2211	/* gf(2^128) multiply zero-ciphertext with u and u^2 */
2212	for (i = 0; i < 4; i += 2) {
2213		msb_mask = ((s64)_const[1] >> 63) & gfmask;
2214		_const[1] = (_const[1] << 1) | (_const[0] >> 63);
2215		_const[0] = (_const[0] << 1) ^ msb_mask;
2216
2217		consts[i + 0] = cpu_to_be64(_const[1]);
2218		consts[i + 1] = cpu_to_be64(_const[0]);
2219	}
2220	/* end of code borrowed from crypto/cmac.c */
2221
2222	for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2223		ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]);
2224
2225	if (len == AES_KEYSIZE_192) {
2226		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2227		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2228	} else if (len == AES_KEYSIZE_256) {
2229		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2230		ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2231	} else {
2232		ctx->alg    = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2233		ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2234	}
2235	ctx->cbcmac = false;
2236
2237	memzero_explicit(&aes, sizeof(aes));
2238	return 0;
2239}
2240
2241struct safexcel_alg_template safexcel_alg_cmac = {
2242	.type = SAFEXCEL_ALG_TYPE_AHASH,
2243	.algo_mask = 0,
2244	.alg.ahash = {
2245		.init = safexcel_cbcmac_init,
2246		.update = safexcel_ahash_update,
2247		.final = safexcel_ahash_final,
2248		.finup = safexcel_ahash_finup,
2249		.digest = safexcel_cbcmac_digest,
2250		.setkey = safexcel_cmac_setkey,
2251		.export = safexcel_ahash_export,
2252		.import = safexcel_ahash_import,
2253		.halg = {
2254			.digestsize = AES_BLOCK_SIZE,
2255			.statesize = sizeof(struct safexcel_ahash_export_state),
2256			.base = {
2257				.cra_name = "cmac(aes)",
2258				.cra_driver_name = "safexcel-cmac-aes",
2259				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2260				.cra_flags = CRYPTO_ALG_ASYNC |
2261					     CRYPTO_ALG_ALLOCATES_MEMORY |
2262					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2263				.cra_blocksize = AES_BLOCK_SIZE,
2264				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2265				.cra_init = safexcel_xcbcmac_cra_init,
2266				.cra_exit = safexcel_xcbcmac_cra_exit,
2267				.cra_module = THIS_MODULE,
2268			},
2269		},
2270	},
2271};
2272
2273static int safexcel_sm3_init(struct ahash_request *areq)
2274{
2275	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2276	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2277
2278	memset(req, 0, sizeof(*req));
2279
2280	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2281	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2282	req->state_sz = SM3_DIGEST_SIZE;
2283	req->digest_sz = SM3_DIGEST_SIZE;
2284	req->block_sz = SM3_BLOCK_SIZE;
2285
2286	return 0;
2287}
2288
2289static int safexcel_sm3_digest(struct ahash_request *areq)
2290{
2291	int ret = safexcel_sm3_init(areq);
2292
2293	if (ret)
2294		return ret;
2295
2296	return safexcel_ahash_finup(areq);
2297}
2298
2299struct safexcel_alg_template safexcel_alg_sm3 = {
2300	.type = SAFEXCEL_ALG_TYPE_AHASH,
2301	.algo_mask = SAFEXCEL_ALG_SM3,
2302	.alg.ahash = {
2303		.init = safexcel_sm3_init,
2304		.update = safexcel_ahash_update,
2305		.final = safexcel_ahash_final,
2306		.finup = safexcel_ahash_finup,
2307		.digest = safexcel_sm3_digest,
2308		.export = safexcel_ahash_export,
2309		.import = safexcel_ahash_import,
2310		.halg = {
2311			.digestsize = SM3_DIGEST_SIZE,
2312			.statesize = sizeof(struct safexcel_ahash_export_state),
2313			.base = {
2314				.cra_name = "sm3",
2315				.cra_driver_name = "safexcel-sm3",
2316				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2317				.cra_flags = CRYPTO_ALG_ASYNC |
2318					     CRYPTO_ALG_ALLOCATES_MEMORY |
2319					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2320				.cra_blocksize = SM3_BLOCK_SIZE,
2321				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2322				.cra_init = safexcel_ahash_cra_init,
2323				.cra_exit = safexcel_ahash_cra_exit,
2324				.cra_module = THIS_MODULE,
2325			},
2326		},
2327	},
2328};
2329
2330static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
2331				    unsigned int keylen)
2332{
2333	return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
2334					SM3_DIGEST_SIZE);
2335}
2336
2337static int safexcel_hmac_sm3_init(struct ahash_request *areq)
2338{
2339	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2340	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2341
2342	memset(req, 0, sizeof(*req));
2343
2344	/* Start from ipad precompute */
2345	memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE);
2346	/* Already processed the key^ipad part now! */
2347	req->len	= SM3_BLOCK_SIZE;
2348	req->processed	= SM3_BLOCK_SIZE;
2349
2350	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2351	req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2352	req->state_sz = SM3_DIGEST_SIZE;
2353	req->digest_sz = SM3_DIGEST_SIZE;
2354	req->block_sz = SM3_BLOCK_SIZE;
2355	req->hmac = true;
2356
2357	return 0;
2358}
2359
2360static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
2361{
2362	int ret = safexcel_hmac_sm3_init(areq);
2363
2364	if (ret)
2365		return ret;
2366
2367	return safexcel_ahash_finup(areq);
2368}
2369
2370struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
2371	.type = SAFEXCEL_ALG_TYPE_AHASH,
2372	.algo_mask = SAFEXCEL_ALG_SM3,
2373	.alg.ahash = {
2374		.init = safexcel_hmac_sm3_init,
2375		.update = safexcel_ahash_update,
2376		.final = safexcel_ahash_final,
2377		.finup = safexcel_ahash_finup,
2378		.digest = safexcel_hmac_sm3_digest,
2379		.setkey = safexcel_hmac_sm3_setkey,
2380		.export = safexcel_ahash_export,
2381		.import = safexcel_ahash_import,
2382		.halg = {
2383			.digestsize = SM3_DIGEST_SIZE,
2384			.statesize = sizeof(struct safexcel_ahash_export_state),
2385			.base = {
2386				.cra_name = "hmac(sm3)",
2387				.cra_driver_name = "safexcel-hmac-sm3",
2388				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2389				.cra_flags = CRYPTO_ALG_ASYNC |
2390					     CRYPTO_ALG_ALLOCATES_MEMORY |
2391					     CRYPTO_ALG_KERN_DRIVER_ONLY,
2392				.cra_blocksize = SM3_BLOCK_SIZE,
2393				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2394				.cra_init = safexcel_ahash_cra_init,
2395				.cra_exit = safexcel_ahash_cra_exit,
2396				.cra_module = THIS_MODULE,
2397			},
2398		},
2399	},
2400};
2401
2402static int safexcel_sha3_224_init(struct ahash_request *areq)
2403{
2404	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2405	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2406	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2407
2408	memset(req, 0, sizeof(*req));
2409
2410	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2411	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2412	req->state_sz = SHA3_224_DIGEST_SIZE;
2413	req->digest_sz = SHA3_224_DIGEST_SIZE;
2414	req->block_sz = SHA3_224_BLOCK_SIZE;
2415	ctx->do_fallback = false;
2416	ctx->fb_init_done = false;
2417	return 0;
2418}
2419
2420static int safexcel_sha3_fbcheck(struct ahash_request *req)
2421{
2422	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2423	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2424	struct ahash_request *subreq = ahash_request_ctx(req);
2425	int ret = 0;
2426
2427	if (ctx->do_fallback) {
2428		ahash_request_set_tfm(subreq, ctx->fback);
2429		ahash_request_set_callback(subreq, req->base.flags,
2430					   req->base.complete, req->base.data);
2431		ahash_request_set_crypt(subreq, req->src, req->result,
2432					req->nbytes);
2433		if (!ctx->fb_init_done) {
2434			if (ctx->fb_do_setkey) {
2435				/* Set fallback cipher HMAC key */
2436				u8 key[SHA3_224_BLOCK_SIZE];
2437
2438				memcpy(key, &ctx->base.ipad,
2439				       crypto_ahash_blocksize(ctx->fback) / 2);
2440				memcpy(key +
2441				       crypto_ahash_blocksize(ctx->fback) / 2,
2442				       &ctx->base.opad,
2443				       crypto_ahash_blocksize(ctx->fback) / 2);
2444				ret = crypto_ahash_setkey(ctx->fback, key,
2445					crypto_ahash_blocksize(ctx->fback));
2446				memzero_explicit(key,
2447					crypto_ahash_blocksize(ctx->fback));
2448				ctx->fb_do_setkey = false;
2449			}
2450			ret = ret ?: crypto_ahash_init(subreq);
2451			ctx->fb_init_done = true;
2452		}
2453	}
2454	return ret;
2455}
2456
2457static int safexcel_sha3_update(struct ahash_request *req)
2458{
2459	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2460	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2461	struct ahash_request *subreq = ahash_request_ctx(req);
2462
2463	ctx->do_fallback = true;
2464	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
2465}
2466
2467static int safexcel_sha3_final(struct ahash_request *req)
2468{
2469	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2470	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2471	struct ahash_request *subreq = ahash_request_ctx(req);
2472
2473	ctx->do_fallback = true;
2474	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
2475}
2476
2477static int safexcel_sha3_finup(struct ahash_request *req)
2478{
2479	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2480	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2481	struct ahash_request *subreq = ahash_request_ctx(req);
2482
2483	ctx->do_fallback |= !req->nbytes;
2484	if (ctx->do_fallback)
2485		/* Update or ex/import happened or len 0, cannot use the HW */
2486		return safexcel_sha3_fbcheck(req) ?:
2487		       crypto_ahash_finup(subreq);
2488	else
2489		return safexcel_ahash_finup(req);
2490}
2491
2492static int safexcel_sha3_digest_fallback(struct ahash_request *req)
2493{
2494	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2495	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2496	struct ahash_request *subreq = ahash_request_ctx(req);
2497
2498	ctx->do_fallback = true;
2499	ctx->fb_init_done = false;
2500	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
2501}
2502
2503static int safexcel_sha3_224_digest(struct ahash_request *req)
2504{
2505	if (req->nbytes)
2506		return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
2507
2508	/* HW cannot do zero length hash, use fallback instead */
2509	return safexcel_sha3_digest_fallback(req);
2510}
2511
2512static int safexcel_sha3_export(struct ahash_request *req, void *out)
2513{
2514	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2515	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2516	struct ahash_request *subreq = ahash_request_ctx(req);
2517
2518	ctx->do_fallback = true;
2519	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
2520}
2521
2522static int safexcel_sha3_import(struct ahash_request *req, const void *in)
2523{
2524	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2525	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2526	struct ahash_request *subreq = ahash_request_ctx(req);
2527
2528	ctx->do_fallback = true;
2529	return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
2530	// return safexcel_ahash_import(req, in);
2531}
2532
2533static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
2534{
2535	struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
2536	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2537
2538	safexcel_ahash_cra_init(tfm);
2539
2540	/* Allocate fallback implementation */
2541	ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
2542					CRYPTO_ALG_ASYNC |
2543					CRYPTO_ALG_NEED_FALLBACK);
2544	if (IS_ERR(ctx->fback))
2545		return PTR_ERR(ctx->fback);
2546
2547	/* Update statesize from fallback algorithm! */
2548	crypto_hash_alg_common(ahash)->statesize =
2549		crypto_ahash_statesize(ctx->fback);
2550	crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
2551					    sizeof(struct ahash_request) +
2552					    crypto_ahash_reqsize(ctx->fback)));
 
2553	return 0;
2554}
2555
2556static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
2557{
2558	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2559
2560	crypto_free_ahash(ctx->fback);
2561	safexcel_ahash_cra_exit(tfm);
2562}
2563
2564struct safexcel_alg_template safexcel_alg_sha3_224 = {
2565	.type = SAFEXCEL_ALG_TYPE_AHASH,
2566	.algo_mask = SAFEXCEL_ALG_SHA3,
2567	.alg.ahash = {
2568		.init = safexcel_sha3_224_init,
2569		.update = safexcel_sha3_update,
2570		.final = safexcel_sha3_final,
2571		.finup = safexcel_sha3_finup,
2572		.digest = safexcel_sha3_224_digest,
2573		.export = safexcel_sha3_export,
2574		.import = safexcel_sha3_import,
2575		.halg = {
2576			.digestsize = SHA3_224_DIGEST_SIZE,
2577			.statesize = sizeof(struct safexcel_ahash_export_state),
2578			.base = {
2579				.cra_name = "sha3-224",
2580				.cra_driver_name = "safexcel-sha3-224",
2581				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2582				.cra_flags = CRYPTO_ALG_ASYNC |
2583					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2584					     CRYPTO_ALG_NEED_FALLBACK,
2585				.cra_blocksize = SHA3_224_BLOCK_SIZE,
2586				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2587				.cra_init = safexcel_sha3_cra_init,
2588				.cra_exit = safexcel_sha3_cra_exit,
2589				.cra_module = THIS_MODULE,
2590			},
2591		},
2592	},
2593};
2594
2595static int safexcel_sha3_256_init(struct ahash_request *areq)
2596{
2597	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2598	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2599	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2600
2601	memset(req, 0, sizeof(*req));
2602
2603	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2604	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2605	req->state_sz = SHA3_256_DIGEST_SIZE;
2606	req->digest_sz = SHA3_256_DIGEST_SIZE;
2607	req->block_sz = SHA3_256_BLOCK_SIZE;
2608	ctx->do_fallback = false;
2609	ctx->fb_init_done = false;
2610	return 0;
2611}
2612
2613static int safexcel_sha3_256_digest(struct ahash_request *req)
2614{
2615	if (req->nbytes)
2616		return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
2617
2618	/* HW cannot do zero length hash, use fallback instead */
2619	return safexcel_sha3_digest_fallback(req);
2620}
2621
2622struct safexcel_alg_template safexcel_alg_sha3_256 = {
2623	.type = SAFEXCEL_ALG_TYPE_AHASH,
2624	.algo_mask = SAFEXCEL_ALG_SHA3,
2625	.alg.ahash = {
2626		.init = safexcel_sha3_256_init,
2627		.update = safexcel_sha3_update,
2628		.final = safexcel_sha3_final,
2629		.finup = safexcel_sha3_finup,
2630		.digest = safexcel_sha3_256_digest,
2631		.export = safexcel_sha3_export,
2632		.import = safexcel_sha3_import,
2633		.halg = {
2634			.digestsize = SHA3_256_DIGEST_SIZE,
2635			.statesize = sizeof(struct safexcel_ahash_export_state),
2636			.base = {
2637				.cra_name = "sha3-256",
2638				.cra_driver_name = "safexcel-sha3-256",
2639				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2640				.cra_flags = CRYPTO_ALG_ASYNC |
2641					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2642					     CRYPTO_ALG_NEED_FALLBACK,
2643				.cra_blocksize = SHA3_256_BLOCK_SIZE,
2644				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2645				.cra_init = safexcel_sha3_cra_init,
2646				.cra_exit = safexcel_sha3_cra_exit,
2647				.cra_module = THIS_MODULE,
2648			},
2649		},
2650	},
2651};
2652
2653static int safexcel_sha3_384_init(struct ahash_request *areq)
2654{
2655	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2656	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2657	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2658
2659	memset(req, 0, sizeof(*req));
2660
2661	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2662	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2663	req->state_sz = SHA3_384_DIGEST_SIZE;
2664	req->digest_sz = SHA3_384_DIGEST_SIZE;
2665	req->block_sz = SHA3_384_BLOCK_SIZE;
2666	ctx->do_fallback = false;
2667	ctx->fb_init_done = false;
2668	return 0;
2669}
2670
2671static int safexcel_sha3_384_digest(struct ahash_request *req)
2672{
2673	if (req->nbytes)
2674		return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
2675
2676	/* HW cannot do zero length hash, use fallback instead */
2677	return safexcel_sha3_digest_fallback(req);
2678}
2679
2680struct safexcel_alg_template safexcel_alg_sha3_384 = {
2681	.type = SAFEXCEL_ALG_TYPE_AHASH,
2682	.algo_mask = SAFEXCEL_ALG_SHA3,
2683	.alg.ahash = {
2684		.init = safexcel_sha3_384_init,
2685		.update = safexcel_sha3_update,
2686		.final = safexcel_sha3_final,
2687		.finup = safexcel_sha3_finup,
2688		.digest = safexcel_sha3_384_digest,
2689		.export = safexcel_sha3_export,
2690		.import = safexcel_sha3_import,
2691		.halg = {
2692			.digestsize = SHA3_384_DIGEST_SIZE,
2693			.statesize = sizeof(struct safexcel_ahash_export_state),
2694			.base = {
2695				.cra_name = "sha3-384",
2696				.cra_driver_name = "safexcel-sha3-384",
2697				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2698				.cra_flags = CRYPTO_ALG_ASYNC |
2699					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2700					     CRYPTO_ALG_NEED_FALLBACK,
2701				.cra_blocksize = SHA3_384_BLOCK_SIZE,
2702				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2703				.cra_init = safexcel_sha3_cra_init,
2704				.cra_exit = safexcel_sha3_cra_exit,
2705				.cra_module = THIS_MODULE,
2706			},
2707		},
2708	},
2709};
2710
2711static int safexcel_sha3_512_init(struct ahash_request *areq)
2712{
2713	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2714	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2715	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2716
2717	memset(req, 0, sizeof(*req));
2718
2719	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
2720	req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2721	req->state_sz = SHA3_512_DIGEST_SIZE;
2722	req->digest_sz = SHA3_512_DIGEST_SIZE;
2723	req->block_sz = SHA3_512_BLOCK_SIZE;
2724	ctx->do_fallback = false;
2725	ctx->fb_init_done = false;
2726	return 0;
2727}
2728
2729static int safexcel_sha3_512_digest(struct ahash_request *req)
2730{
2731	if (req->nbytes)
2732		return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
2733
2734	/* HW cannot do zero length hash, use fallback instead */
2735	return safexcel_sha3_digest_fallback(req);
2736}
2737
2738struct safexcel_alg_template safexcel_alg_sha3_512 = {
2739	.type = SAFEXCEL_ALG_TYPE_AHASH,
2740	.algo_mask = SAFEXCEL_ALG_SHA3,
2741	.alg.ahash = {
2742		.init = safexcel_sha3_512_init,
2743		.update = safexcel_sha3_update,
2744		.final = safexcel_sha3_final,
2745		.finup = safexcel_sha3_finup,
2746		.digest = safexcel_sha3_512_digest,
2747		.export = safexcel_sha3_export,
2748		.import = safexcel_sha3_import,
2749		.halg = {
2750			.digestsize = SHA3_512_DIGEST_SIZE,
2751			.statesize = sizeof(struct safexcel_ahash_export_state),
2752			.base = {
2753				.cra_name = "sha3-512",
2754				.cra_driver_name = "safexcel-sha3-512",
2755				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2756				.cra_flags = CRYPTO_ALG_ASYNC |
2757					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2758					     CRYPTO_ALG_NEED_FALLBACK,
2759				.cra_blocksize = SHA3_512_BLOCK_SIZE,
2760				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2761				.cra_init = safexcel_sha3_cra_init,
2762				.cra_exit = safexcel_sha3_cra_exit,
2763				.cra_module = THIS_MODULE,
2764			},
2765		},
2766	},
2767};
2768
2769static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
2770{
2771	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2772	int ret;
2773
2774	ret = safexcel_sha3_cra_init(tfm);
2775	if (ret)
2776		return ret;
2777
2778	/* Allocate precalc basic digest implementation */
2779	ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
2780	if (IS_ERR(ctx->shpre))
2781		return PTR_ERR(ctx->shpre);
2782
2783	ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
2784			      crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
2785	if (!ctx->shdesc) {
2786		crypto_free_shash(ctx->shpre);
2787		return -ENOMEM;
2788	}
2789	ctx->shdesc->tfm = ctx->shpre;
2790	return 0;
2791}
2792
2793static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
2794{
2795	struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2796
2797	crypto_free_ahash(ctx->fback);
2798	crypto_free_shash(ctx->shpre);
2799	kfree(ctx->shdesc);
2800	safexcel_ahash_cra_exit(tfm);
2801}
2802
2803static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
2804				     unsigned int keylen)
2805{
2806	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2807	int ret = 0;
2808
2809	if (keylen > crypto_ahash_blocksize(tfm)) {
2810		/*
2811		 * If the key is larger than the blocksize, then hash it
2812		 * first using our fallback cipher
2813		 */
2814		ret = crypto_shash_digest(ctx->shdesc, key, keylen,
2815					  ctx->base.ipad.byte);
2816		keylen = crypto_shash_digestsize(ctx->shpre);
2817
2818		/*
2819		 * If the digest is larger than half the blocksize, we need to
2820		 * move the rest to opad due to the way our HMAC infra works.
2821		 */
2822		if (keylen > crypto_ahash_blocksize(tfm) / 2)
2823			/* Buffers overlap, need to use memmove iso memcpy! */
2824			memmove(&ctx->base.opad,
2825				ctx->base.ipad.byte +
2826					crypto_ahash_blocksize(tfm) / 2,
2827				keylen - crypto_ahash_blocksize(tfm) / 2);
2828	} else {
2829		/*
2830		 * Copy the key to our ipad & opad buffers
2831		 * Note that ipad and opad each contain one half of the key,
2832		 * to match the existing HMAC driver infrastructure.
2833		 */
2834		if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2835			memcpy(&ctx->base.ipad, key, keylen);
2836		} else {
2837			memcpy(&ctx->base.ipad, key,
2838			       crypto_ahash_blocksize(tfm) / 2);
2839			memcpy(&ctx->base.opad,
2840			       key + crypto_ahash_blocksize(tfm) / 2,
2841			       keylen - crypto_ahash_blocksize(tfm) / 2);
2842		}
2843	}
2844
2845	/* Pad key with zeroes */
2846	if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2847		memset(ctx->base.ipad.byte + keylen, 0,
2848		       crypto_ahash_blocksize(tfm) / 2 - keylen);
2849		memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
2850	} else {
2851		memset(ctx->base.opad.byte + keylen -
2852		       crypto_ahash_blocksize(tfm) / 2, 0,
2853		       crypto_ahash_blocksize(tfm) - keylen);
2854	}
2855
2856	/* If doing fallback, still need to set the new key! */
2857	ctx->fb_do_setkey = true;
2858	return ret;
2859}
2860
2861static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
2862{
2863	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2864	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2865	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2866
2867	memset(req, 0, sizeof(*req));
2868
2869	/* Copy (half of) the key */
2870	memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
2871	/* Start of HMAC should have len == processed == blocksize */
2872	req->len	= SHA3_224_BLOCK_SIZE;
2873	req->processed	= SHA3_224_BLOCK_SIZE;
2874	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2875	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2876	req->state_sz = SHA3_224_BLOCK_SIZE / 2;
2877	req->digest_sz = SHA3_224_DIGEST_SIZE;
2878	req->block_sz = SHA3_224_BLOCK_SIZE;
2879	req->hmac = true;
2880	ctx->do_fallback = false;
2881	ctx->fb_init_done = false;
2882	return 0;
2883}
2884
2885static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
2886{
2887	if (req->nbytes)
2888		return safexcel_hmac_sha3_224_init(req) ?:
2889		       safexcel_ahash_finup(req);
2890
2891	/* HW cannot do zero length HMAC, use fallback instead */
2892	return safexcel_sha3_digest_fallback(req);
2893}
2894
2895static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
2896{
2897	return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
2898}
2899
2900struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
2901	.type = SAFEXCEL_ALG_TYPE_AHASH,
2902	.algo_mask = SAFEXCEL_ALG_SHA3,
2903	.alg.ahash = {
2904		.init = safexcel_hmac_sha3_224_init,
2905		.update = safexcel_sha3_update,
2906		.final = safexcel_sha3_final,
2907		.finup = safexcel_sha3_finup,
2908		.digest = safexcel_hmac_sha3_224_digest,
2909		.setkey = safexcel_hmac_sha3_setkey,
2910		.export = safexcel_sha3_export,
2911		.import = safexcel_sha3_import,
2912		.halg = {
2913			.digestsize = SHA3_224_DIGEST_SIZE,
2914			.statesize = sizeof(struct safexcel_ahash_export_state),
2915			.base = {
2916				.cra_name = "hmac(sha3-224)",
2917				.cra_driver_name = "safexcel-hmac-sha3-224",
2918				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2919				.cra_flags = CRYPTO_ALG_ASYNC |
2920					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2921					     CRYPTO_ALG_NEED_FALLBACK,
2922				.cra_blocksize = SHA3_224_BLOCK_SIZE,
2923				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2924				.cra_init = safexcel_hmac_sha3_224_cra_init,
2925				.cra_exit = safexcel_hmac_sha3_cra_exit,
2926				.cra_module = THIS_MODULE,
2927			},
2928		},
2929	},
2930};
2931
2932static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
2933{
2934	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2935	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2936	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2937
2938	memset(req, 0, sizeof(*req));
2939
2940	/* Copy (half of) the key */
2941	memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
2942	/* Start of HMAC should have len == processed == blocksize */
2943	req->len	= SHA3_256_BLOCK_SIZE;
2944	req->processed	= SHA3_256_BLOCK_SIZE;
2945	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2946	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2947	req->state_sz = SHA3_256_BLOCK_SIZE / 2;
2948	req->digest_sz = SHA3_256_DIGEST_SIZE;
2949	req->block_sz = SHA3_256_BLOCK_SIZE;
2950	req->hmac = true;
2951	ctx->do_fallback = false;
2952	ctx->fb_init_done = false;
2953	return 0;
2954}
2955
2956static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
2957{
2958	if (req->nbytes)
2959		return safexcel_hmac_sha3_256_init(req) ?:
2960		       safexcel_ahash_finup(req);
2961
2962	/* HW cannot do zero length HMAC, use fallback instead */
2963	return safexcel_sha3_digest_fallback(req);
2964}
2965
2966static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
2967{
2968	return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
2969}
2970
2971struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
2972	.type = SAFEXCEL_ALG_TYPE_AHASH,
2973	.algo_mask = SAFEXCEL_ALG_SHA3,
2974	.alg.ahash = {
2975		.init = safexcel_hmac_sha3_256_init,
2976		.update = safexcel_sha3_update,
2977		.final = safexcel_sha3_final,
2978		.finup = safexcel_sha3_finup,
2979		.digest = safexcel_hmac_sha3_256_digest,
2980		.setkey = safexcel_hmac_sha3_setkey,
2981		.export = safexcel_sha3_export,
2982		.import = safexcel_sha3_import,
2983		.halg = {
2984			.digestsize = SHA3_256_DIGEST_SIZE,
2985			.statesize = sizeof(struct safexcel_ahash_export_state),
2986			.base = {
2987				.cra_name = "hmac(sha3-256)",
2988				.cra_driver_name = "safexcel-hmac-sha3-256",
2989				.cra_priority = SAFEXCEL_CRA_PRIORITY,
2990				.cra_flags = CRYPTO_ALG_ASYNC |
2991					     CRYPTO_ALG_KERN_DRIVER_ONLY |
2992					     CRYPTO_ALG_NEED_FALLBACK,
2993				.cra_blocksize = SHA3_256_BLOCK_SIZE,
2994				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2995				.cra_init = safexcel_hmac_sha3_256_cra_init,
2996				.cra_exit = safexcel_hmac_sha3_cra_exit,
2997				.cra_module = THIS_MODULE,
2998			},
2999		},
3000	},
3001};
3002
3003static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
3004{
3005	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
3006	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
3007	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
3008
3009	memset(req, 0, sizeof(*req));
3010
3011	/* Copy (half of) the key */
3012	memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
3013	/* Start of HMAC should have len == processed == blocksize */
3014	req->len	= SHA3_384_BLOCK_SIZE;
3015	req->processed	= SHA3_384_BLOCK_SIZE;
3016	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
3017	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3018	req->state_sz = SHA3_384_BLOCK_SIZE / 2;
3019	req->digest_sz = SHA3_384_DIGEST_SIZE;
3020	req->block_sz = SHA3_384_BLOCK_SIZE;
3021	req->hmac = true;
3022	ctx->do_fallback = false;
3023	ctx->fb_init_done = false;
3024	return 0;
3025}
3026
3027static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
3028{
3029	if (req->nbytes)
3030		return safexcel_hmac_sha3_384_init(req) ?:
3031		       safexcel_ahash_finup(req);
3032
3033	/* HW cannot do zero length HMAC, use fallback instead */
3034	return safexcel_sha3_digest_fallback(req);
3035}
3036
3037static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
3038{
3039	return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
3040}
3041
3042struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
3043	.type = SAFEXCEL_ALG_TYPE_AHASH,
3044	.algo_mask = SAFEXCEL_ALG_SHA3,
3045	.alg.ahash = {
3046		.init = safexcel_hmac_sha3_384_init,
3047		.update = safexcel_sha3_update,
3048		.final = safexcel_sha3_final,
3049		.finup = safexcel_sha3_finup,
3050		.digest = safexcel_hmac_sha3_384_digest,
3051		.setkey = safexcel_hmac_sha3_setkey,
3052		.export = safexcel_sha3_export,
3053		.import = safexcel_sha3_import,
3054		.halg = {
3055			.digestsize = SHA3_384_DIGEST_SIZE,
3056			.statesize = sizeof(struct safexcel_ahash_export_state),
3057			.base = {
3058				.cra_name = "hmac(sha3-384)",
3059				.cra_driver_name = "safexcel-hmac-sha3-384",
3060				.cra_priority = SAFEXCEL_CRA_PRIORITY,
3061				.cra_flags = CRYPTO_ALG_ASYNC |
3062					     CRYPTO_ALG_KERN_DRIVER_ONLY |
3063					     CRYPTO_ALG_NEED_FALLBACK,
3064				.cra_blocksize = SHA3_384_BLOCK_SIZE,
3065				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3066				.cra_init = safexcel_hmac_sha3_384_cra_init,
3067				.cra_exit = safexcel_hmac_sha3_cra_exit,
3068				.cra_module = THIS_MODULE,
3069			},
3070		},
3071	},
3072};
3073
3074static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
3075{
3076	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
3077	struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
3078	struct safexcel_ahash_req *req = ahash_request_ctx(areq);
3079
3080	memset(req, 0, sizeof(*req));
3081
3082	/* Copy (half of) the key */
3083	memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
3084	/* Start of HMAC should have len == processed == blocksize */
3085	req->len	= SHA3_512_BLOCK_SIZE;
3086	req->processed	= SHA3_512_BLOCK_SIZE;
3087	ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
3088	req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3089	req->state_sz = SHA3_512_BLOCK_SIZE / 2;
3090	req->digest_sz = SHA3_512_DIGEST_SIZE;
3091	req->block_sz = SHA3_512_BLOCK_SIZE;
3092	req->hmac = true;
3093	ctx->do_fallback = false;
3094	ctx->fb_init_done = false;
3095	return 0;
3096}
3097
3098static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
3099{
3100	if (req->nbytes)
3101		return safexcel_hmac_sha3_512_init(req) ?:
3102		       safexcel_ahash_finup(req);
3103
3104	/* HW cannot do zero length HMAC, use fallback instead */
3105	return safexcel_sha3_digest_fallback(req);
3106}
3107
3108static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
3109{
3110	return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
3111}
3112struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
3113	.type = SAFEXCEL_ALG_TYPE_AHASH,
3114	.algo_mask = SAFEXCEL_ALG_SHA3,
3115	.alg.ahash = {
3116		.init = safexcel_hmac_sha3_512_init,
3117		.update = safexcel_sha3_update,
3118		.final = safexcel_sha3_final,
3119		.finup = safexcel_sha3_finup,
3120		.digest = safexcel_hmac_sha3_512_digest,
3121		.setkey = safexcel_hmac_sha3_setkey,
3122		.export = safexcel_sha3_export,
3123		.import = safexcel_sha3_import,
3124		.halg = {
3125			.digestsize = SHA3_512_DIGEST_SIZE,
3126			.statesize = sizeof(struct safexcel_ahash_export_state),
3127			.base = {
3128				.cra_name = "hmac(sha3-512)",
3129				.cra_driver_name = "safexcel-hmac-sha3-512",
3130				.cra_priority = SAFEXCEL_CRA_PRIORITY,
3131				.cra_flags = CRYPTO_ALG_ASYNC |
3132					     CRYPTO_ALG_KERN_DRIVER_ONLY |
3133					     CRYPTO_ALG_NEED_FALLBACK,
3134				.cra_blocksize = SHA3_512_BLOCK_SIZE,
3135				.cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3136				.cra_init = safexcel_hmac_sha3_512_cra_init,
3137				.cra_exit = safexcel_hmac_sha3_cra_exit,
3138				.cra_module = THIS_MODULE,
3139			},
3140		},
3141	},
3142};