Linux Audio

Check our new training course

Loading...
Note: File does not exist in v3.5.6.
   1// SPDX-License-Identifier: GPL-2.0
   2// Copyright (C) 2018 Spreadtrum Communications Inc.
   3
   4#include <linux/clk.h>
   5#include <linux/dmaengine.h>
   6#include <linux/dma-mapping.h>
   7#include <linux/dma/sprd-dma.h>
   8#include <linux/interrupt.h>
   9#include <linux/io.h>
  10#include <linux/iopoll.h>
  11#include <linux/kernel.h>
  12#include <linux/module.h>
  13#include <linux/of.h>
  14#include <linux/of_device.h>
  15#include <linux/of_dma.h>
  16#include <linux/platform_device.h>
  17#include <linux/pm_runtime.h>
  18#include <linux/spi/spi.h>
  19
  20#define SPRD_SPI_TXD			0x0
  21#define SPRD_SPI_CLKD			0x4
  22#define SPRD_SPI_CTL0			0x8
  23#define SPRD_SPI_CTL1			0xc
  24#define SPRD_SPI_CTL2			0x10
  25#define SPRD_SPI_CTL3			0x14
  26#define SPRD_SPI_CTL4			0x18
  27#define SPRD_SPI_CTL5			0x1c
  28#define SPRD_SPI_INT_EN			0x20
  29#define SPRD_SPI_INT_CLR		0x24
  30#define SPRD_SPI_INT_RAW_STS		0x28
  31#define SPRD_SPI_INT_MASK_STS		0x2c
  32#define SPRD_SPI_STS1			0x30
  33#define SPRD_SPI_STS2			0x34
  34#define SPRD_SPI_DSP_WAIT		0x38
  35#define SPRD_SPI_STS3			0x3c
  36#define SPRD_SPI_CTL6			0x40
  37#define SPRD_SPI_STS4			0x44
  38#define SPRD_SPI_FIFO_RST		0x48
  39#define SPRD_SPI_CTL7			0x4c
  40#define SPRD_SPI_STS5			0x50
  41#define SPRD_SPI_CTL8			0x54
  42#define SPRD_SPI_CTL9			0x58
  43#define SPRD_SPI_CTL10			0x5c
  44#define SPRD_SPI_CTL11			0x60
  45#define SPRD_SPI_CTL12			0x64
  46#define SPRD_SPI_STS6			0x68
  47#define SPRD_SPI_STS7			0x6c
  48#define SPRD_SPI_STS8			0x70
  49#define SPRD_SPI_STS9			0x74
  50
  51/* Bits & mask definition for register CTL0 */
  52#define SPRD_SPI_SCK_REV		BIT(13)
  53#define SPRD_SPI_NG_TX			BIT(1)
  54#define SPRD_SPI_NG_RX			BIT(0)
  55#define SPRD_SPI_CHNL_LEN_MASK		GENMASK(4, 0)
  56#define SPRD_SPI_CSN_MASK		GENMASK(11, 8)
  57#define SPRD_SPI_CS0_VALID		BIT(8)
  58
  59/* Bits & mask definition for register SPI_INT_EN */
  60#define SPRD_SPI_TX_END_INT_EN		BIT(8)
  61#define SPRD_SPI_RX_END_INT_EN		BIT(9)
  62
  63/* Bits & mask definition for register SPI_INT_RAW_STS */
  64#define SPRD_SPI_TX_END_RAW		BIT(8)
  65#define SPRD_SPI_RX_END_RAW		BIT(9)
  66
  67/* Bits & mask definition for register SPI_INT_CLR */
  68#define SPRD_SPI_TX_END_CLR		BIT(8)
  69#define SPRD_SPI_RX_END_CLR		BIT(9)
  70
  71/* Bits & mask definition for register INT_MASK_STS */
  72#define SPRD_SPI_MASK_RX_END		BIT(9)
  73#define SPRD_SPI_MASK_TX_END		BIT(8)
  74
  75/* Bits & mask definition for register STS2 */
  76#define SPRD_SPI_TX_BUSY		BIT(8)
  77
  78/* Bits & mask definition for register CTL1 */
  79#define SPRD_SPI_RX_MODE		BIT(12)
  80#define SPRD_SPI_TX_MODE		BIT(13)
  81#define SPRD_SPI_RTX_MD_MASK		GENMASK(13, 12)
  82
  83/* Bits & mask definition for register CTL2 */
  84#define SPRD_SPI_DMA_EN			BIT(6)
  85
  86/* Bits & mask definition for register CTL4 */
  87#define SPRD_SPI_START_RX		BIT(9)
  88#define SPRD_SPI_ONLY_RECV_MASK		GENMASK(8, 0)
  89
  90/* Bits & mask definition for register SPI_INT_CLR */
  91#define SPRD_SPI_RX_END_INT_CLR		BIT(9)
  92#define SPRD_SPI_TX_END_INT_CLR		BIT(8)
  93
  94/* Bits & mask definition for register SPI_INT_RAW */
  95#define SPRD_SPI_RX_END_IRQ		BIT(9)
  96#define SPRD_SPI_TX_END_IRQ		BIT(8)
  97
  98/* Bits & mask definition for register CTL12 */
  99#define SPRD_SPI_SW_RX_REQ		BIT(0)
 100#define SPRD_SPI_SW_TX_REQ		BIT(1)
 101
 102/* Bits & mask definition for register CTL7 */
 103#define SPRD_SPI_DATA_LINE2_EN		BIT(15)
 104#define SPRD_SPI_MODE_MASK		GENMASK(5, 3)
 105#define SPRD_SPI_MODE_OFFSET		3
 106#define SPRD_SPI_3WIRE_MODE		4
 107#define SPRD_SPI_4WIRE_MODE		0
 108
 109/* Bits & mask definition for register CTL8 */
 110#define SPRD_SPI_TX_MAX_LEN_MASK	GENMASK(19, 0)
 111#define SPRD_SPI_TX_LEN_H_MASK		GENMASK(3, 0)
 112#define SPRD_SPI_TX_LEN_H_OFFSET	16
 113
 114/* Bits & mask definition for register CTL9 */
 115#define SPRD_SPI_TX_LEN_L_MASK		GENMASK(15, 0)
 116
 117/* Bits & mask definition for register CTL10 */
 118#define SPRD_SPI_RX_MAX_LEN_MASK	GENMASK(19, 0)
 119#define SPRD_SPI_RX_LEN_H_MASK		GENMASK(3, 0)
 120#define SPRD_SPI_RX_LEN_H_OFFSET	16
 121
 122/* Bits & mask definition for register CTL11 */
 123#define SPRD_SPI_RX_LEN_L_MASK		GENMASK(15, 0)
 124
 125/* Default & maximum word delay cycles */
 126#define SPRD_SPI_MIN_DELAY_CYCLE	14
 127#define SPRD_SPI_MAX_DELAY_CYCLE	130
 128
 129#define SPRD_SPI_FIFO_SIZE		32
 130#define SPRD_SPI_CHIP_CS_NUM		0x4
 131#define SPRD_SPI_CHNL_LEN		2
 132#define SPRD_SPI_DEFAULT_SOURCE		26000000
 133#define SPRD_SPI_MAX_SPEED_HZ		48000000
 134#define SPRD_SPI_AUTOSUSPEND_DELAY	100
 135#define SPRD_SPI_DMA_STEP		8
 136
/* Indexes into sprd_spi_dma.dma_chan[] for the two transfer directions */
enum sprd_spi_dma_channel {
	SPRD_SPI_RX,
	SPRD_SPI_TX,
	SPRD_SPI_MAX,	/* array size, not a real channel */
};
 142
/* State for the optional DMA transfer path */
struct sprd_spi_dma {
	bool enable;				/* true once both channels are acquired */
	struct dma_chan *dma_chan[SPRD_SPI_MAX];	/* indexed by enum sprd_spi_dma_channel */
	enum dma_slave_buswidth width;		/* bus width matching bits_per_word */
	u32 fragmens_len;			/* fragment/burst length (SPRD_SPI_DMA_STEP scaled by word size) */
	u32 rx_len;				/* length configured for the current RX DMA */
};
 150
/* Per-controller driver state */
struct sprd_spi {
	void __iomem *base;		/* mapped register base */
	phys_addr_t phy_base;		/* physical register base, used as DMA endpoint */
	struct device *dev;
	struct clk *clk;		/* "enable" gate clock */
	int irq;
	u32 src_clk;			/* source clock rate in Hz */
	u32 hw_mode;			/* SPI mode bits of the current device */
	u32 trans_len;			/* current transfer length, in words */
	u32 trans_mode;			/* SPRD_SPI_TX_MODE / SPRD_SPI_RX_MODE flags */
	u32 word_delay;			/* inter-word gap in source clock cycles */
	u32 hw_speed_hz;		/* bus speed actually programmed in CLKD */
	u32 len;			/* current transfer length, in bytes */
	int status;
	struct sprd_spi_dma dma;
	struct completion xfer_completion;	/* completed from the IRQ handler (DMA path) */
	const void *tx_buf;		/* advanced by write_bufs as data is consumed */
	void *rx_buf;			/* advanced by read_bufs as data is produced */
	int (*read_bufs)(struct sprd_spi *ss, u32 len);		/* PIO FIFO drain for the selected word size */
	int (*write_bufs)(struct sprd_spi *ss, u32 len);	/* PIO FIFO fill for the selected word size */
};
 172
 173static u32 sprd_spi_transfer_max_timeout(struct sprd_spi *ss,
 174					 struct spi_transfer *t)
 175{
 176	/*
 177	 * The time spent on transmission of the full FIFO data is the maximum
 178	 * SPI transmission time.
 179	 */
 180	u32 size = t->bits_per_word * SPRD_SPI_FIFO_SIZE;
 181	u32 bit_time_us = DIV_ROUND_UP(USEC_PER_SEC, ss->hw_speed_hz);
 182	u32 total_time_us = size * bit_time_us;
 183	/*
 184	 * There is an interval between data and the data in our SPI hardware,
 185	 * so the total transmission time need add the interval time.
 186	 */
 187	u32 interval_cycle = SPRD_SPI_FIFO_SIZE * ss->word_delay;
 188	u32 interval_time_us = DIV_ROUND_UP(interval_cycle * USEC_PER_SEC,
 189					    ss->src_clk);
 190
 191	return total_time_us + interval_time_us;
 192}
 193
/*
 * Poll for TX completion: first wait for the TX-end raw status to latch,
 * then for the TX busy flag to clear, and finally acknowledge the
 * interrupt.  Returns 0 on success or -ETIMEDOUT from the poll helpers.
 */
static int sprd_spi_wait_for_tx_end(struct sprd_spi *ss, struct spi_transfer *t)
{
	u32 val, us;
	int ret;

	us = sprd_spi_transfer_max_timeout(ss, t);
	ret = readl_relaxed_poll_timeout(ss->base + SPRD_SPI_INT_RAW_STS, val,
					 val & SPRD_SPI_TX_END_IRQ, 0, us);
	if (ret) {
		dev_err(ss->dev, "SPI error, spi send timeout!\n");
		return ret;
	}

	/* TX-end can fire before the shifter drains; wait for busy too */
	ret = readl_relaxed_poll_timeout(ss->base + SPRD_SPI_STS2, val,
					 !(val & SPRD_SPI_TX_BUSY), 0, us);
	if (ret) {
		dev_err(ss->dev, "SPI error, spi busy timeout!\n");
		return ret;
	}

	/* Acknowledge the TX-end status for the next chunk */
	writel_relaxed(SPRD_SPI_TX_END_INT_CLR, ss->base + SPRD_SPI_INT_CLR);

	return 0;
}
 218
/*
 * Poll for RX completion (RX-end raw status) and acknowledge it.
 * Returns 0 on success or -ETIMEDOUT from the poll helper.
 */
static int sprd_spi_wait_for_rx_end(struct sprd_spi *ss, struct spi_transfer *t)
{
	u32 val, us;
	int ret;

	us = sprd_spi_transfer_max_timeout(ss, t);
	ret = readl_relaxed_poll_timeout(ss->base + SPRD_SPI_INT_RAW_STS, val,
					 val & SPRD_SPI_RX_END_IRQ, 0, us);
	if (ret) {
		dev_err(ss->dev, "SPI error, spi rx timeout!\n");
		return ret;
	}

	/* Acknowledge the RX-end status for the next chunk */
	writel_relaxed(SPRD_SPI_RX_END_INT_CLR, ss->base + SPRD_SPI_INT_CLR);

	return 0;
}
 236
/* Software-trigger a transmit; used in 3-wire and dual-TX-line modes */
static void sprd_spi_tx_req(struct sprd_spi *ss)
{
	writel_relaxed(SPRD_SPI_SW_TX_REQ, ss->base + SPRD_SPI_CTL12);
}
 241
/* Software-trigger a receive; used in 3-wire and dual-TX-line modes */
static void sprd_spi_rx_req(struct sprd_spi *ss)
{
	writel_relaxed(SPRD_SPI_SW_RX_REQ, ss->base + SPRD_SPI_CTL12);
}
 246
 247static void sprd_spi_enter_idle(struct sprd_spi *ss)
 248{
 249	u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL1);
 250
 251	val &= ~SPRD_SPI_RTX_MD_MASK;
 252	writel_relaxed(val, ss->base + SPRD_SPI_CTL1);
 253}
 254
 255static void sprd_spi_set_transfer_bits(struct sprd_spi *ss, u32 bits)
 256{
 257	u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL0);
 258
 259	/* Set the valid bits for every transaction */
 260	val &= ~(SPRD_SPI_CHNL_LEN_MASK << SPRD_SPI_CHNL_LEN);
 261	val |= bits << SPRD_SPI_CHNL_LEN;
 262	writel_relaxed(val, ss->base + SPRD_SPI_CTL0);
 263}
 264
 265static void sprd_spi_set_tx_length(struct sprd_spi *ss, u32 length)
 266{
 267	u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL8);
 268
 269	length &= SPRD_SPI_TX_MAX_LEN_MASK;
 270	val &= ~SPRD_SPI_TX_LEN_H_MASK;
 271	val |= length >> SPRD_SPI_TX_LEN_H_OFFSET;
 272	writel_relaxed(val, ss->base + SPRD_SPI_CTL8);
 273
 274	val = length & SPRD_SPI_TX_LEN_L_MASK;
 275	writel_relaxed(val, ss->base + SPRD_SPI_CTL9);
 276}
 277
 278static void sprd_spi_set_rx_length(struct sprd_spi *ss, u32 length)
 279{
 280	u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL10);
 281
 282	length &= SPRD_SPI_RX_MAX_LEN_MASK;
 283	val &= ~SPRD_SPI_RX_LEN_H_MASK;
 284	val |= length >> SPRD_SPI_RX_LEN_H_OFFSET;
 285	writel_relaxed(val, ss->base + SPRD_SPI_CTL10);
 286
 287	val = length & SPRD_SPI_RX_LEN_L_MASK;
 288	writel_relaxed(val, ss->base + SPRD_SPI_CTL11);
 289}
 290
 291static void sprd_spi_chipselect(struct spi_device *sdev, bool cs)
 292{
 293	struct spi_controller *sctlr = sdev->controller;
 294	struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
 295	u32 val;
 296
 297	val = readl_relaxed(ss->base + SPRD_SPI_CTL0);
 298	/*  The SPI controller will pull down CS pin if cs is 0 */
 299	if (!cs) {
 300		val &= ~SPRD_SPI_CS0_VALID;
 301		writel_relaxed(val, ss->base + SPRD_SPI_CTL0);
 302	} else {
 303		val |= SPRD_SPI_CSN_MASK;
 304		writel_relaxed(val, ss->base + SPRD_SPI_CTL0);
 305	}
 306}
 307
/*
 * RX-only transfers: program how many words to receive and set the start
 * bit so the controller clocks data in by itself (installed as the
 * write_bufs hook when trans_mode is RX only).  CTL4 is updated in three
 * separate read-modify-writes (clear, length, start) — presumably a
 * hardware sequencing requirement; keep the ordering.
 * Returns len, matching the write_bufs() contract.
 */
static int sprd_spi_write_only_receive(struct sprd_spi *ss, u32 len)
{
	u32 val;

	/* Clear the start receive bit and reset receive data number */
	val = readl_relaxed(ss->base + SPRD_SPI_CTL4);
	val &= ~(SPRD_SPI_START_RX | SPRD_SPI_ONLY_RECV_MASK);
	writel_relaxed(val, ss->base + SPRD_SPI_CTL4);

	/* Set the receive data length */
	val = readl_relaxed(ss->base + SPRD_SPI_CTL4);
	val |= len & SPRD_SPI_ONLY_RECV_MASK;
	writel_relaxed(val, ss->base + SPRD_SPI_CTL4);

	/* Trigger to receive data */
	val = readl_relaxed(ss->base + SPRD_SPI_CTL4);
	val |= SPRD_SPI_START_RX;
	writel_relaxed(val, ss->base + SPRD_SPI_CTL4);

	return len;
}
 329
 330static int sprd_spi_write_bufs_u8(struct sprd_spi *ss, u32 len)
 331{
 332	u8 *tx_p = (u8 *)ss->tx_buf;
 333	int i;
 334
 335	for (i = 0; i < len; i++)
 336		writeb_relaxed(tx_p[i], ss->base + SPRD_SPI_TXD);
 337
 338	ss->tx_buf += i;
 339	return i;
 340}
 341
 342static int sprd_spi_write_bufs_u16(struct sprd_spi *ss, u32 len)
 343{
 344	u16 *tx_p = (u16 *)ss->tx_buf;
 345	int i;
 346
 347	for (i = 0; i < len; i++)
 348		writew_relaxed(tx_p[i], ss->base + SPRD_SPI_TXD);
 349
 350	ss->tx_buf += i << 1;
 351	return i << 1;
 352}
 353
 354static int sprd_spi_write_bufs_u32(struct sprd_spi *ss, u32 len)
 355{
 356	u32 *tx_p = (u32 *)ss->tx_buf;
 357	int i;
 358
 359	for (i = 0; i < len; i++)
 360		writel_relaxed(tx_p[i], ss->base + SPRD_SPI_TXD);
 361
 362	ss->tx_buf += i << 2;
 363	return i << 2;
 364}
 365
 366static int sprd_spi_read_bufs_u8(struct sprd_spi *ss, u32 len)
 367{
 368	u8 *rx_p = (u8 *)ss->rx_buf;
 369	int i;
 370
 371	for (i = 0; i < len; i++)
 372		rx_p[i] = readb_relaxed(ss->base + SPRD_SPI_TXD);
 373
 374	ss->rx_buf += i;
 375	return i;
 376}
 377
 378static int sprd_spi_read_bufs_u16(struct sprd_spi *ss, u32 len)
 379{
 380	u16 *rx_p = (u16 *)ss->rx_buf;
 381	int i;
 382
 383	for (i = 0; i < len; i++)
 384		rx_p[i] = readw_relaxed(ss->base + SPRD_SPI_TXD);
 385
 386	ss->rx_buf += i << 1;
 387	return i << 1;
 388}
 389
 390static int sprd_spi_read_bufs_u32(struct sprd_spi *ss, u32 len)
 391{
 392	u32 *rx_p = (u32 *)ss->rx_buf;
 393	int i;
 394
 395	for (i = 0; i < len; i++)
 396		rx_p[i] = readl_relaxed(ss->base + SPRD_SPI_TXD);
 397
 398	ss->rx_buf += i << 2;
 399	return i << 2;
 400}
 401
/*
 * PIO transfer path: move ss->trans_len words through the FIFO in chunks
 * of at most SPRD_SPI_FIFO_SIZE, waiting for each chunk to complete.
 * Returns the accumulated count from the write/read helpers (TX count
 * when transmitting, otherwise RX count) or a negative errno on timeout.
 */
static int sprd_spi_txrx_bufs(struct spi_device *sdev, struct spi_transfer *t)
{
	struct sprd_spi *ss = spi_controller_get_devdata(sdev->controller);
	u32 trans_len = ss->trans_len, len;
	int ret, write_size = 0, read_size = 0;

	while (trans_len) {
		/* Cap each chunk at the hardware FIFO depth */
		len = trans_len > SPRD_SPI_FIFO_SIZE ? SPRD_SPI_FIFO_SIZE :
			trans_len;
		if (ss->trans_mode & SPRD_SPI_TX_MODE) {
			sprd_spi_set_tx_length(ss, len);
			write_size += ss->write_bufs(ss, len);

			/*
			 * For our 3 wires mode or dual TX line mode, we need
			 * to request the controller to transfer.
			 */
			if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
				sprd_spi_tx_req(ss);

			ret = sprd_spi_wait_for_tx_end(ss, t);
		} else {
			sprd_spi_set_rx_length(ss, len);

			/*
			 * For our 3 wires mode or dual TX line mode, we need
			 * to request the controller to read.
			 */
			if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
				sprd_spi_rx_req(ss);
			else
				/* RX-only: write_bufs is the receive trigger here */
				write_size += ss->write_bufs(ss, len);

			ret = sprd_spi_wait_for_rx_end(ss, t);
		}

		if (ret)
			goto complete;

		if (ss->trans_mode & SPRD_SPI_RX_MODE)
			read_size += ss->read_bufs(ss, len);

		trans_len -= len;
	}

	if (ss->trans_mode & SPRD_SPI_TX_MODE)
		ret = write_size;
	else
		ret = read_size;
complete:
	/* Always leave the controller idle, success or failure */
	sprd_spi_enter_idle(ss);

	return ret;
}
 456
 457static void sprd_spi_irq_enable(struct sprd_spi *ss)
 458{
 459	u32 val;
 460
 461	/* Clear interrupt status before enabling interrupt. */
 462	writel_relaxed(SPRD_SPI_TX_END_CLR | SPRD_SPI_RX_END_CLR,
 463		ss->base + SPRD_SPI_INT_CLR);
 464	/* Enable SPI interrupt only in DMA mode. */
 465	val = readl_relaxed(ss->base + SPRD_SPI_INT_EN);
 466	writel_relaxed(val | SPRD_SPI_TX_END_INT_EN |
 467		       SPRD_SPI_RX_END_INT_EN,
 468		       ss->base + SPRD_SPI_INT_EN);
 469}
 470
/* Mask all SPI interrupts */
static void sprd_spi_irq_disable(struct sprd_spi *ss)
{
	writel_relaxed(0, ss->base + SPRD_SPI_INT_EN);
}
 475
 476static void sprd_spi_dma_enable(struct sprd_spi *ss, bool enable)
 477{
 478	u32 val = readl_relaxed(ss->base + SPRD_SPI_CTL2);
 479
 480	if (enable)
 481		val |= SPRD_SPI_DMA_EN;
 482	else
 483		val &= ~SPRD_SPI_DMA_EN;
 484
 485	writel_relaxed(val, ss->base + SPRD_SPI_CTL2);
 486}
 487
 488static int sprd_spi_dma_submit(struct dma_chan *dma_chan,
 489			       struct dma_slave_config *c,
 490			       struct sg_table *sg,
 491			       enum dma_transfer_direction dir)
 492{
 493	struct dma_async_tx_descriptor *desc;
 494	dma_cookie_t cookie;
 495	unsigned long flags;
 496	int ret;
 497
 498	ret = dmaengine_slave_config(dma_chan, c);
 499	if (ret < 0)
 500		return ret;
 501
 502	flags = SPRD_DMA_FLAGS(SPRD_DMA_CHN_MODE_NONE, SPRD_DMA_NO_TRG,
 503			       SPRD_DMA_FRAG_REQ, SPRD_DMA_TRANS_INT);
 504	desc = dmaengine_prep_slave_sg(dma_chan, sg->sgl, sg->nents, dir, flags);
 505	if (!desc)
 506		return  -ENODEV;
 507
 508	cookie = dmaengine_submit(desc);
 509	if (dma_submit_error(cookie))
 510		return dma_submit_error(cookie);
 511
 512	dma_async_issue_pending(dma_chan);
 513
 514	return 0;
 515}
 516
/*
 * Set up and start the RX DMA (device -> memory, reading from the data
 * register at the controller's physical base).  Returns ss->dma.rx_len
 * (the length queued by the caller) on success or a negative errno.
 */
static int sprd_spi_dma_rx_config(struct sprd_spi *ss, struct spi_transfer *t)
{
	struct dma_chan *dma_chan = ss->dma.dma_chan[SPRD_SPI_RX];
	struct dma_slave_config config = {
		.src_addr = ss->phy_base,
		.src_addr_width = ss->dma.width,
		.dst_addr_width = ss->dma.width,
		.dst_maxburst = ss->dma.fragmens_len,
	};
	int ret;

	ret = sprd_spi_dma_submit(dma_chan, &config, &t->rx_sg, DMA_DEV_TO_MEM);
	if (ret)
		return ret;

	return ss->dma.rx_len;
}
 534
/*
 * Set up and start the TX DMA (memory -> device, writing to the data
 * register at the controller's physical base).  Returns t->len on
 * success or a negative errno.
 */
static int sprd_spi_dma_tx_config(struct sprd_spi *ss, struct spi_transfer *t)
{
	struct dma_chan *dma_chan = ss->dma.dma_chan[SPRD_SPI_TX];
	struct dma_slave_config config = {
		.dst_addr = ss->phy_base,
		.src_addr_width = ss->dma.width,
		.dst_addr_width = ss->dma.width,
		.src_maxburst = ss->dma.fragmens_len,
	};
	int ret;

	ret = sprd_spi_dma_submit(dma_chan, &config, &t->tx_sg, DMA_MEM_TO_DEV);
	if (ret)
		return ret;

	return t->len;
}
 552
 553static int sprd_spi_dma_request(struct sprd_spi *ss)
 554{
 555	ss->dma.dma_chan[SPRD_SPI_RX] = dma_request_chan(ss->dev, "rx_chn");
 556	if (IS_ERR_OR_NULL(ss->dma.dma_chan[SPRD_SPI_RX]))
 557		return dev_err_probe(ss->dev, PTR_ERR(ss->dma.dma_chan[SPRD_SPI_RX]),
 558				     "request RX DMA channel failed!\n");
 559
 560	ss->dma.dma_chan[SPRD_SPI_TX]  = dma_request_chan(ss->dev, "tx_chn");
 561	if (IS_ERR_OR_NULL(ss->dma.dma_chan[SPRD_SPI_TX])) {
 562		dma_release_channel(ss->dma.dma_chan[SPRD_SPI_RX]);
 563		return dev_err_probe(ss->dev, PTR_ERR(ss->dma.dma_chan[SPRD_SPI_TX]),
 564				     "request TX DMA channel failed!\n");
 565	}
 566
 567	return 0;
 568}
 569
 570static void sprd_spi_dma_release(struct sprd_spi *ss)
 571{
 572	if (ss->dma.dma_chan[SPRD_SPI_RX])
 573		dma_release_channel(ss->dma.dma_chan[SPRD_SPI_RX]);
 574
 575	if (ss->dma.dma_chan[SPRD_SPI_TX])
 576		dma_release_channel(ss->dma.dma_chan[SPRD_SPI_TX]);
 577}
 578
 579static int sprd_spi_dma_txrx_bufs(struct spi_device *sdev,
 580				  struct spi_transfer *t)
 581{
 582	struct sprd_spi *ss = spi_master_get_devdata(sdev->master);
 583	u32 trans_len = ss->trans_len;
 584	int ret, write_size = 0;
 585
 586	reinit_completion(&ss->xfer_completion);
 587	sprd_spi_irq_enable(ss);
 588	if (ss->trans_mode & SPRD_SPI_TX_MODE) {
 589		write_size = sprd_spi_dma_tx_config(ss, t);
 590		sprd_spi_set_tx_length(ss, trans_len);
 591
 592		/*
 593		 * For our 3 wires mode or dual TX line mode, we need
 594		 * to request the controller to transfer.
 595		 */
 596		if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
 597			sprd_spi_tx_req(ss);
 598	} else {
 599		sprd_spi_set_rx_length(ss, trans_len);
 600
 601		/*
 602		 * For our 3 wires mode or dual TX line mode, we need
 603		 * to request the controller to read.
 604		 */
 605		if (ss->hw_mode & SPI_3WIRE || ss->hw_mode & SPI_TX_DUAL)
 606			sprd_spi_rx_req(ss);
 607		else
 608			write_size = ss->write_bufs(ss, trans_len);
 609	}
 610
 611	if (write_size < 0) {
 612		ret = write_size;
 613		dev_err(ss->dev, "failed to write, ret = %d\n", ret);
 614		goto trans_complete;
 615	}
 616
 617	if (ss->trans_mode & SPRD_SPI_RX_MODE) {
 618		/*
 619		 * Set up the DMA receive data length, which must be an
 620		 * integral multiple of fragment length. But when the length
 621		 * of received data is less than fragment length, DMA can be
 622		 * configured to receive data according to the actual length
 623		 * of received data.
 624		 */
 625		ss->dma.rx_len = t->len > ss->dma.fragmens_len ?
 626			(t->len - t->len % ss->dma.fragmens_len) :
 627			 t->len;
 628		ret = sprd_spi_dma_rx_config(ss, t);
 629		if (ret < 0) {
 630			dev_err(&sdev->dev,
 631				"failed to configure rx DMA, ret = %d\n", ret);
 632			goto trans_complete;
 633		}
 634	}
 635
 636	sprd_spi_dma_enable(ss, true);
 637	wait_for_completion(&(ss->xfer_completion));
 638
 639	if (ss->trans_mode & SPRD_SPI_TX_MODE)
 640		ret = write_size;
 641	else
 642		ret = ss->dma.rx_len;
 643
 644trans_complete:
 645	sprd_spi_dma_enable(ss, false);
 646	sprd_spi_enter_idle(ss);
 647	sprd_spi_irq_disable(ss);
 648
 649	return ret;
 650}
 651
 652static void sprd_spi_set_speed(struct sprd_spi *ss, u32 speed_hz)
 653{
 654	/*
 655	 * From SPI datasheet, the prescale calculation formula:
 656	 * prescale = SPI source clock / (2 * SPI_freq) - 1;
 657	 */
 658	u32 clk_div = DIV_ROUND_UP(ss->src_clk, speed_hz << 1) - 1;
 659
 660	/* Save the real hardware speed */
 661	ss->hw_speed_hz = (ss->src_clk >> 1) / (clk_div + 1);
 662	writel_relaxed(clk_div, ss->base + SPRD_SPI_CLKD);
 663}
 664
/*
 * Per-transfer hardware setup: clock polarity/phase, inter-word delay,
 * FIFO reset and wire mode (3-wire / 4-wire, dual data line).
 * Returns -EINVAL if the word delay is not expressed in clock cycles.
 */
static int sprd_spi_init_hw(struct sprd_spi *ss, struct spi_transfer *t)
{
	struct spi_delay *d = &t->word_delay;
	u16 word_delay, interval;
	u32 val;

	/* Only SCK-cycle word delays can be programmed into this hardware */
	if (d->unit != SPI_DELAY_UNIT_SCK)
		return -EINVAL;

	val = readl_relaxed(ss->base + SPRD_SPI_CTL0);
	val &= ~(SPRD_SPI_SCK_REV | SPRD_SPI_NG_TX | SPRD_SPI_NG_RX);
	/* Set default chip selection, clock phase and clock polarity */
	val |= ss->hw_mode & SPI_CPHA ? SPRD_SPI_NG_RX : SPRD_SPI_NG_TX;
	val |= ss->hw_mode & SPI_CPOL ? SPRD_SPI_SCK_REV : 0;
	writel_relaxed(val, ss->base + SPRD_SPI_CTL0);

	/*
	 * Set the intervals of two SPI frames, and the inteval calculation
	 * formula as below per datasheet:
	 * interval time (source clock cycles) = interval * 4 + 10.
	 */
	word_delay = clamp_t(u16, d->value, SPRD_SPI_MIN_DELAY_CYCLE,
			     SPRD_SPI_MAX_DELAY_CYCLE);
	interval = DIV_ROUND_UP(word_delay - 10, 4);
	/* Cache the effective delay for the timeout calculation */
	ss->word_delay = interval * 4 + 10;
	writel_relaxed(interval, ss->base + SPRD_SPI_CTL5);

	/* Reset SPI fifo */
	writel_relaxed(1, ss->base + SPRD_SPI_FIFO_RST);
	writel_relaxed(0, ss->base + SPRD_SPI_FIFO_RST);

	/* Set SPI work mode */
	val = readl_relaxed(ss->base + SPRD_SPI_CTL7);
	val &= ~SPRD_SPI_MODE_MASK;

	if (ss->hw_mode & SPI_3WIRE)
		val |= SPRD_SPI_3WIRE_MODE << SPRD_SPI_MODE_OFFSET;
	else
		val |= SPRD_SPI_4WIRE_MODE << SPRD_SPI_MODE_OFFSET;

	if (ss->hw_mode & SPI_TX_DUAL)
		val |= SPRD_SPI_DATA_LINE2_EN;
	else
		val &= ~SPRD_SPI_DATA_LINE2_EN;

	writel_relaxed(val, ss->base + SPRD_SPI_CTL7);

	return 0;
}
 714
/*
 * Prepare driver state and hardware for one spi_transfer: cache the
 * buffers, program mode/speed/word size, select the PIO helpers and DMA
 * parameters for the rounded word size, and set the RX/TX direction.
 * Returns 0 on success or a negative errno.
 */
static int sprd_spi_setup_transfer(struct spi_device *sdev,
				   struct spi_transfer *t)
{
	struct sprd_spi *ss = spi_controller_get_devdata(sdev->controller);
	u8 bits_per_word = t->bits_per_word;
	u32 val, mode = 0;
	int ret;

	ss->len = t->len;
	ss->tx_buf = t->tx_buf;
	ss->rx_buf = t->rx_buf;

	ss->hw_mode = sdev->mode;
	ret = sprd_spi_init_hw(ss, t);
	if (ret)
		return ret;

	/* Set transfer speed and valid bits */
	sprd_spi_set_speed(ss, t->speed_hz);
	sprd_spi_set_transfer_bits(ss, bits_per_word);

	/* Round the word size up to the next FIFO access width (8/16/32) */
	if (bits_per_word > 16)
		bits_per_word = round_up(bits_per_word, 16);
	else
		bits_per_word = round_up(bits_per_word, 8);

	switch (bits_per_word) {
	case 8:
		ss->trans_len = t->len;
		ss->read_bufs = sprd_spi_read_bufs_u8;
		ss->write_bufs = sprd_spi_write_bufs_u8;
		ss->dma.width = DMA_SLAVE_BUSWIDTH_1_BYTE;
		ss->dma.fragmens_len = SPRD_SPI_DMA_STEP;
		break;
	case 16:
		ss->trans_len = t->len >> 1;
		ss->read_bufs = sprd_spi_read_bufs_u16;
		ss->write_bufs = sprd_spi_write_bufs_u16;
		ss->dma.width = DMA_SLAVE_BUSWIDTH_2_BYTES;
		ss->dma.fragmens_len = SPRD_SPI_DMA_STEP << 1;
		break;
	case 32:
		ss->trans_len = t->len >> 2;
		ss->read_bufs = sprd_spi_read_bufs_u32;
		ss->write_bufs = sprd_spi_write_bufs_u32;
		ss->dma.width = DMA_SLAVE_BUSWIDTH_4_BYTES;
		ss->dma.fragmens_len = SPRD_SPI_DMA_STEP << 2;
		break;
	default:
		return -EINVAL;
	}

	/* Set transfer read or write mode */
	val = readl_relaxed(ss->base + SPRD_SPI_CTL1);
	val &= ~SPRD_SPI_RTX_MD_MASK;
	if (t->tx_buf)
		mode |= SPRD_SPI_TX_MODE;
	if (t->rx_buf)
		mode |= SPRD_SPI_RX_MODE;

	writel_relaxed(val | mode, ss->base + SPRD_SPI_CTL1);

	ss->trans_mode = mode;

	/*
	 * If in only receive mode, we need to trigger the SPI controller to
	 * receive data automatically.
	 */
	if (ss->trans_mode == SPRD_SPI_RX_MODE)
		ss->write_bufs = sprd_spi_write_only_receive;

	return 0;
}
 788
 789static int sprd_spi_transfer_one(struct spi_controller *sctlr,
 790				 struct spi_device *sdev,
 791				 struct spi_transfer *t)
 792{
 793	int ret;
 794
 795	ret = sprd_spi_setup_transfer(sdev, t);
 796	if (ret)
 797		goto setup_err;
 798
 799	if (sctlr->can_dma(sctlr, sdev, t))
 800		ret = sprd_spi_dma_txrx_bufs(sdev, t);
 801	else
 802		ret = sprd_spi_txrx_bufs(sdev, t);
 803
 804	if (ret == t->len)
 805		ret = 0;
 806	else if (ret >= 0)
 807		ret = -EREMOTEIO;
 808
 809setup_err:
 810	spi_finalize_current_transfer(sctlr);
 811
 812	return ret;
 813}
 814
/*
 * Interrupt handler for the DMA path.  TX-end completes TX-only
 * transfers; RX-end first drains by PIO any tail that the RX DMA did not
 * cover (the DMA length is rounded down to a fragment multiple) and then
 * completes the transfer.
 */
static irqreturn_t sprd_spi_handle_irq(int irq, void *data)
{
	struct sprd_spi *ss = (struct sprd_spi *)data;
	u32 val = readl_relaxed(ss->base + SPRD_SPI_INT_MASK_STS);

	if (val & SPRD_SPI_MASK_TX_END) {
		writel_relaxed(SPRD_SPI_TX_END_CLR, ss->base + SPRD_SPI_INT_CLR);
		/* With RX active, wait for RX-end instead to finish */
		if (!(ss->trans_mode & SPRD_SPI_RX_MODE))
			complete(&ss->xfer_completion);

		return IRQ_HANDLED;
	}

	if (val & SPRD_SPI_MASK_RX_END) {
		writel_relaxed(SPRD_SPI_RX_END_CLR, ss->base + SPRD_SPI_INT_CLR);
		if (ss->dma.rx_len < ss->len) {
			/*
			 * Read the remaining tail out of the FIFO by PIO.
			 * NOTE(review): ss->len and rx_len are in bytes but
			 * read_bufs() takes a word count — looks inconsistent
			 * for 16/32-bit words; confirm against hardware use.
			 */
			ss->rx_buf += ss->dma.rx_len;
			ss->dma.rx_len +=
				ss->read_bufs(ss, ss->len - ss->dma.rx_len);
		}
		complete(&ss->xfer_completion);

		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
 842
 843static int sprd_spi_irq_init(struct platform_device *pdev, struct sprd_spi *ss)
 844{
 845	int ret;
 846
 847	ss->irq = platform_get_irq(pdev, 0);
 848	if (ss->irq < 0)
 849		return ss->irq;
 850
 851	ret = devm_request_irq(&pdev->dev, ss->irq, sprd_spi_handle_irq,
 852				0, pdev->name, ss);
 853	if (ret)
 854		dev_err(&pdev->dev, "failed to request spi irq %d, ret = %d\n",
 855			ss->irq, ret);
 856
 857	return ret;
 858}
 859
/*
 * Resolve the SPI clocks: "spi" (functional), "source" (its parent) and
 * "enable" (the gate, mandatory).  Caches the source rate in ss->src_clk
 * for the speed and timeout calculations.  Returns 0 or a negative errno.
 */
static int sprd_spi_clk_init(struct platform_device *pdev, struct sprd_spi *ss)
{
	struct clk *clk_spi, *clk_parent;

	/* "spi" and "source" are optional; missing clocks fall back below */
	clk_spi = devm_clk_get(&pdev->dev, "spi");
	if (IS_ERR(clk_spi)) {
		dev_warn(&pdev->dev, "can't get the spi clock\n");
		clk_spi = NULL;
	}

	clk_parent = devm_clk_get(&pdev->dev, "source");
	if (IS_ERR(clk_parent)) {
		dev_warn(&pdev->dev, "can't get the source clock\n");
		clk_parent = NULL;
	}

	ss->clk = devm_clk_get(&pdev->dev, "enable");
	if (IS_ERR(ss->clk)) {
		dev_err(&pdev->dev, "can't get the enable clock\n");
		return PTR_ERR(ss->clk);
	}

	/*
	 * NOTE(review): with clk_spi == NULL, clk_set_parent(NULL, ...)
	 * succeeds and clk_get_rate(NULL) is 0, so src_clk ends up 0
	 * instead of the 26 MHz default — confirm this is intended.
	 */
	if (!clk_set_parent(clk_spi, clk_parent))
		ss->src_clk = clk_get_rate(clk_spi);
	else
		ss->src_clk = SPRD_SPI_DEFAULT_SOURCE;

	return 0;
}
 889
 890static bool sprd_spi_can_dma(struct spi_controller *sctlr,
 891			     struct spi_device *spi, struct spi_transfer *t)
 892{
 893	struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
 894
 895	return ss->dma.enable && (t->len > SPRD_SPI_FIFO_SIZE);
 896}
 897
 898static int sprd_spi_dma_init(struct platform_device *pdev, struct sprd_spi *ss)
 899{
 900	int ret;
 901
 902	ret = sprd_spi_dma_request(ss);
 903	if (ret) {
 904		if (ret == -EPROBE_DEFER)
 905			return ret;
 906
 907		dev_warn(&pdev->dev,
 908			 "failed to request dma, enter no dma mode, ret = %d\n",
 909			 ret);
 910
 911		return 0;
 912	}
 913
 914	ss->dma.enable = true;
 915
 916	return 0;
 917}
 918
 919static int sprd_spi_probe(struct platform_device *pdev)
 920{
 921	struct spi_controller *sctlr;
 922	struct resource *res;
 923	struct sprd_spi *ss;
 924	int ret;
 925
 926	pdev->id = of_alias_get_id(pdev->dev.of_node, "spi");
 927	sctlr = spi_alloc_master(&pdev->dev, sizeof(*ss));
 928	if (!sctlr)
 929		return -ENOMEM;
 930
 931	ss = spi_controller_get_devdata(sctlr);
 932	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
 933	ss->base = devm_ioremap_resource(&pdev->dev, res);
 934	if (IS_ERR(ss->base)) {
 935		ret = PTR_ERR(ss->base);
 936		goto free_controller;
 937	}
 938
 939	ss->phy_base = res->start;
 940	ss->dev = &pdev->dev;
 941	sctlr->dev.of_node = pdev->dev.of_node;
 942	sctlr->mode_bits = SPI_CPOL | SPI_CPHA | SPI_3WIRE | SPI_TX_DUAL;
 943	sctlr->bus_num = pdev->id;
 944	sctlr->set_cs = sprd_spi_chipselect;
 945	sctlr->transfer_one = sprd_spi_transfer_one;
 946	sctlr->can_dma = sprd_spi_can_dma;
 947	sctlr->auto_runtime_pm = true;
 948	sctlr->max_speed_hz = min_t(u32, ss->src_clk >> 1,
 949				    SPRD_SPI_MAX_SPEED_HZ);
 950
 951	init_completion(&ss->xfer_completion);
 952	platform_set_drvdata(pdev, sctlr);
 953	ret = sprd_spi_clk_init(pdev, ss);
 954	if (ret)
 955		goto free_controller;
 956
 957	ret = sprd_spi_irq_init(pdev, ss);
 958	if (ret)
 959		goto free_controller;
 960
 961	ret = sprd_spi_dma_init(pdev, ss);
 962	if (ret)
 963		goto free_controller;
 964
 965	ret = clk_prepare_enable(ss->clk);
 966	if (ret)
 967		goto release_dma;
 968
 969	ret = pm_runtime_set_active(&pdev->dev);
 970	if (ret < 0)
 971		goto disable_clk;
 972
 973	pm_runtime_set_autosuspend_delay(&pdev->dev,
 974					 SPRD_SPI_AUTOSUSPEND_DELAY);
 975	pm_runtime_use_autosuspend(&pdev->dev);
 976	pm_runtime_enable(&pdev->dev);
 977	ret = pm_runtime_get_sync(&pdev->dev);
 978	if (ret < 0) {
 979		dev_err(&pdev->dev, "failed to resume SPI controller\n");
 980		goto err_rpm_put;
 981	}
 982
 983	ret = devm_spi_register_controller(&pdev->dev, sctlr);
 984	if (ret)
 985		goto err_rpm_put;
 986
 987	pm_runtime_mark_last_busy(&pdev->dev);
 988	pm_runtime_put_autosuspend(&pdev->dev);
 989
 990	return 0;
 991
 992err_rpm_put:
 993	pm_runtime_put_noidle(&pdev->dev);
 994	pm_runtime_disable(&pdev->dev);
 995disable_clk:
 996	clk_disable_unprepare(ss->clk);
 997release_dma:
 998	sprd_spi_dma_release(ss);
 999free_controller:
1000	spi_controller_put(sctlr);
1001
1002	return ret;
1003}
1004
/*
 * Remove: resume the hardware so it can be quiesced, suspend the SPI
 * core queue, then release DMA, clocks and runtime PM state.
 */
static int sprd_spi_remove(struct platform_device *pdev)
{
	struct spi_controller *sctlr = platform_get_drvdata(pdev);
	struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
	int ret;

	/* Hardware must be powered to tear it down cleanly */
	ret = pm_runtime_resume_and_get(ss->dev);
	if (ret < 0) {
		dev_err(ss->dev, "failed to resume SPI controller\n");
		return ret;
	}

	spi_controller_suspend(sctlr);

	if (ss->dma.enable)
		sprd_spi_dma_release(ss);
	clk_disable_unprepare(ss->clk);
	pm_runtime_put_noidle(&pdev->dev);
	pm_runtime_disable(&pdev->dev);

	return 0;
}
1027
/*
 * Runtime suspend: drop the DMA channels (resume re-requests them) and
 * gate the clock.
 */
static int __maybe_unused sprd_spi_runtime_suspend(struct device *dev)
{
	struct spi_controller *sctlr = dev_get_drvdata(dev);
	struct sprd_spi *ss = spi_controller_get_devdata(sctlr);

	if (ss->dma.enable)
		sprd_spi_dma_release(ss);

	clk_disable_unprepare(ss->clk);

	return 0;
}
1040
1041static int __maybe_unused sprd_spi_runtime_resume(struct device *dev)
1042{
1043	struct spi_controller *sctlr = dev_get_drvdata(dev);
1044	struct sprd_spi *ss = spi_controller_get_devdata(sctlr);
1045	int ret;
1046
1047	ret = clk_prepare_enable(ss->clk);
1048	if (ret)
1049		return ret;
1050
1051	if (!ss->dma.enable)
1052		return 0;
1053
1054	ret = sprd_spi_dma_request(ss);
1055	if (ret)
1056		clk_disable_unprepare(ss->clk);
1057
1058	return ret;
1059}
1060
/* Runtime PM only; system sleep is handled by the SPI core via runtime PM */
static const struct dev_pm_ops sprd_spi_pm_ops = {
	SET_RUNTIME_PM_OPS(sprd_spi_runtime_suspend,
			   sprd_spi_runtime_resume, NULL)
};

static const struct of_device_id sprd_spi_of_match[] = {
	{ .compatible = "sprd,sc9860-spi", },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, sprd_spi_of_match);

static struct platform_driver sprd_spi_driver = {
	.driver = {
		.name = "sprd-spi",
		.of_match_table = sprd_spi_of_match,
		.pm = &sprd_spi_pm_ops,
	},
	.probe = sprd_spi_probe,
	.remove  = sprd_spi_remove,
};

module_platform_driver(sprd_spi_driver);

MODULE_DESCRIPTION("Spreadtrum SPI Controller driver");
MODULE_AUTHOR("Lanqing Liu <lanqing.liu@spreadtrum.com>");
MODULE_LICENSE("GPL v2");