v5.9
// SPDX-License-Identifier: GPL-2.0-only
/*
 * SPI driver for Nvidia's Tegra20 Serial Flash Controller.
 *
 * Copyright (c) 2012, NVIDIA CORPORATION.  All rights reserved.
 *
 * Author: Laxman Dewangan <ldewangan@nvidia.com>
 */

#include <linux/clk.h>
#include <linux/completion.h>
#include <linux/delay.h>
#include <linux/err.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/kthread.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/reset.h>
#include <linux/spi/spi.h>

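/*
 * Register offsets and bit fields of the SFLASH controller as used by
 * this driver: a command register (clock polarity, chip-select control,
 * bit length, TX/RX enables), a write-one-to-clear status register, a
 * DMA/interrupt control register and the TX/RX FIFO data ports.
 */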
#define SPI_COMMAND				0x000
#define SPI_GO					BIT(30)
#define SPI_M_S					BIT(28)
#define SPI_ACTIVE_SCLK_MASK			(0x3 << 26)
#define SPI_ACTIVE_SCLK_DRIVE_LOW		(0 << 26)
#define SPI_ACTIVE_SCLK_DRIVE_HIGH		(1 << 26)
#define SPI_ACTIVE_SCLK_PULL_LOW		(2 << 26)
#define SPI_ACTIVE_SCLK_PULL_HIGH		(3 << 26)

#define SPI_CK_SDA_FALLING			(1 << 21)
#define SPI_CK_SDA_RISING			(0 << 21)
#define SPI_CK_SDA_MASK				(1 << 21)
#define SPI_ACTIVE_SDA				(0x3 << 18)
#define SPI_ACTIVE_SDA_DRIVE_LOW		(0 << 18)
#define SPI_ACTIVE_SDA_DRIVE_HIGH		(1 << 18)
#define SPI_ACTIVE_SDA_PULL_LOW			(2 << 18)
#define SPI_ACTIVE_SDA_PULL_HIGH		(3 << 18)

#define SPI_CS_POL_INVERT			BIT(16)
#define SPI_TX_EN				BIT(15)
#define SPI_RX_EN				BIT(14)
#define SPI_CS_VAL_HIGH				BIT(13)
#define SPI_CS_VAL_LOW				0x0
#define SPI_CS_SW				BIT(12)
#define SPI_CS_HW				0x0
#define SPI_CS_DELAY_MASK			(7 << 9)
#define SPI_CS3_EN				BIT(8)
#define SPI_CS2_EN				BIT(7)
#define SPI_CS1_EN				BIT(6)
#define SPI_CS0_EN				BIT(5)

#define SPI_CS_MASK			(SPI_CS3_EN | SPI_CS2_EN |	\
					SPI_CS1_EN | SPI_CS0_EN)
#define SPI_BIT_LENGTH(x)		(((x) & 0x1f) << 0)

#define SPI_MODES			(SPI_ACTIVE_SCLK_MASK | SPI_CK_SDA_MASK)

#define SPI_STATUS			0x004
#define SPI_BSY				BIT(31)
#define SPI_RDY				BIT(30)
#define SPI_TXF_FLUSH			BIT(29)
#define SPI_RXF_FLUSH			BIT(28)
#define SPI_RX_UNF			BIT(27)
#define SPI_TX_OVF			BIT(26)
#define SPI_RXF_EMPTY			BIT(25)
#define SPI_RXF_FULL			BIT(24)
#define SPI_TXF_EMPTY			BIT(23)
#define SPI_TXF_FULL			BIT(22)
#define SPI_BLK_CNT(count)		(((count) & 0xffff) + 1)

#define SPI_FIFO_ERROR			(SPI_RX_UNF | SPI_TX_OVF)
#define SPI_FIFO_EMPTY			(SPI_TXF_EMPTY | SPI_RXF_EMPTY)

#define SPI_RX_CMP			0x8
#define SPI_DMA_CTL			0x0C
#define SPI_DMA_EN			BIT(31)
#define SPI_IE_RXC			BIT(27)
#define SPI_IE_TXC			BIT(26)
#define SPI_PACKED			BIT(20)
#define SPI_RX_TRIG_MASK		(0x3 << 18)
#define SPI_RX_TRIG_1W			(0x0 << 18)
#define SPI_RX_TRIG_4W			(0x1 << 18)
#define SPI_TX_TRIG_MASK		(0x3 << 16)
#define SPI_TX_TRIG_1W			(0x0 << 16)
#define SPI_TX_TRIG_4W			(0x1 << 16)
#define SPI_DMA_BLK_COUNT(count)	(((count) - 1) & 0xFFFF)

#define SPI_TX_FIFO			0x10
#define SPI_RX_FIFO			0x20

#define DATA_DIR_TX			(1 << 0)
#define DATA_DIR_RX			(1 << 1)

#define MAX_CHIP_SELECT			4
#define SPI_FIFO_DEPTH			4
#define SPI_DMA_TIMEOUT			(msecs_to_jiffies(1000))

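/*
 * Per-controller driver state: clock/reset handles, MMIO base, the
 * transfer currently in flight and how far its TX/RX sides have
 * progressed, plus shadow copies of the command and DMA control
 * registers.
 */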
struct tegra_sflash_data {
	struct device				*dev;
	struct spi_master			*master;
	spinlock_t				lock;

	struct clk				*clk;
	struct reset_control			*rst;
	void __iomem				*base;
	unsigned				irq;
	u32					cur_speed;

	struct spi_device			*cur_spi;
	unsigned				cur_pos;
	unsigned				cur_len;
	unsigned				bytes_per_word;
	unsigned				cur_direction;
	unsigned				curr_xfer_words;

	unsigned				cur_rx_pos;
	unsigned				cur_tx_pos;

	u32					tx_status;
	u32					rx_status;
	u32					status_reg;

	u32					def_command_reg;
	u32					command_reg;
	u32					dma_control_reg;

	struct completion			xfer_completion;
	struct spi_transfer			*curr_xfer;
};

static int tegra_sflash_runtime_suspend(struct device *dev);
static int tegra_sflash_runtime_resume(struct device *dev);

static inline u32 tegra_sflash_readl(struct tegra_sflash_data *tsd,
		unsigned long reg)
{
	return readl(tsd->base + reg);
}

static inline void tegra_sflash_writel(struct tegra_sflash_data *tsd,
		u32 val, unsigned long reg)
{
	writel(val, tsd->base + reg);
}

static void tegra_sflash_clear_status(struct tegra_sflash_data *tsd)
{
	/* Write 1 to clear status register */
	tegra_sflash_writel(tsd, SPI_RDY | SPI_FIFO_ERROR, SPI_STATUS);
}

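/*
 * Work out how many FIFO words the next burst can carry: the remaining
 * byte count of the transfer is converted to words of bytes_per_word
 * and clamped to SPI_FIFO_DEPTH.
 */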
static unsigned tegra_sflash_calculate_curr_xfer_param(
	struct spi_device *spi, struct tegra_sflash_data *tsd,
	struct spi_transfer *t)
{
	unsigned remain_len = t->len - tsd->cur_pos;
	unsigned max_word;

	tsd->bytes_per_word = DIV_ROUND_UP(t->bits_per_word, 8);
	max_word = remain_len / tsd->bytes_per_word;
	if (max_word > SPI_FIFO_DEPTH)
		max_word = SPI_FIFO_DEPTH;
	tsd->curr_xfer_words = max_word;
	return max_word;
}

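/*
 * Pack client TX bytes into 32-bit FIFO words, least significant byte
 * first, and push them until either the FIFO reports full or the
 * planned word count for this burst has been written.
 */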
static unsigned tegra_sflash_fill_tx_fifo_from_client_txbuf(
	struct tegra_sflash_data *tsd, struct spi_transfer *t)
{
	unsigned nbytes;
	u32 status;
	unsigned max_n_32bit = tsd->curr_xfer_words;
	u8 *tx_buf = (u8 *)t->tx_buf + tsd->cur_tx_pos;

	if (max_n_32bit > SPI_FIFO_DEPTH)
		max_n_32bit = SPI_FIFO_DEPTH;
	nbytes = max_n_32bit * tsd->bytes_per_word;

	status = tegra_sflash_readl(tsd, SPI_STATUS);
	while (!(status & SPI_TXF_FULL)) {
		int i;
		u32 x = 0;

		for (i = 0; nbytes && (i < tsd->bytes_per_word);
							i++, nbytes--)
			x |= (u32)(*tx_buf++) << (i * 8);
		tegra_sflash_writel(tsd, x, SPI_TX_FIFO);
		if (!nbytes)
			break;

		status = tegra_sflash_readl(tsd, SPI_STATUS);
	}
	tsd->cur_tx_pos += max_n_32bit * tsd->bytes_per_word;
	return max_n_32bit;
}

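/*
 * Drain the RX FIFO into the client buffer, unpacking each 32-bit word
 * into bytes_per_word bytes, until the controller reports the FIFO
 * empty.
 */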
static int tegra_sflash_read_rx_fifo_to_client_rxbuf(
		struct tegra_sflash_data *tsd, struct spi_transfer *t)
{
	u32 status;
	unsigned int read_words = 0;
	u8 *rx_buf = (u8 *)t->rx_buf + tsd->cur_rx_pos;

	status = tegra_sflash_readl(tsd, SPI_STATUS);
	while (!(status & SPI_RXF_EMPTY)) {
		int i;
		u32 x = tegra_sflash_readl(tsd, SPI_RX_FIFO);

		for (i = 0; (i < tsd->bytes_per_word); i++)
			*rx_buf++ = (x >> (i*8)) & 0xFF;
		read_words++;
		status = tegra_sflash_readl(tsd, SPI_STATUS);
	}
	tsd->cur_rx_pos += read_words * tsd->bytes_per_word;
	return 0;
}

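/*
 * Kick off one interrupt-driven burst: enable the TX/RX completion
 * interrupts that match the transfer direction, preload the TX FIFO if
 * needed, program the block count and finally set SPI_DMA_EN to start
 * the controller.
 */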
static int tegra_sflash_start_cpu_based_transfer(
		struct tegra_sflash_data *tsd, struct spi_transfer *t)
{
	u32 val = 0;
	unsigned cur_words;

	if (tsd->cur_direction & DATA_DIR_TX)
		val |= SPI_IE_TXC;

	if (tsd->cur_direction & DATA_DIR_RX)
		val |= SPI_IE_RXC;

	tegra_sflash_writel(tsd, val, SPI_DMA_CTL);
	tsd->dma_control_reg = val;

	if (tsd->cur_direction & DATA_DIR_TX)
		cur_words = tegra_sflash_fill_tx_fifo_from_client_txbuf(tsd, t);
	else
		cur_words = tsd->curr_xfer_words;
	val |= SPI_DMA_BLK_COUNT(cur_words);
	tegra_sflash_writel(tsd, val, SPI_DMA_CTL);
	tsd->dma_control_reg = val;
	val |= SPI_DMA_EN;
	tegra_sflash_writel(tsd, val, SPI_DMA_CTL);
	return 0;
}

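/*
 * Program the command register for one spi_transfer: set the clock
 * rate, bit length, SPI mode (CPOL/CPHA) and chip-select enable on the
 * first transfer of a message, select TX/RX direction from the supplied
 * buffers, then start the first burst.
 */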
static int tegra_sflash_start_transfer_one(struct spi_device *spi,
		struct spi_transfer *t, bool is_first_of_msg,
		bool is_single_xfer)
{
	struct tegra_sflash_data *tsd = spi_master_get_devdata(spi->master);
	u32 speed;
	u32 command;

	speed = t->speed_hz;
	if (speed != tsd->cur_speed) {
		clk_set_rate(tsd->clk, speed);
		tsd->cur_speed = speed;
	}

	tsd->cur_spi = spi;
	tsd->cur_pos = 0;
	tsd->cur_rx_pos = 0;
	tsd->cur_tx_pos = 0;
	tsd->curr_xfer = t;
	tegra_sflash_calculate_curr_xfer_param(spi, tsd, t);
	if (is_first_of_msg) {
		command = tsd->def_command_reg;
		command |= SPI_BIT_LENGTH(t->bits_per_word - 1);
		command |= SPI_CS_VAL_HIGH;

		command &= ~SPI_MODES;
		if (spi->mode & SPI_CPHA)
			command |= SPI_CK_SDA_FALLING;

		if (spi->mode & SPI_CPOL)
			command |= SPI_ACTIVE_SCLK_DRIVE_HIGH;
		else
			command |= SPI_ACTIVE_SCLK_DRIVE_LOW;
		command |= SPI_CS0_EN << spi->chip_select;
	} else {
		command = tsd->command_reg;
		command &= ~SPI_BIT_LENGTH(~0);
		command |= SPI_BIT_LENGTH(t->bits_per_word - 1);
		command &= ~(SPI_RX_EN | SPI_TX_EN);
	}

	tsd->cur_direction = 0;
	if (t->rx_buf) {
		command |= SPI_RX_EN;
		tsd->cur_direction |= DATA_DIR_RX;
	}
	if (t->tx_buf) {
		command |= SPI_TX_EN;
		tsd->cur_direction |= DATA_DIR_TX;
	}
	tegra_sflash_writel(tsd, command, SPI_COMMAND);
	tsd->command_reg = command;

	return tegra_sflash_start_cpu_based_transfer(tsd, t);
}

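/*
 * spi_master transfer_one_message() hook: run the transfers of a
 * message back to back, waiting for each one to complete (or time out),
 * then drop back to the default command register before finalizing the
 * message.
 */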
static int tegra_sflash_transfer_one_message(struct spi_master *master,
			struct spi_message *msg)
{
	bool is_first_msg = true;
	int single_xfer;
	struct tegra_sflash_data *tsd = spi_master_get_devdata(master);
	struct spi_transfer *xfer;
	struct spi_device *spi = msg->spi;
	int ret;

	msg->status = 0;
	msg->actual_length = 0;
	single_xfer = list_is_singular(&msg->transfers);
	list_for_each_entry(xfer, &msg->transfers, transfer_list) {
		reinit_completion(&tsd->xfer_completion);
		ret = tegra_sflash_start_transfer_one(spi, xfer,
					is_first_msg, single_xfer);
		if (ret < 0) {
			dev_err(tsd->dev,
				"spi can not start transfer, err %d\n", ret);
			goto exit;
		}
		is_first_msg = false;
		ret = wait_for_completion_timeout(&tsd->xfer_completion,
						SPI_DMA_TIMEOUT);
		if (WARN_ON(ret == 0)) {
			dev_err(tsd->dev,
				"spi transfer timeout, err %d\n", ret);
			ret = -EIO;
			goto exit;
		}

		if (tsd->tx_status || tsd->rx_status) {
			dev_err(tsd->dev, "Error in Transfer\n");
			ret = -EIO;
			goto exit;
		}
		msg->actual_length += xfer->len;
		if (xfer->cs_change &&
		    (xfer->delay_usecs || xfer->delay.value)) {
			tegra_sflash_writel(tsd, tsd->def_command_reg,
					SPI_COMMAND);
			spi_transfer_delay_exec(xfer);
		}
	}
	ret = 0;
exit:
	tegra_sflash_writel(tsd, tsd->def_command_reg, SPI_COMMAND);
	msg->status = ret;
	spi_finalize_current_message(master);
	return ret;
}

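/*
 * Interrupt-context half of a burst: on a FIFO error or a still-busy
 * controller, reset the block and complete the transfer with the error
 * flags left set; otherwise drain the RX FIFO and either complete the
 * transfer or start the next burst of the same spi_transfer.
 */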
static irqreturn_t handle_cpu_based_xfer(struct tegra_sflash_data *tsd)
{
	struct spi_transfer *t = tsd->curr_xfer;
	unsigned long flags;

	spin_lock_irqsave(&tsd->lock, flags);
	if (tsd->tx_status || tsd->rx_status || (tsd->status_reg & SPI_BSY)) {
		dev_err(tsd->dev,
			"CpuXfer ERROR bit set 0x%x\n", tsd->status_reg);
		dev_err(tsd->dev,
			"CpuXfer 0x%08x:0x%08x\n", tsd->command_reg,
				tsd->dma_control_reg);
		reset_control_assert(tsd->rst);
		udelay(2);
		reset_control_deassert(tsd->rst);
		complete(&tsd->xfer_completion);
		goto exit;
	}

	if (tsd->cur_direction & DATA_DIR_RX)
		tegra_sflash_read_rx_fifo_to_client_rxbuf(tsd, t);

	if (tsd->cur_direction & DATA_DIR_TX)
		tsd->cur_pos = tsd->cur_tx_pos;
	else
		tsd->cur_pos = tsd->cur_rx_pos;

	if (tsd->cur_pos == t->len) {
		complete(&tsd->xfer_completion);
		goto exit;
	}

	tegra_sflash_calculate_curr_xfer_param(tsd->cur_spi, tsd, t);
	tegra_sflash_start_cpu_based_transfer(tsd, t);
exit:
	spin_unlock_irqrestore(&tsd->lock, flags);
	return IRQ_HANDLED;
}

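/*
 * Top-level ISR: latch the status register, record overflow/underflow
 * errors for the active directions, clear the status bits and hand off
 * to handle_cpu_based_xfer().
 */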
static irqreturn_t tegra_sflash_isr(int irq, void *context_data)
{
	struct tegra_sflash_data *tsd = context_data;

	tsd->status_reg = tegra_sflash_readl(tsd, SPI_STATUS);
	if (tsd->cur_direction & DATA_DIR_TX)
		tsd->tx_status = tsd->status_reg & SPI_TX_OVF;

	if (tsd->cur_direction & DATA_DIR_RX)
		tsd->rx_status = tsd->status_reg & SPI_RX_UNF;
	tegra_sflash_clear_status(tsd);

	return handle_cpu_based_xfer(tsd);
}

static const struct of_device_id tegra_sflash_of_match[] = {
	{ .compatible = "nvidia,tegra20-sflash", },
	{}
};
MODULE_DEVICE_TABLE(of, tegra_sflash_of_match);

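/*
 * Probe: allocate the spi_master, map the registers, request the IRQ,
 * grab clock and reset handles, bring the controller out of reset under
 * runtime PM, program the default command register and register the
 * master.
 */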
static int tegra_sflash_probe(struct platform_device *pdev)
{
	struct spi_master	*master;
	struct tegra_sflash_data	*tsd;
	int ret;
	const struct of_device_id *match;

	match = of_match_device(tegra_sflash_of_match, &pdev->dev);
	if (!match) {
		dev_err(&pdev->dev, "Error: No device match found\n");
		return -ENODEV;
	}

	master = spi_alloc_master(&pdev->dev, sizeof(*tsd));
	if (!master) {
		dev_err(&pdev->dev, "master allocation failed\n");
		return -ENOMEM;
	}

	/* the spi->mode bits understood by this driver: */
	master->mode_bits = SPI_CPOL | SPI_CPHA;
	master->transfer_one_message = tegra_sflash_transfer_one_message;
	master->auto_runtime_pm = true;
	master->num_chipselect = MAX_CHIP_SELECT;

	platform_set_drvdata(pdev, master);
	tsd = spi_master_get_devdata(master);
	tsd->master = master;
	tsd->dev = &pdev->dev;
	spin_lock_init(&tsd->lock);

	if (of_property_read_u32(tsd->dev->of_node, "spi-max-frequency",
				 &master->max_speed_hz))
		master->max_speed_hz = 25000000; /* 25MHz */

	tsd->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(tsd->base)) {
		ret = PTR_ERR(tsd->base);
		goto exit_free_master;
	}

	tsd->irq = platform_get_irq(pdev, 0);
	ret = request_irq(tsd->irq, tegra_sflash_isr, 0,
			dev_name(&pdev->dev), tsd);
	if (ret < 0) {
		dev_err(&pdev->dev, "Failed to register ISR for IRQ %d\n",
					tsd->irq);
		goto exit_free_master;
	}

	tsd->clk = devm_clk_get(&pdev->dev, NULL);
	if (IS_ERR(tsd->clk)) {
		dev_err(&pdev->dev, "can not get clock\n");
		ret = PTR_ERR(tsd->clk);
		goto exit_free_irq;
	}

	tsd->rst = devm_reset_control_get_exclusive(&pdev->dev, "spi");
	if (IS_ERR(tsd->rst)) {
		dev_err(&pdev->dev, "can not get reset\n");
		ret = PTR_ERR(tsd->rst);
		goto exit_free_irq;
	}

	init_completion(&tsd->xfer_completion);
	pm_runtime_enable(&pdev->dev);
	if (!pm_runtime_enabled(&pdev->dev)) {
		ret = tegra_sflash_runtime_resume(&pdev->dev);
		if (ret)
			goto exit_pm_disable;
	}

	ret = pm_runtime_get_sync(&pdev->dev);
	if (ret < 0) {
		dev_err(&pdev->dev, "pm runtime get failed, e = %d\n", ret);
		pm_runtime_put_noidle(&pdev->dev);
		goto exit_pm_disable;
	}

	/* Reset controller */
	reset_control_assert(tsd->rst);
	udelay(2);
	reset_control_deassert(tsd->rst);

	tsd->def_command_reg = SPI_M_S | SPI_CS_SW;
	tegra_sflash_writel(tsd, tsd->def_command_reg, SPI_COMMAND);
	pm_runtime_put(&pdev->dev);

	master->dev.of_node = pdev->dev.of_node;
	ret = devm_spi_register_master(&pdev->dev, master);
	if (ret < 0) {
		dev_err(&pdev->dev, "can not register to master err %d\n", ret);
		goto exit_pm_disable;
	}
	return ret;

exit_pm_disable:
	pm_runtime_disable(&pdev->dev);
	if (!pm_runtime_status_suspended(&pdev->dev))
		tegra_sflash_runtime_suspend(&pdev->dev);
exit_free_irq:
	free_irq(tsd->irq, tsd);
exit_free_master:
	spi_master_put(master);
	return ret;
}

static int tegra_sflash_remove(struct platform_device *pdev)
{
	struct spi_master *master = platform_get_drvdata(pdev);
	struct tegra_sflash_data	*tsd = spi_master_get_devdata(master);

	free_irq(tsd->irq, tsd);

	pm_runtime_disable(&pdev->dev);
	if (!pm_runtime_status_suspended(&pdev->dev))
		tegra_sflash_runtime_suspend(&pdev->dev);

	return 0;
}

#ifdef CONFIG_PM_SLEEP
static int tegra_sflash_suspend(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);

	return spi_master_suspend(master);
}

static int tegra_sflash_resume(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct tegra_sflash_data *tsd = spi_master_get_devdata(master);
	int ret;

	ret = pm_runtime_get_sync(dev);
	if (ret < 0) {
		dev_err(dev, "pm runtime failed, e = %d\n", ret);
		return ret;
	}
	tegra_sflash_writel(tsd, tsd->command_reg, SPI_COMMAND);
	pm_runtime_put(dev);

	return spi_master_resume(master);
}
#endif

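/*
 * Runtime PM: gate the controller clock when idle. The read-back of
 * SPI_COMMAND before gating flushes any writes still queued on the
 * PPSB bus.
 */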
static int tegra_sflash_runtime_suspend(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct tegra_sflash_data *tsd = spi_master_get_devdata(master);

	/* Flush all writes which are in the PPSB queue by reading back */
	tegra_sflash_readl(tsd, SPI_COMMAND);

	clk_disable_unprepare(tsd->clk);
	return 0;
}

static int tegra_sflash_runtime_resume(struct device *dev)
{
	struct spi_master *master = dev_get_drvdata(dev);
	struct tegra_sflash_data *tsd = spi_master_get_devdata(master);
	int ret;

	ret = clk_prepare_enable(tsd->clk);
	if (ret < 0) {
		dev_err(tsd->dev, "clk_prepare failed: %d\n", ret);
		return ret;
	}
	return 0;
}

static const struct dev_pm_ops slink_pm_ops = {
	SET_RUNTIME_PM_OPS(tegra_sflash_runtime_suspend,
		tegra_sflash_runtime_resume, NULL)
	SET_SYSTEM_SLEEP_PM_OPS(tegra_sflash_suspend, tegra_sflash_resume)
};
static struct platform_driver tegra_sflash_driver = {
	.driver = {
		.name		= "spi-tegra-sflash",
		.pm		= &slink_pm_ops,
		.of_match_table	= tegra_sflash_of_match,
	},
	.probe =	tegra_sflash_probe,
	.remove =	tegra_sflash_remove,
};
module_platform_driver(tegra_sflash_driver);

MODULE_ALIAS("platform:spi-tegra-sflash");
MODULE_DESCRIPTION("NVIDIA Tegra20 Serial Flash Controller Driver");
MODULE_AUTHOR("Laxman Dewangan <ldewangan@nvidia.com>");
MODULE_LICENSE("GPL v2");