// SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
//
// AMD SPI controller driver
//
// Copyright (c) 2020, Advanced Micro Devices, Inc.
//
// Author: Sanjay R Mehta <[email protected]>

#include <linux/acpi.h>
#include <linux/delay.h>
#include <linux/dma-mapping.h>
#include <linux/init.h>
#include <linux/io-64-nonatomic-lo-hi.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/spi/spi.h>
#include <linux/spi/spi-mem.h>

#define AMD_SPI_CTRL0_REG	0x00
#define AMD_SPI_EXEC_CMD	BIT(16)
#define AMD_SPI_FIFO_CLEAR	BIT(20)
#define AMD_SPI_BUSY		BIT(31)

#define AMD_SPI_OPCODE_REG	0x45
#define AMD_SPI_CMD_TRIGGER_REG	0x47
#define AMD_SPI_TRIGGER_CMD	BIT(7)

#define AMD_SPI_OPCODE_MASK	0xFF

#define AMD_SPI_ALT_CS_REG	0x1D
#define AMD_SPI_ALT_CS_MASK	0x3

#define AMD_SPI_FIFO_BASE	0x80
#define AMD_SPI_TX_COUNT_REG	0x48
#define AMD_SPI_RX_COUNT_REG	0x4B
#define AMD_SPI_STATUS_REG	0x4C
#define AMD_SPI_ADDR32CTRL_REG	0x50

#define AMD_SPI_FIFO_SIZE	70
#define AMD_SPI_MEM_SIZE	200
#define AMD_SPI_MAX_DATA	64
#define AMD_SPI_HID2_DMA_SIZE   4096

#define AMD_SPI_ENA_REG		0x20
#define AMD_SPI_ALT_SPD_SHIFT	20
#define AMD_SPI_ALT_SPD_MASK	GENMASK(23, AMD_SPI_ALT_SPD_SHIFT)
#define AMD_SPI_SPI100_SHIFT	0
#define AMD_SPI_SPI100_MASK	GENMASK(AMD_SPI_SPI100_SHIFT, AMD_SPI_SPI100_SHIFT)
#define AMD_SPI_SPEED_REG	0x6C
#define AMD_SPI_SPD7_SHIFT	8
#define AMD_SPI_SPD7_MASK	GENMASK(13, AMD_SPI_SPD7_SHIFT)

#define AMD_SPI_HID2_INPUT_RING_BUF0	0x100
#define AMD_SPI_HID2_CNTRL		0x150
#define AMD_SPI_HID2_INT_STATUS		0x154
#define AMD_SPI_HID2_CMD_START		0x156
#define AMD_SPI_HID2_INT_MASK		0x158
#define AMD_SPI_HID2_READ_CNTRL0	0x170
#define AMD_SPI_HID2_READ_CNTRL1	0x174
#define AMD_SPI_HID2_READ_CNTRL2	0x180

#define AMD_SPI_MAX_HZ		100000000
#define AMD_SPI_MIN_HZ		800000

#define AMD_SPI_IO_SLEEP_US	20
#define AMD_SPI_IO_TIMEOUT_US	2000000

/* SPI read command opcodes */
#define AMD_SPI_OP_READ          0x03	/* Read data bytes (low frequency) */
#define AMD_SPI_OP_READ_FAST     0x0b	/* Read data bytes (high frequency) */
#define AMD_SPI_OP_READ_1_1_2    0x3b	/* Read data bytes (Dual Output SPI) */
#define AMD_SPI_OP_READ_1_2_2    0xbb	/* Read data bytes (Dual I/O SPI) */
#define AMD_SPI_OP_READ_1_1_4    0x6b	/* Read data bytes (Quad Output SPI) */
#define AMD_SPI_OP_READ_1_4_4    0xeb	/* Read data bytes (Quad I/O SPI) */

/* SPI read command opcodes - 4B address */
#define AMD_SPI_OP_READ_FAST_4B		0x0c    /* Read data bytes (high frequency) */
#define AMD_SPI_OP_READ_1_1_2_4B	0x3c    /* Read data bytes (Dual Output SPI) */
#define AMD_SPI_OP_READ_1_2_2_4B	0xbc    /* Read data bytes (Dual I/O SPI) */
#define AMD_SPI_OP_READ_1_1_4_4B	0x6c    /* Read data bytes (Quad Output SPI) */
#define AMD_SPI_OP_READ_1_4_4_4B	0xec    /* Read data bytes (Quad I/O SPI) */

/**
 * enum amd_spi_versions - SPI controller versions
 * @AMD_SPI_V1:		AMDI0061 hardware version
 * @AMD_SPI_V2:		AMDI0062 hardware version
 * @AMD_HID2_SPI:	AMDI0063 hardware version
 */
enum amd_spi_versions {
	AMD_SPI_V1 = 1,
	AMD_SPI_V2,
	AMD_HID2_SPI,
};

enum amd_spi_speed {
	F_66_66MHz,
	F_33_33MHz,
	F_22_22MHz,
	F_16_66MHz,
	F_100MHz,
	F_800KHz,
	SPI_SPD7 = 0x7,
	F_50MHz = 0x4,
	F_4MHz = 0x32,
	F_3_17MHz = 0x3F
};
/**
 * struct amd_spi_freq - Matches device speed with values to write in regs
 * @speed_hz: Device frequency
 * @enable_val: Value to be written to the "enable register"
 * @spd7_val: Some frequencies require a value to be written to the SPISPEED register
 */
struct amd_spi_freq {
	u32 speed_hz;
	u32 enable_val;
	u32 spd7_val;
};

/**
 * struct amd_spi - SPI driver instance
 * @io_remap_addr:	Start address of the SPI controller registers
 * @phy_dma_buf:	Physical address of DMA buffer
 * @dma_virt_addr:	Virtual address of DMA buffer
 * @version:		SPI controller hardware version
 * @speed_hz:		Device frequency
 */
struct amd_spi {
	void __iomem *io_remap_addr;
	dma_addr_t phy_dma_buf;
	void *dma_virt_addr;
	enum amd_spi_versions version;
	unsigned int speed_hz;
};

static inline u8 amd_spi_readreg8(struct amd_spi *amd_spi, int idx)
{
	return readb((u8 __iomem *)amd_spi->io_remap_addr + idx);
}

static inline void amd_spi_writereg8(struct amd_spi *amd_spi, int idx, u8 val)
{
	writeb(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
}

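/* Read-modify-write helper: clear the bits in @clear, then set the bits in @set */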
static void amd_spi_setclear_reg8(struct amd_spi *amd_spi, int idx, u8 set, u8 clear)
{
	u8 tmp = amd_spi_readreg8(amd_spi, idx);

	tmp = (tmp & ~clear) | set;
	amd_spi_writereg8(amd_spi, idx, tmp);
}

static inline u16 amd_spi_readreg16(struct amd_spi *amd_spi, int idx)
{
	return readw((u8 __iomem *)amd_spi->io_remap_addr + idx);
}

static inline void amd_spi_writereg16(struct amd_spi *amd_spi, int idx, u16 val)
{
	writew(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
}

static inline u32 amd_spi_readreg32(struct amd_spi *amd_spi, int idx)
{
	return readl((u8 __iomem *)amd_spi->io_remap_addr + idx);
}

static inline void amd_spi_writereg32(struct amd_spi *amd_spi, int idx, u32 val)
{
	writel(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
}

static inline u64 amd_spi_readreg64(struct amd_spi *amd_spi, int idx)
{
	return readq((u8 __iomem *)amd_spi->io_remap_addr + idx);
}

static inline void amd_spi_writereg64(struct amd_spi *amd_spi, int idx, u64 val)
{
	writeq(val, ((u8 __iomem *)amd_spi->io_remap_addr + idx));
}

static inline void amd_spi_setclear_reg32(struct amd_spi *amd_spi, int idx, u32 set, u32 clear)
{
	u32 tmp = amd_spi_readreg32(amd_spi, idx);

	tmp = (tmp & ~clear) | set;
	amd_spi_writereg32(amd_spi, idx, tmp);
}

static void amd_spi_select_chip(struct amd_spi *amd_spi, u8 cs)
{
	amd_spi_setclear_reg8(amd_spi, AMD_SPI_ALT_CS_REG, cs, AMD_SPI_ALT_CS_MASK);
}

static inline void amd_spi_clear_chip(struct amd_spi *amd_spi, u8 chip_select)
{
	amd_spi_writereg8(amd_spi, AMD_SPI_ALT_CS_REG, chip_select & ~AMD_SPI_ALT_CS_MASK);
}

static void amd_spi_clear_fifo_ptr(struct amd_spi *amd_spi)
{
	amd_spi_setclear_reg32(amd_spi, AMD_SPI_CTRL0_REG, AMD_SPI_FIFO_CLEAR, AMD_SPI_FIFO_CLEAR);
}

static int amd_spi_set_opcode(struct amd_spi *amd_spi, u8 cmd_opcode)
{
	switch (amd_spi->version) {
	case AMD_SPI_V1:
		amd_spi_setclear_reg32(amd_spi, AMD_SPI_CTRL0_REG, cmd_opcode,
				       AMD_SPI_OPCODE_MASK);
		return 0;
	case AMD_SPI_V2:
	case AMD_HID2_SPI:
		amd_spi_writereg8(amd_spi, AMD_SPI_OPCODE_REG, cmd_opcode);
		return 0;
	default:
		return -ENODEV;
	}
}

static inline void amd_spi_set_rx_count(struct amd_spi *amd_spi, u8 rx_count)
{
	amd_spi_writereg8(amd_spi, AMD_SPI_RX_COUNT_REG, rx_count);
}

static inline void amd_spi_set_tx_count(struct amd_spi *amd_spi, u8 tx_count)
{
	amd_spi_writereg8(amd_spi, AMD_SPI_TX_COUNT_REG, tx_count);
}

static int amd_spi_busy_wait(struct amd_spi *amd_spi)
{
	u32 val;
	int reg;

	switch (amd_spi->version) {
	case AMD_SPI_V1:
		reg = AMD_SPI_CTRL0_REG;
		break;
	case AMD_SPI_V2:
	case AMD_HID2_SPI:
		reg = AMD_SPI_STATUS_REG;
		break;
	default:
		return -ENODEV;
	}

	return readl_poll_timeout(amd_spi->io_remap_addr + reg, val,
				  !(val & AMD_SPI_BUSY), AMD_SPI_IO_SLEEP_US,
				  AMD_SPI_IO_TIMEOUT_US);
}

static int amd_spi_execute_opcode(struct amd_spi *amd_spi)
{
	int ret;

	ret = amd_spi_busy_wait(amd_spi);
	if (ret)
		return ret;

	switch (amd_spi->version) {
	case AMD_SPI_V1:
		/* Set ExecuteOpCode bit in the CTRL0 register */
		amd_spi_setclear_reg32(amd_spi, AMD_SPI_CTRL0_REG, AMD_SPI_EXEC_CMD,
				       AMD_SPI_EXEC_CMD);
		return 0;
	case AMD_SPI_V2:
	case AMD_HID2_SPI:
		/* Trigger the command execution */
		amd_spi_setclear_reg8(amd_spi, AMD_SPI_CMD_TRIGGER_REG,
				      AMD_SPI_TRIGGER_CMD, AMD_SPI_TRIGGER_CMD);
		return 0;
	default:
		return -ENODEV;
	}
}

static int amd_spi_host_setup(struct spi_device *spi)
{
	struct amd_spi *amd_spi = spi_controller_get_devdata(spi->controller);

	amd_spi_clear_fifo_ptr(amd_spi);

	return 0;
}

static const struct amd_spi_freq amd_spi_freq[] = {
	{ AMD_SPI_MAX_HZ,   F_100MHz,         0},
	{       66660000, F_66_66MHz,         0},
	{       50000000,   SPI_SPD7,   F_50MHz},
	{       33330000, F_33_33MHz,         0},
	{       22220000, F_22_22MHz,         0},
	{       16660000, F_16_66MHz,         0},
	{        4000000,   SPI_SPD7,    F_4MHz},
	{        3170000,   SPI_SPD7, F_3_17MHz},
	{ AMD_SPI_MIN_HZ,   F_800KHz,         0},
};

static void amd_set_spi_freq(struct amd_spi *amd_spi, u32 speed_hz)
{
	unsigned int i, spd7_val, alt_spd;

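	/* Pick the fastest table entry that does not exceed the requested speed */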
	for (i = 0; i < ARRAY_SIZE(amd_spi_freq)-1; i++)
		if (speed_hz >= amd_spi_freq[i].speed_hz)
			break;

	if (amd_spi->speed_hz == amd_spi_freq[i].speed_hz)
		return;

	amd_spi->speed_hz = amd_spi_freq[i].speed_hz;

	alt_spd = (amd_spi_freq[i].enable_val << AMD_SPI_ALT_SPD_SHIFT)
		   & AMD_SPI_ALT_SPD_MASK;
	amd_spi_setclear_reg32(amd_spi, AMD_SPI_ENA_REG, alt_spd,
			       AMD_SPI_ALT_SPD_MASK);

	if (amd_spi->speed_hz == AMD_SPI_MAX_HZ)
		amd_spi_setclear_reg32(amd_spi, AMD_SPI_ENA_REG, 1,
				       AMD_SPI_SPI100_MASK);

	if (amd_spi_freq[i].spd7_val) {
		spd7_val = (amd_spi_freq[i].spd7_val << AMD_SPI_SPD7_SHIFT)
			    & AMD_SPI_SPD7_MASK;
		amd_spi_setclear_reg32(amd_spi, AMD_SPI_SPEED_REG, spd7_val,
				       AMD_SPI_SPD7_MASK);
	}
}

static inline int amd_spi_fifo_xfer(struct amd_spi *amd_spi,
				    struct spi_controller *host,
				    struct spi_message *message)
{
	struct spi_transfer *xfer = NULL;
	struct spi_device *spi = message->spi;
	u8 cmd_opcode = 0, fifo_pos = AMD_SPI_FIFO_BASE;
	u8 *buf = NULL;
	u32 i = 0;
	u32 tx_len = 0, rx_len = 0;

	list_for_each_entry(xfer, &message->transfers,
			    transfer_list) {
		if (xfer->speed_hz)
			amd_set_spi_freq(amd_spi, xfer->speed_hz);
		else
			amd_set_spi_freq(amd_spi, spi->max_speed_hz);

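		/*
		 * The first byte of the first TX buffer is the command opcode;
		 * it is programmed into the opcode register rather than the FIFO.
		 */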
		if (xfer->tx_buf) {
			buf = (u8 *)xfer->tx_buf;
			if (!tx_len) {
				cmd_opcode = *(u8 *)xfer->tx_buf;
				buf++;
				xfer->len--;
			}
			tx_len += xfer->len;

			/* Write data into the FIFO. */
			for (i = 0; i < xfer->len; i++)
				amd_spi_writereg8(amd_spi, fifo_pos + i, buf[i]);

			fifo_pos += xfer->len;
		}

		/* Store no. of bytes to be received from FIFO */
		if (xfer->rx_buf)
			rx_len += xfer->len;
	}

	if (!buf) {
		message->status = -EINVAL;
		goto fin_msg;
	}

	amd_spi_set_opcode(amd_spi, cmd_opcode);
	amd_spi_set_tx_count(amd_spi, tx_len);
	amd_spi_set_rx_count(amd_spi, rx_len);

	/* Execute command */
	message->status = amd_spi_execute_opcode(amd_spi);
	if (message->status)
		goto fin_msg;

	if (rx_len) {
		message->status = amd_spi_busy_wait(amd_spi);
		if (message->status)
			goto fin_msg;

		list_for_each_entry(xfer, &message->transfers, transfer_list)
			if (xfer->rx_buf) {
				buf = (u8 *)xfer->rx_buf;
				/* Read data from FIFO to receive buffer */
				for (i = 0; i < xfer->len; i++)
					buf[i] = amd_spi_readreg8(amd_spi, fifo_pos + i);
				fifo_pos += xfer->len;
			}
	}

	/* Update statistics */
	message->actual_length = tx_len + rx_len + 1;

fin_msg:
	switch (amd_spi->version) {
	case AMD_SPI_V1:
		break;
	case AMD_SPI_V2:
	case AMD_HID2_SPI:
		amd_spi_clear_chip(amd_spi, spi_get_chipselect(message->spi, 0));
		break;
	default:
		return -ENODEV;
	}

	spi_finalize_current_message(host);

	return message->status;
}

static inline bool amd_is_spi_read_cmd_4b(const u16 op)
{
	switch (op) {
	case AMD_SPI_OP_READ_FAST_4B:
	case AMD_SPI_OP_READ_1_1_2_4B:
	case AMD_SPI_OP_READ_1_2_2_4B:
	case AMD_SPI_OP_READ_1_1_4_4B:
	case AMD_SPI_OP_READ_1_4_4_4B:
		return true;
	default:
		return false;
	}
}

static inline bool amd_is_spi_read_cmd(const u16 op)
{
	switch (op) {
	case AMD_SPI_OP_READ:
	case AMD_SPI_OP_READ_FAST:
	case AMD_SPI_OP_READ_1_1_2:
	case AMD_SPI_OP_READ_1_2_2:
	case AMD_SPI_OP_READ_1_1_4:
	case AMD_SPI_OP_READ_1_4_4:
		return true;
	default:
		return amd_is_spi_read_cmd_4b(op);
	}
}

static bool amd_spi_supports_op(struct spi_mem *mem,
				const struct spi_mem_op *op)
{
	struct amd_spi *amd_spi = spi_controller_get_devdata(mem->spi->controller);

	/* Bus width is the number of I/O lines used to transmit */
	if (op->cmd.buswidth > 1 || op->addr.buswidth > 4)
		return false;

	/* AMD SPI controllers support quad mode only for read operations */
	if (amd_is_spi_read_cmd(op->cmd.opcode)) {
		if (op->data.buswidth > 4)
			return false;

		/*
		 * The HID2 SPI controller supports DMA reads of up to 4K bytes
		 * and does not support 4-byte address commands.
		 */
		if (amd_spi->version == AMD_HID2_SPI) {
			if (amd_is_spi_read_cmd_4b(op->cmd.opcode) ||
			    op->data.nbytes > AMD_SPI_HID2_DMA_SIZE)
				return false;
		} else if (op->data.nbytes > AMD_SPI_MAX_DATA) {
			return false;
		}
	} else if (op->data.buswidth > 1 || op->data.nbytes > AMD_SPI_MAX_DATA) {
		return false;
	}

	if (op->max_freq < mem->spi->controller->min_speed_hz)
		return false;

	return spi_mem_default_supports_op(mem, op);
}

static int amd_spi_adjust_op_size(struct spi_mem *mem, struct spi_mem_op *op)
{
	struct amd_spi *amd_spi = spi_controller_get_devdata(mem->spi->controller);

	/*
	 * The HID2 SPI controller DMA read mode supports reading up to 4K
	 * bytes in a single transaction, whereas SPI0 and the HID2 SPI
	 * controller index mode support a maximum of 64 bytes in a single
	 * transaction.
	 */
	if (amd_spi->version == AMD_HID2_SPI && amd_is_spi_read_cmd(op->cmd.opcode))
		op->data.nbytes = clamp_val(op->data.nbytes, 0, AMD_SPI_HID2_DMA_SIZE);
	else
		op->data.nbytes = clamp_val(op->data.nbytes, 0, AMD_SPI_MAX_DATA);

	return 0;
}

static void amd_spi_set_addr(struct amd_spi *amd_spi,
			     const struct spi_mem_op *op)
{
	u8 nbytes = op->addr.nbytes;
	u64 addr_val = op->addr.val;
	int base_addr, i;

	base_addr = AMD_SPI_FIFO_BASE + nbytes;

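	/* Write the address into the FIFO, most significant byte first */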
	for (i = 0; i < nbytes; i++) {
		amd_spi_writereg8(amd_spi, base_addr - i - 1, addr_val &
				  GENMASK(7, 0));
		addr_val >>= 8;
	}
}

static void amd_spi_mem_data_out(struct amd_spi *amd_spi,
				 const struct spi_mem_op *op)
{
	int base_addr = AMD_SPI_FIFO_BASE + op->addr.nbytes;
	u64 *buf_64 = (u64 *)op->data.buf.out;
	u32 nbytes = op->data.nbytes;
	u32 left_data = nbytes;
	u8 *buf;
	int i;

	amd_spi_set_opcode(amd_spi, op->cmd.opcode);
	amd_spi_set_addr(amd_spi, op);

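	/* Copy the payload into the FIFO 8 bytes at a time, then the remaining bytes one by one */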
	for (i = 0; left_data >= 8; i++, left_data -= 8)
		amd_spi_writereg64(amd_spi, base_addr + op->dummy.nbytes + (i * 8), *buf_64++);

	buf = (u8 *)buf_64;
	for (i = 0; i < left_data; i++) {
		amd_spi_writereg8(amd_spi, base_addr + op->dummy.nbytes + nbytes + i - left_data,
				  buf[i]);
	}

	amd_spi_set_tx_count(amd_spi, op->addr.nbytes + op->data.nbytes);
	amd_spi_set_rx_count(amd_spi, 0);
	amd_spi_clear_fifo_ptr(amd_spi);
	amd_spi_execute_opcode(amd_spi);
}

static void amd_spi_hiddma_read(struct amd_spi *amd_spi, const struct spi_mem_op *op)
{
	u16 hid_cmd_start, val;
	u32 hid_regval;

	/* Set the opcode in the hid2_read_control0 register */
	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_READ_CNTRL0);
	hid_regval = (hid_regval & ~GENMASK(7, 0)) | op->cmd.opcode;

	/*
	 * Program the address in the hid2_read_control0 register [8:31]. The address should
	 * be written starting from the 8th bit of the register, requiring an 8-bit shift.
	 * Additionally, to convert a 2-byte spinand address to a 3-byte address, another
	 * 8-bit shift is needed. Therefore, a total shift of 16 bits is required.
	 */
	hid_regval = (hid_regval & ~GENMASK(31, 8)) | (op->addr.val << 16);
	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_READ_CNTRL0, hid_regval);

	/* Configure dummy clock cycles for fast read, dual and quad I/O commands */
	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_READ_CNTRL2);
	/* Fast read dummy cycle */
	hid_regval &= ~GENMASK(4, 0);

	/* Fast read Dual I/O dummy cycle */
	hid_regval &= ~GENMASK(12, 8);

	/* Fast read Quad I/O dummy cycle */
	hid_regval = (hid_regval & ~GENMASK(20, 16)) | BIT(17);

	/* Set the preamble byte count */
	hid_regval &= ~GENMASK(27, 24);
	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_READ_CNTRL2, hid_regval);

	/*
	 * Program the HID2 Input Ring Buffer0: 4K-aligned buf_memory_addr[31:12],
	 * buf_size[4:0], end_input_ring[5].
	 */
	hid_regval = amd_spi->phy_dma_buf | BIT(5) | BIT(0);
	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_INPUT_RING_BUF0, hid_regval);

	/* Program the maximum read length (number of DWORDs) in the hid2_read_control1 register */
	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_READ_CNTRL1);
	hid_regval = (hid_regval & ~GENMASK(15, 0)) | ((op->data.nbytes / 4) - 1);
	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_READ_CNTRL1, hid_regval);

	/* Set the cmd start bit in the hid2_cmd_start register to trigger the HID basic read */
	hid_cmd_start = amd_spi_readreg16(amd_spi, AMD_SPI_HID2_CMD_START);
	amd_spi_writereg16(amd_spi, AMD_SPI_HID2_CMD_START, (hid_cmd_start | BIT(3)));

	/* Poll the HIDDMA basic read completion status in the hid2_int_status register */
	readw_poll_timeout(amd_spi->io_remap_addr + AMD_SPI_HID2_INT_STATUS, val,
			   (val & BIT(3)), AMD_SPI_IO_SLEEP_US, AMD_SPI_IO_TIMEOUT_US);

	/* Clear the interrupts by writing back to the hid2_int_status register */
	val = amd_spi_readreg16(amd_spi, AMD_SPI_HID2_INT_STATUS);
	amd_spi_writereg16(amd_spi, AMD_SPI_HID2_INT_STATUS, val);
}

static void amd_spi_mem_data_in(struct amd_spi *amd_spi,
				const struct spi_mem_op *op)
{
	int base_addr = AMD_SPI_FIFO_BASE + op->addr.nbytes;
	u64 *buf_64 = (u64 *)op->data.buf.in;
	u32 nbytes = op->data.nbytes;
	u32 left_data = nbytes;
	u32 data;
	u8 *buf;
	int i;

	/*
	 * Use HID read mode only for reading complete page data;
	 * use index mode otherwise.
	 */
	if (amd_spi->version == AMD_HID2_SPI && amd_is_spi_read_cmd(op->cmd.opcode)) {
		amd_spi_hiddma_read(amd_spi, op);

		for (i = 0; left_data >= 8; i++, left_data -= 8)
			*buf_64++ = readq((u8 __iomem *)amd_spi->dma_virt_addr + (i * 8));

		buf = (u8 *)buf_64;
		for (i = 0; i < left_data; i++)
			buf[i] = readb((u8 __iomem *)amd_spi->dma_virt_addr +
				       (nbytes - left_data + i));

		/* Reset the HID RX memory logic */
		data = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_CNTRL);
		amd_spi_writereg32(amd_spi, AMD_SPI_HID2_CNTRL, data | BIT(5));
	} else {
		/* Index mode */
		amd_spi_set_opcode(amd_spi, op->cmd.opcode);
		amd_spi_set_addr(amd_spi, op);
		amd_spi_set_tx_count(amd_spi, op->addr.nbytes + op->dummy.nbytes);

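		/* Fill the FIFO with 0xff so the dummy cycles are clocked out before the data phase */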
		for (i = 0; i < op->dummy.nbytes; i++)
			amd_spi_writereg8(amd_spi, (base_addr + i), 0xff);

		amd_spi_set_rx_count(amd_spi, op->data.nbytes);
		amd_spi_clear_fifo_ptr(amd_spi);
		amd_spi_execute_opcode(amd_spi);
		amd_spi_busy_wait(amd_spi);

		for (i = 0; left_data >= 8; i++, left_data -= 8)
			*buf_64++ = amd_spi_readreg64(amd_spi, base_addr + op->dummy.nbytes +
						      (i * 8));

		buf = (u8 *)buf_64;
		for (i = 0; i < left_data; i++)
			buf[i] = amd_spi_readreg8(amd_spi, base_addr + op->dummy.nbytes +
						  nbytes + i - left_data);
	}
}

static void amd_set_spi_addr_mode(struct amd_spi *amd_spi,
				  const struct spi_mem_op *op)
{
	u32 val = amd_spi_readreg32(amd_spi, AMD_SPI_ADDR32CTRL_REG);

	if (amd_is_spi_read_cmd_4b(op->cmd.opcode))
		amd_spi_writereg32(amd_spi, AMD_SPI_ADDR32CTRL_REG, val | BIT(0));
	else
		amd_spi_writereg32(amd_spi, AMD_SPI_ADDR32CTRL_REG, val & ~BIT(0));
}

static int amd_spi_exec_mem_op(struct spi_mem *mem,
			       const struct spi_mem_op *op)
{
	struct amd_spi *amd_spi;

	amd_spi = spi_controller_get_devdata(mem->spi->controller);

	amd_set_spi_freq(amd_spi, op->max_freq);

	if (amd_spi->version == AMD_SPI_V2)
		amd_set_spi_addr_mode(amd_spi, op);

	switch (op->data.dir) {
	case SPI_MEM_DATA_IN:
		amd_spi_mem_data_in(amd_spi, op);
		break;
	case SPI_MEM_DATA_OUT:
		fallthrough;
	case SPI_MEM_NO_DATA:
		amd_spi_mem_data_out(amd_spi, op);
		break;
	default:
		return -EOPNOTSUPP;
	}

	return 0;
}

static const struct spi_controller_mem_ops amd_spi_mem_ops = {
	.exec_op = amd_spi_exec_mem_op,
	.adjust_op_size = amd_spi_adjust_op_size,
	.supports_op = amd_spi_supports_op,
};

static const struct spi_controller_mem_caps amd_spi_mem_caps = {
	.per_op_freq = true,
};

static int amd_spi_host_transfer(struct spi_controller *host,
				   struct spi_message *msg)
{
	struct amd_spi *amd_spi = spi_controller_get_devdata(host);
	struct spi_device *spi = msg->spi;

	amd_spi_select_chip(amd_spi, spi_get_chipselect(spi, 0));

	/*
	 * Extract spi_transfers from the spi message and
	 * program the controller.
	 */
	return amd_spi_fifo_xfer(amd_spi, host, msg);
}

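/* Index-mode messages are limited by the hardware FIFO depth */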
static size_t amd_spi_max_transfer_size(struct spi_device *spi)
{
	return AMD_SPI_FIFO_SIZE;
}

static int amd_spi_setup_hiddma(struct amd_spi *amd_spi, struct device *dev)
{
	u32 hid_regval;

	/* Allocate a DMA buffer to use for the HID basic read operation */
	amd_spi->dma_virt_addr = dma_alloc_coherent(dev, AMD_SPI_HID2_DMA_SIZE,
						    &amd_spi->phy_dma_buf, GFP_KERNEL);
	if (!amd_spi->dma_virt_addr)
		return -ENOMEM;

	/*
	 * Enable interrupts and set the mask bits in the hid2_int_mask register so that
	 * HIDDMA basic read operations generate the interrupt correctly.
	 */
	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_INT_MASK);
	hid_regval = (hid_regval & GENMASK(31, 8)) | BIT(19);
	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_INT_MASK, hid_regval);

	/* Configure the buffer unit (4K) in the hid2_control register */
	hid_regval = amd_spi_readreg32(amd_spi, AMD_SPI_HID2_CNTRL);
	amd_spi_writereg32(amd_spi, AMD_SPI_HID2_CNTRL, hid_regval & ~BIT(3));

	return 0;
}

static int amd_spi_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct spi_controller *host;
	struct amd_spi *amd_spi;
	int err;

	/* Allocate storage for host and driver private data */
	host = devm_spi_alloc_host(dev, sizeof(struct amd_spi));
	if (!host)
		return dev_err_probe(dev, -ENOMEM, "Error allocating SPI host\n");

	amd_spi = spi_controller_get_devdata(host);
	amd_spi->io_remap_addr = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(amd_spi->io_remap_addr))
		return dev_err_probe(dev, PTR_ERR(amd_spi->io_remap_addr),
				     "ioremap of SPI registers failed\n");

	dev_dbg(dev, "io_remap_address: %p\n", amd_spi->io_remap_addr);

	amd_spi->version = (uintptr_t) device_get_match_data(dev);

	/* Initialize the spi_controller fields */
	host->bus_num = (amd_spi->version == AMD_HID2_SPI) ? 2 : 0;
	host->num_chipselect = 4;
	host->mode_bits = SPI_TX_DUAL | SPI_TX_QUAD | SPI_RX_DUAL | SPI_RX_QUAD;
	host->flags = SPI_CONTROLLER_HALF_DUPLEX;
	host->max_speed_hz = AMD_SPI_MAX_HZ;
	host->min_speed_hz = AMD_SPI_MIN_HZ;
	host->setup = amd_spi_host_setup;
	host->transfer_one_message = amd_spi_host_transfer;
	host->mem_ops = &amd_spi_mem_ops;
	host->mem_caps = &amd_spi_mem_caps;
	host->max_transfer_size = amd_spi_max_transfer_size;
	host->max_message_size = amd_spi_max_transfer_size;

	/* Register the controller with SPI framework */
	err = devm_spi_register_controller(dev, host);
	if (err)
		return dev_err_probe(dev, err, "error registering SPI controller\n");

	if (amd_spi->version == AMD_HID2_SPI)
		err = amd_spi_setup_hiddma(amd_spi, dev);

	return err;
}

#ifdef CONFIG_ACPI
static const struct acpi_device_id spi_acpi_match[] = {
	{ "AMDI0061", AMD_SPI_V1 },
	{ "AMDI0062", AMD_SPI_V2 },
	{ "AMDI0063", AMD_HID2_SPI },
	{},
};
MODULE_DEVICE_TABLE(acpi, spi_acpi_match);
#endif

static struct platform_driver amd_spi_driver = {
	.driver = {
		.name = "amd_spi",
		.acpi_match_table = ACPI_PTR(spi_acpi_match),
	},
	.probe = amd_spi_probe,
};

module_platform_driver(amd_spi_driver);

MODULE_LICENSE("Dual BSD/GPL");
MODULE_AUTHOR("Sanjay Mehta <[email protected]>");
MODULE_DESCRIPTION("AMD SPI Master Controller Driver");