// SPDX-License-Identifier: GPL-2.0+
/*
 * LPC32xx SLC NAND flash controller driver
 *
 * (C) Copyright 2015-2018 Vladimir Zapolskiy <vz@mleia.com>
 * Copyright (c) 2015 Tyco Fire Protection Products.
 *
 * Hardware ECC support original source code
 * Copyright (C) 2008 by NXP Semiconductors
 * Author: Kevin Wells
 */

#include <common.h>
#include <log.h>
#include <nand.h>
#include <linux/bug.h>
#include <linux/mtd/nand_ecc.h>
#include <linux/mtd/rawnand.h>
#include <linux/errno.h>
#include <asm/io.h>
#include <asm/arch/config.h>
#include <asm/arch/clk.h>
#include <asm/arch/sys_proto.h>
#include <asm/arch/dma.h>
#include <asm/arch/cpu.h>

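/*
 * SLC NAND flash controller register map, see the SLC NAND controller
 * chapter of the LPC32x0 User Manual (UM10326).
 */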
struct lpc32xx_nand_slc_regs {
	u32 data;
	u32 addr;
	u32 cmd;
	u32 stop;
	u32 ctrl;
	u32 cfg;
	u32 stat;
	u32 int_stat;
	u32 ien;
	u32 isr;
	u32 icr;
	u32 tac;
	u32 tc;
	u32 ecc;
	u32 dma_data;
};

/* CFG register */
#define CFG_CE_LOW		(1 << 5)
#define CFG_DMA_ECC		(1 << 4) /* Enable DMA ECC bit */
#define CFG_ECC_EN		(1 << 3) /* ECC enable bit */
#define CFG_DMA_BURST		(1 << 2) /* DMA burst bit */
#define CFG_DMA_DIR		(1 << 1) /* DMA write(0)/read(1) bit */

/* CTRL register */
#define CTRL_SW_RESET		(1 << 2)
#define CTRL_ECC_CLEAR		(1 << 1) /* Reset ECC bit */
#define CTRL_DMA_START		(1 << 0) /* Start DMA channel bit */

/* STAT register */
#define STAT_DMA_FIFO		(1 << 2) /* DMA FIFO has data bit */
#define STAT_NAND_READY		(1 << 0)

/* INT_STAT register */
#define INT_STAT_TC		(1 << 1)
#define INT_STAT_RDY		(1 << 0)

/* TAC register bits, be aware of overflows */
#define TAC_W_RDY(n)		(max_t(uint32_t, (n), 0xF) << 28)
#define TAC_W_WIDTH(n)		(max_t(uint32_t, (n), 0xF) << 24)
#define TAC_W_HOLD(n)		(max_t(uint32_t, (n), 0xF) << 20)
#define TAC_W_SETUP(n)		(max_t(uint32_t, (n), 0xF) << 16)
#define TAC_R_RDY(n)		(max_t(uint32_t, (n), 0xF) << 12)
#define TAC_R_WIDTH(n)		(max_t(uint32_t, (n), 0xF) << 8)
#define TAC_R_HOLD(n)		(max_t(uint32_t, (n), 0xF) << 4)
#define TAC_R_SETUP(n)		(max_t(uint32_t, (n), 0xF) << 0)

/*
 * NAND ECC layout for small page NAND devices.
 * Note: for large page devices, the default layouts are used.
 */
static struct nand_ecclayout lpc32xx_nand_oob_16 = {
	.eccbytes = 6,
	.eccpos = { 10, 11, 12, 13, 14, 15, },
	.oobfree = {
		{ .offset = 0, .length = 4, },
		{ .offset = 6, .length = 4, },
	}
};

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
#define ECCSTEPS	(CONFIG_SYS_NAND_PAGE_SIZE / CONFIG_SYS_NAND_ECCSIZE)

/*
 * DMA Descriptors
 * For Large Block: 17 descriptors = ((16 Data and ECC Read) + 1 Spare Area)
 * For Small Block: 5 descriptors = ((4 Data and ECC Read) + 1 Spare Area)
 */
static struct lpc32xx_dmac_ll dmalist[ECCSTEPS * 2 + 1];
static u32 ecc_buffer[8]; /* MAX ECC size */
static unsigned int dmachan = (unsigned int)-1; /* Invalid channel */

/*
 * Helper macros for the DMA client (i.e. NAND SLC):
 * - to write the next DMA linked list item address
 *   (see arch/include/asm/arch-lpc32xx/dma.h).
 * - to assign the DMA data register to DMA source or destination address.
 * - to assign the ECC register to DMA source or destination address.
 */
#define lpc32xx_dmac_next_lli(x)	((u32)x)
#define lpc32xx_dmac_set_dma_data()	((u32)&lpc32xx_nand_slc_regs->dma_data)
#define lpc32xx_dmac_set_ecc()		((u32)&lpc32xx_nand_slc_regs->ecc)
#endif

static struct lpc32xx_nand_slc_regs __iomem *lpc32xx_nand_slc_regs
	= (struct lpc32xx_nand_slc_regs __iomem *)SLC_NAND_BASE;

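/* Reset the SLC controller and program NAND bus timings from the HCLK rate */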
static void lpc32xx_nand_init(void)
{
	uint32_t hclk = get_hclk_clk_rate();

	/* Reset SLC NAND controller */
	writel(CTRL_SW_RESET, &lpc32xx_nand_slc_regs->ctrl);

	/* 8-bit bus, no DMA, no ECC, ordinary CE signal */
	writel(0, &lpc32xx_nand_slc_regs->cfg);

	/* Interrupts disabled and cleared */
	writel(0, &lpc32xx_nand_slc_regs->ien);
	writel(INT_STAT_TC | INT_STAT_RDY,
	       &lpc32xx_nand_slc_regs->icr);

	/* Configure NAND flash timings */
	writel(TAC_W_RDY(CONFIG_LPC32XX_NAND_SLC_WDR_CLKS) |
	       TAC_W_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_WWIDTH) |
	       TAC_W_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_WHOLD) |
	       TAC_W_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_WSETUP) |
	       TAC_R_RDY(CONFIG_LPC32XX_NAND_SLC_RDR_CLKS) |
	       TAC_R_WIDTH(hclk / CONFIG_LPC32XX_NAND_SLC_RWIDTH) |
	       TAC_R_HOLD(hclk / CONFIG_LPC32XX_NAND_SLC_RHOLD) |
	       TAC_R_SETUP(hclk / CONFIG_LPC32XX_NAND_SLC_RSETUP),
	       &lpc32xx_nand_slc_regs->tac);
}

static void lpc32xx_nand_cmd_ctrl(struct mtd_info *mtd,
				  int cmd, unsigned int ctrl)
{
	debug("ctrl: 0x%08x, cmd: 0x%08x\n", ctrl, cmd);

	if (ctrl & NAND_NCE)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);
	else
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_CE_LOW);

	if (cmd == NAND_CMD_NONE)
		return;

	if (ctrl & NAND_CLE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->cmd);
	else if (ctrl & NAND_ALE)
		writel(cmd & 0xFF, &lpc32xx_nand_slc_regs->addr);
}

static int lpc32xx_nand_dev_ready(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->stat) & STAT_NAND_READY;
}

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
/*
 * Prepares DMA descriptors for NAND read/write operations.
 * If the size is less than 256 bytes, the transfer is assumed
 * to be an OOB-only transfer.
 */
static void lpc32xx_nand_dma_configure(struct nand_chip *chip,
				       const u8 *buffer, int size,
				       int read)
{
	u32 i, dmasrc, ctrl, ecc_ctrl, oob_ctrl, dmadst;
	struct lpc32xx_dmac_ll *dmalist_cur;
	struct lpc32xx_dmac_ll *dmalist_cur_ecc;

	/*
	 * CTRL descriptor entry for reading ECC.
	 * Copied multiple times to keep DMA in sync with the flash controller.
	 */
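	/* 0x5 is the transfer size in words, cf. ECCSIZE/4 and OOBSIZE/4 below */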
	ecc_ctrl = 0x5 |
			DMAC_CHAN_SRC_BURST_1 |
			DMAC_CHAN_DEST_BURST_1 |
			DMAC_CHAN_SRC_WIDTH_32 |
			DMAC_CHAN_DEST_WIDTH_32 |
			DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Data */
	ctrl = (CONFIG_SYS_NAND_ECCSIZE / 4) |
			DMAC_CHAN_SRC_BURST_4 |
			DMAC_CHAN_DEST_BURST_4 |
			DMAC_CHAN_SRC_WIDTH_32 |
			DMAC_CHAN_DEST_WIDTH_32 |
			DMAC_CHAN_DEST_AHB1;

	/* CTRL descriptor entry for reading/writing Spare Area */
	oob_ctrl = (CONFIG_SYS_NAND_OOBSIZE / 4) |
			DMAC_CHAN_SRC_BURST_4 |
			DMAC_CHAN_DEST_BURST_4 |
			DMAC_CHAN_SRC_WIDTH_32 |
			DMAC_CHAN_DEST_WIDTH_32 |
			DMAC_CHAN_DEST_AHB1;

	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/*
	 * Write Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Memory to Flash.
	 * 2. Copy generated ECC data from Register to Spare Area.
	 * 3. X'fer next 256 bytes of data from Memory to Flash.
	 * 4. Copy generated ECC data from Register to Spare Area.
	 * 5. X'fer 16 bytes of Spare Area from Memory to Flash.
	 *
	 * Read Operation Sequence for Small Block NAND
	 * ----------------------------------------------------------
	 * 1. X'fer 256 bytes of data from Flash to Memory.
	 * 2. Copy generated ECC data from Register to ECC calc Buffer.
	 * 3. X'fer next 256 bytes of data from Flash to Memory.
	 * 4. Copy generated ECC data from Register to ECC calc Buffer.
	 * 5. X'fer 16 bytes of Spare Area from Flash to Memory.
	 *
	 * Write Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Write Operation are repeated four times,
	 * which generates 16 DMA descriptors to X'fer 2048 bytes of
	 * data and 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare Area from Memory to Flash.
	 *
	 * Read Operation Sequence for Large Block NAND
	 * ----------------------------------------------------------
	 * 1. Steps (1-4) of the Read Operation are repeated four times,
	 * which generates 16 DMA descriptors to X'fer 2048 bytes of
	 * data and 32 bytes of ECC data.
	 * 2. X'fer 64 bytes of Spare Area from Flash to Memory.
	 */

	for (i = 0; i < size / CONFIG_SYS_NAND_ECCSIZE; i++) {
		dmalist_cur = &dmalist[i * 2];
		dmalist_cur_ecc = &dmalist[(i * 2) + 1];

		dmalist_cur->dma_src = (read ? (dmasrc) : (dmasrc + (i * 256)));
		dmalist_cur->dma_dest = (read ? (dmadst + (i * 256)) : dmadst);
		dmalist_cur->next_lli = lpc32xx_dmac_next_lli(dmalist_cur_ecc);
		dmalist_cur->next_ctrl = ctrl;

		dmalist_cur_ecc->dma_src = lpc32xx_dmac_set_ecc();
		dmalist_cur_ecc->dma_dest = (u32)&ecc_buffer[i];
		dmalist_cur_ecc->next_lli =
			lpc32xx_dmac_next_lli(&dmalist[(i * 2) + 2]);
		dmalist_cur_ecc->next_ctrl = ecc_ctrl;
	}

	if (i) { /* Data + ECC transfer: terminate the descriptor list */
		dmalist_cur_ecc = &dmalist[(i * 2) - 1];
		dmalist_cur_ecc->next_lli = 0;
		dmalist_cur_ecc->next_ctrl |= DMAC_CHAN_INT_TC_EN;
		return;
	}

	/* OOB only transfer */
	if (read) {
		dmasrc = lpc32xx_dmac_set_dma_data();
		dmadst = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_DEST_AUTOINC;
	} else {
		dmadst = lpc32xx_dmac_set_dma_data();
		dmasrc = (u32)buffer;
		oob_ctrl |= DMAC_CHAN_SRC_AUTOINC;
	}

	/* Read/Write Spare Area Data To/From Flash */
	dmalist_cur = &dmalist[i * 2];
	dmalist_cur->dma_src = dmasrc;
	dmalist_cur->dma_dest = dmadst;
	dmalist_cur->next_lli = 0;
	dmalist_cur->next_ctrl = (oob_ctrl | DMAC_CHAN_INT_TC_EN);
}

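/*
 * Run a single DMA data or OOB transfer using the scatter/gather
 * descriptors prepared by lpc32xx_nand_dma_configure().
 */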
static void lpc32xx_nand_xfer(struct mtd_info *mtd, const u8 *buf,
			      int len, int read)
{
	struct nand_chip *chip = mtd_to_nand(mtd);
	u32 config;
	int ret;

	/* DMA Channel Configuration */
	config = (read ? DMAC_CHAN_FLOW_D_P2M : DMAC_CHAN_FLOW_D_M2P) |
		(read ? DMAC_DEST_PERIP(0) : DMAC_DEST_PERIP(DMA_PERID_NAND1)) |
		(read ? DMAC_SRC_PERIP(DMA_PERID_NAND1) : DMAC_SRC_PERIP(0)) |
		DMAC_CHAN_ENABLE;

	/* Prepare DMA descriptors */
	lpc32xx_nand_dma_configure(chip, buf, len, read);

	/* Setup SLC controller and start transfer */
	if (read)
		setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	else /* NAND_ECC_WRITE */
		clrbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_DIR);
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_DMA_BURST);

	/*
	 * Write length for new transfers: for a data transfer the spare
	 * area is part of the same SLC transfer, so its size is added to
	 * the transfer counter.
	 */
	if (!((readl(&lpc32xx_nand_slc_regs->stat) & STAT_DMA_FIFO) |
	      readl(&lpc32xx_nand_slc_regs->tc))) {
		int tmp = (len != mtd->oobsize) ? mtd->oobsize : 0;
		writel(len + tmp, &lpc32xx_nand_slc_regs->tc);
	}

	setbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);

	/* Start DMA transfers */
	ret = lpc32xx_dma_start_xfer(dmachan, dmalist, config);
	if (unlikely(ret < 0))
		BUG();

	/* Wait for NAND to be ready */
	while (!lpc32xx_nand_dev_ready(mtd))
		;

	/* Wait till DMA transfer is DONE */
	if (lpc32xx_dma_wait_status(dmachan))
		pr_err("NAND DMA transfer error!\r\n");

	/* Stop DMA & HW ECC */
	clrbits_le32(&lpc32xx_nand_slc_regs->ctrl, CTRL_DMA_START);
	clrbits_le32(&lpc32xx_nand_slc_regs->cfg,
		     CFG_DMA_DIR | CFG_DMA_BURST | CFG_ECC_EN | CFG_DMA_ECC);
}

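/*
 * Convert the hardware-generated 32-bit ECC words into the 3-byte-per-step
 * format stored in the spare area, which nand_correct_data() expects.
 */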
static u32 slc_ecc_copy_to_buffer(u8 *spare, const u32 *ecc, int count)
{
	int i;

	for (i = 0; i < (count * CONFIG_SYS_NAND_ECCBYTES);
	     i += CONFIG_SYS_NAND_ECCBYTES) {
		u32 ce = ecc[i / CONFIG_SYS_NAND_ECCBYTES];

		ce = ~(ce << 2) & 0xFFFFFF;
		spare[i + 2] = (u8)(ce & 0xFF);
		ce >>= 8;
		spare[i + 1] = (u8)(ce & 0xFF);
		ce >>= 8;
		spare[i] = (u8)(ce & 0xFF);
	}

	return 0;
}

static int lpc32xx_ecc_calculate(struct mtd_info *mtd, const uint8_t *dat,
				 uint8_t *ecc_code)
{
	return slc_ecc_copy_to_buffer(ecc_code, ecc_buffer, ECCSTEPS);
}

/*
 * Enables and prepares the SLC NAND controller
 * for doing data transfers with H/W ECC enabled.
 */
static void lpc32xx_hwecc_enable(struct mtd_info *mtd, int mode)
{
	/* Clear ECC */
	writel(CTRL_ECC_CLEAR, &lpc32xx_nand_slc_regs->ctrl);

	/* Setup SLC controller for H/W ECC operations */
	setbits_le32(&lpc32xx_nand_slc_regs->cfg, CFG_ECC_EN | CFG_DMA_ECC);
}

/*
 * lpc32xx_correct_data - [NAND Interface] Detect and correct bit error(s)
 * mtd:	MTD block structure
 * dat:	raw data read from the chip
 * read_ecc:	ECC from the chip
 * calc_ecc:	the ECC calculated from raw data
 *
 * Detects and corrects a single bit error per 256-byte block.
 */
int lpc32xx_correct_data(struct mtd_info *mtd, u_char *dat,
			 u_char *read_ecc, u_char *calc_ecc)
{
	unsigned int i;
	int ret1, ret2 = 0;
	u_char *r = read_ecc;
	u_char *c = calc_ecc;
	u16 data_offset = 0;

	for (i = 0 ; i < ECCSTEPS ; i++) {
		ret1 = nand_correct_data(mtd, dat + data_offset, r, c);
		if (ret1 < 0)
			return -EBADMSG;

		ret2 += ret1;

		/* Advance to the data and ECC bytes of the next 256-byte step */
		r += CONFIG_SYS_NAND_ECCBYTES;
		c += CONFIG_SYS_NAND_ECCBYTES;
		data_offset += CONFIG_SYS_NAND_ECCSIZE;
	}

	return ret2;
}

static void lpc32xx_dma_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 1);
}

static void lpc32xx_dma_write_buf(struct mtd_info *mtd, const uint8_t *buf,
				  int len)
{
	lpc32xx_nand_xfer(mtd, buf, len, 0);
}

/* Reuse the logic from "nand_read_page_hwecc()" */
static int lpc32xx_read_page_hwecc(struct mtd_info *mtd,
				   struct nand_chip *chip,
				   uint8_t *buf, int oob_required, int page)
{
	int i;
	int stat;
	uint8_t *p = buf;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	uint8_t *ecc_code = chip->buffers->ecccode;
	uint32_t *eccpos = chip->ecc.layout->eccpos;
	unsigned int max_bitflips = 0;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC and DMA controllers allow a single
	 * DMA transaction covering a whole page, using the DMA controller
	 * scatter/gather (linked list) mode; the ECC is read without any
	 * software intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_READ);
	lpc32xx_dma_read_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);
	lpc32xx_dma_read_buf(mtd, chip->oob_poi, mtd->oobsize);

	for (i = 0; i < chip->ecc.total; i++)
		ecc_code[i] = chip->oob_poi[eccpos[i]];

	stat = chip->ecc.correct(mtd, p, &ecc_code[0], &ecc_calc[0]);
	if (stat < 0) {
		mtd->ecc_stats.failed++;
	} else {
		mtd->ecc_stats.corrected += stat;
		max_bitflips = max_t(unsigned int, max_bitflips, stat);
	}

	return max_bitflips;
}

/* Reuse the logic from "nand_write_page_hwecc()" */
static int lpc32xx_write_page_hwecc(struct mtd_info *mtd,
				    struct nand_chip *chip,
				    const uint8_t *buf, int oob_required,
				    int page)
{
	int i;
	uint8_t *ecc_calc = chip->buffers->ecccalc;
	const uint8_t *p = buf;
	uint32_t *eccpos = chip->ecc.layout->eccpos;

	/*
	 * As per the "LPC32x0 and LPC32x0/01 User manual" table 173 notes
	 * and section 9.7, the NAND SLC and DMA controllers allow a single
	 * DMA transaction covering a whole page, using the DMA controller
	 * scatter/gather (linked list) mode; the ECC is read without any
	 * software intervention.
	 */

	lpc32xx_hwecc_enable(mtd, NAND_ECC_WRITE);
	lpc32xx_dma_write_buf(mtd, p, chip->ecc.size * chip->ecc.steps);
	lpc32xx_ecc_calculate(mtd, p, &ecc_calc[0]);

	for (i = 0; i < chip->ecc.total; i++)
		chip->oob_poi[eccpos[i]] = ecc_calc[i];

	lpc32xx_dma_write_buf(mtd, chip->oob_poi, mtd->oobsize);

	return 0;
}
#else
static void lpc32xx_read_buf(struct mtd_info *mtd, uint8_t *buf, int len)
{
	while (len-- > 0)
		*buf++ = readl(&lpc32xx_nand_slc_regs->data);
}

static void lpc32xx_write_buf(struct mtd_info *mtd, const uint8_t *buf, int len)
{
	while (len-- > 0)
		writel(*buf++, &lpc32xx_nand_slc_regs->data);
}
#endif

static uint8_t lpc32xx_read_byte(struct mtd_info *mtd)
{
	return readl(&lpc32xx_nand_slc_regs->data);
}

static void lpc32xx_write_byte(struct mtd_info *mtd, uint8_t byte)
{
	writel(byte, &lpc32xx_nand_slc_regs->data);
}

/*
 * The LPC32xx has only one SLC NAND controller. CONFIG_SYS_NAND_SELF_INIT
 * is not used, so that this function can be reused by both the SPL NAND
 * and U-Boot images.
 */
int board_nand_init(struct nand_chip *lpc32xx_chip)
{
#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	int ret;

	/* Acquire a channel for our use */
	ret = lpc32xx_dma_get_channel();
	if (unlikely(ret < 0)) {
		pr_info("Unable to get free DMA channel for NAND transfers\n");
		return -1;
	}
	dmachan = (unsigned int)ret;
#endif

	lpc32xx_chip->cmd_ctrl  = lpc32xx_nand_cmd_ctrl;
	lpc32xx_chip->dev_ready = lpc32xx_nand_dev_ready;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to the data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_byte  = lpc32xx_read_byte;
	lpc32xx_chip->write_byte = lpc32xx_write_byte;

#if defined(CONFIG_DMA_LPC32XX) && !defined(CONFIG_SPL_BUILD)
	/* Hardware ECC calculation is supported when the DMA driver is selected */
	lpc32xx_chip->ecc.mode		= NAND_ECC_HW;

	lpc32xx_chip->read_buf		= lpc32xx_dma_read_buf;
	lpc32xx_chip->write_buf		= lpc32xx_dma_write_buf;

	lpc32xx_chip->ecc.calculate	= lpc32xx_ecc_calculate;
	lpc32xx_chip->ecc.correct	= lpc32xx_correct_data;
	lpc32xx_chip->ecc.hwctl		= lpc32xx_hwecc_enable;
	lpc32xx_chip->chip_delay	= 2000;

	lpc32xx_chip->ecc.read_page	= lpc32xx_read_page_hwecc;
	lpc32xx_chip->ecc.write_page	= lpc32xx_write_page_hwecc;
	lpc32xx_chip->options		|= NAND_NO_SUBPAGE_WRITE;
#else
	/*
	 * Hardware ECC calculation is not supported by the driver,
	 * because it requires DMA support, see the LPC32x0 User Manual
	 * note after the SLC_ECC register description (UM10326, p.198).
	 */
	lpc32xx_chip->ecc.mode = NAND_ECC_SOFT;

	/*
	 * The implementation of these functions is quite common, but
	 * they MUST be defined, because access to the data register
	 * is strictly 32-bit aligned.
	 */
	lpc32xx_chip->read_buf   = lpc32xx_read_buf;
	lpc32xx_chip->write_buf  = lpc32xx_write_buf;
#endif

	/*
	 * These values are predefined
	 * for both small and large page NAND flash devices.
	 */
	lpc32xx_chip->ecc.size     = CONFIG_SYS_NAND_ECCSIZE;
	lpc32xx_chip->ecc.bytes    = CONFIG_SYS_NAND_ECCBYTES;
	lpc32xx_chip->ecc.strength = 1;

	if (CONFIG_SYS_NAND_PAGE_SIZE != NAND_LARGE_BLOCK_PAGE_SIZE)
		lpc32xx_chip->ecc.layout = &lpc32xx_nand_oob_16;

#if defined(CONFIG_SYS_NAND_USE_FLASH_BBT)
	lpc32xx_chip->bbt_options |= NAND_BBT_USE_FLASH;
#endif

	/* Initialize NAND interface */
	lpc32xx_nand_init();

	return 0;
}