Lines matching the search query “+full:no +full:- +full:mmc” in drivers/mmc/host/davinci_mmc.c. Each match is shown with its line number in the file and, where available, the enclosing function or the kind of identifier matched (member, argument, local).

1 // SPDX-License-Identifier: GPL-2.0-or-later
3 * davinci_mmc.c - TI DaVinci MMC/SD/SDIO driver
13 #include <linux/dma-mapping.h>
20 #include <linux/mmc/host.h>
21 #include <linux/mmc/mmc.h>
22 #include <linux/mmc/slot-gpio.h>
24 #include <linux/platform_data/mmc-davinci.h>
36 #define DAVINCI_MMCTOR 0x14 /* Response Time-Out Register */
37 #define DAVINCI_MMCTOD 0x18 /* Data Read Time-Out Register */
143 #define MAX_CCNT ((1 << 16) - 1)
169 struct mmc_host *mmc; member
195 /* Version of the MMC/SD controller */
212 struct sg_mapping_iter *sgm = &host->sg_miter; in davinci_fifo_data_trans()
217 * By adjusting sgm->consumed this will give a pointer to the in davinci_fifo_data_trans()
221 dev_err(mmc_dev(host->mmc), "ran out of sglist prematurely\n"); in davinci_fifo_data_trans()
224 p = sgm->addr; in davinci_fifo_data_trans()
226 if (n > sgm->length) in davinci_fifo_data_trans()
227 n = sgm->length; in davinci_fifo_data_trans()
230 * to/from the fifo here; there's no I/O overlap. in davinci_fifo_data_trans()
233 if (host->data_dir == DAVINCI_MMC_DATADIR_WRITE) { in davinci_fifo_data_trans()
235 writel(*((u32 *)p), host->base + DAVINCI_MMCDXR); in davinci_fifo_data_trans()
239 iowrite8_rep(host->base + DAVINCI_MMCDXR, p, (n & 3)); in davinci_fifo_data_trans()
244 *((u32 *)p) = readl(host->base + DAVINCI_MMCDRR); in davinci_fifo_data_trans()
248 ioread8_rep(host->base + DAVINCI_MMCDRR, p, (n & 3)); in davinci_fifo_data_trans()
253 sgm->consumed = n; in davinci_fifo_data_trans()
254 host->bytes_left -= n; in davinci_fifo_data_trans()
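The PIO path in davinci_fifo_data_trans() above walks the request's scatterlist with a struct sg_mapping_iter instead of mapping pages by hand. A minimal sketch of that pattern, independent of this driver (the helper name and the FIFO copy step are placeholders):

#include <linux/scatterlist.h>
#include <linux/minmax.h>

/* Move up to *bytes_left bytes between the scatterlist and a device FIFO. */
static void pio_copy_chunk(struct sg_mapping_iter *sgm, unsigned int *bytes_left)
{
	unsigned int n;

	if (!sg_miter_next(sgm))		/* sglist ran out early */
		return;

	n = min_t(unsigned int, sgm->length, *bytes_left);

	/* ... move n bytes between sgm->addr and the FIFO registers ... */

	sgm->consumed = n;			/* tell the iterator how much was used */
	*bytes_left -= n;
}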
263 dev_dbg(mmc_dev(host->mmc), "CMD%d, arg 0x%08x%s\n", in mmc_davinci_start_command()
264 cmd->opcode, cmd->arg, in mmc_davinci_start_command()
283 host->cmd = cmd; in mmc_davinci_start_command()
299 case MMC_RSP_R3: /* 48 bits, no CRC */ in mmc_davinci_start_command()
304 dev_dbg(mmc_dev(host->mmc), "unknown resp_type %04x\n", in mmc_davinci_start_command()
310 cmd_reg |= cmd->opcode; in mmc_davinci_start_command()
313 if (host->do_dma) in mmc_davinci_start_command()
316 if (host->version == MMC_CTLR_VERSION_2 && host->data != NULL && in mmc_davinci_start_command()
317 host->data_dir == DAVINCI_MMC_DATADIR_READ) in mmc_davinci_start_command()
321 if (cmd->data) in mmc_davinci_start_command()
325 if (host->data_dir == DAVINCI_MMC_DATADIR_WRITE) in mmc_davinci_start_command()
328 if (host->bus_mode == MMC_BUSMODE_PUSHPULL) in mmc_davinci_start_command()
332 writel(0x1FFF, host->base + DAVINCI_MMCTOR); in mmc_davinci_start_command()
336 if (host->data_dir == DAVINCI_MMC_DATADIR_WRITE) { in mmc_davinci_start_command()
339 if (!host->do_dma) in mmc_davinci_start_command()
341 } else if (host->data_dir == DAVINCI_MMC_DATADIR_READ) { in mmc_davinci_start_command()
344 if (!host->do_dma) in mmc_davinci_start_command()
349 * Before non-DMA WRITE commands the controller needs priming: in mmc_davinci_start_command()
352 if (!host->do_dma && (host->data_dir == DAVINCI_MMC_DATADIR_WRITE)) in mmc_davinci_start_command()
355 writel(cmd->arg, host->base + DAVINCI_MMCARGHL); in mmc_davinci_start_command()
356 writel(cmd_reg, host->base + DAVINCI_MMCCMD); in mmc_davinci_start_command()
358 host->active_request = true; in mmc_davinci_start_command()
360 if (!host->do_dma && host->bytes_left <= poll_threshold) { in mmc_davinci_start_command()
363 while (host->active_request && count--) { in mmc_davinci_start_command()
369 if (host->active_request) in mmc_davinci_start_command()
370 writel(im_val, host->base + DAVINCI_MMCIM); in mmc_davinci_start_command()
373 /*----------------------------------------------------------------------*/
381 if (host->data_dir == DAVINCI_MMC_DATADIR_READ) in davinci_abort_dma()
382 sync_dev = host->dma_rx; in davinci_abort_dma()
384 sync_dev = host->dma_tx; in davinci_abort_dma()
396 if (host->data_dir == DAVINCI_MMC_DATADIR_WRITE) { in mmc_davinci_send_dma_request()
399 .dst_addr = host->mem_res->start + DAVINCI_MMCDXR, in mmc_davinci_send_dma_request()
404 chan = host->dma_tx; in mmc_davinci_send_dma_request()
405 dmaengine_slave_config(host->dma_tx, &dma_tx_conf); in mmc_davinci_send_dma_request()
407 desc = dmaengine_prep_slave_sg(host->dma_tx, in mmc_davinci_send_dma_request()
408 data->sg, in mmc_davinci_send_dma_request()
409 host->sg_len, in mmc_davinci_send_dma_request()
413 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_send_dma_request()
415 ret = -1; in mmc_davinci_send_dma_request()
421 .src_addr = host->mem_res->start + DAVINCI_MMCDRR, in mmc_davinci_send_dma_request()
426 chan = host->dma_rx; in mmc_davinci_send_dma_request()
427 dmaengine_slave_config(host->dma_rx, &dma_rx_conf); in mmc_davinci_send_dma_request()
429 desc = dmaengine_prep_slave_sg(host->dma_rx, in mmc_davinci_send_dma_request()
430 data->sg, in mmc_davinci_send_dma_request()
431 host->sg_len, in mmc_davinci_send_dma_request()
435 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_send_dma_request()
437 ret = -1; in mmc_davinci_send_dma_request()
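The dmaengine slave calls shown above are only the configure-and-prepare step; a complete transfer also needs submit and issue. A hedged sketch of the usual sequence for the write (memory-to-device) direction, with illustrative burst size and an assumed FIFO address argument:

#include <linux/dmaengine.h>
#include <linux/dma-mapping.h>

static int start_tx_dma(struct dma_chan *chan, dma_addr_t fifo_addr,
			struct scatterlist *sgl, unsigned int sg_len)
{
	struct dma_slave_config cfg = {
		.direction	= DMA_MEM_TO_DEV,
		.dst_addr	= fifo_addr,		/* e.g. the MMCDXR register */
		.dst_addr_width	= DMA_SLAVE_BUSWIDTH_4_BYTES,
		.dst_maxburst	= 8,			/* one FIFO burst in words */
	};
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;
	int ret;

	ret = dmaengine_slave_config(chan, &cfg);
	if (ret)
		return ret;

	desc = dmaengine_prep_slave_sg(chan, sgl, sg_len, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EIO;

	cookie = dmaengine_submit(desc);	/* queue the descriptor */
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);		/* kick the channel */
	return 0;
}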
453 int mask = rw_threshold - 1; in mmc_davinci_start_dma_transfer()
456 host->sg_len = dma_map_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in mmc_davinci_start_dma_transfer()
459 /* no individual DMA segment should need a partial FIFO */ in mmc_davinci_start_dma_transfer()
460 for (i = 0; i < host->sg_len; i++) { in mmc_davinci_start_dma_transfer()
461 if (sg_dma_len(data->sg + i) & mask) { in mmc_davinci_start_dma_transfer()
462 dma_unmap_sg(mmc_dev(host->mmc), in mmc_davinci_start_dma_transfer()
463 data->sg, data->sg_len, in mmc_davinci_start_dma_transfer()
465 return -1; in mmc_davinci_start_dma_transfer()
469 host->do_dma = 1; in mmc_davinci_start_dma_transfer()
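The mask test above is a power-of-two alignment check: DMA is used only when every mapped segment is a whole number of FIFO bursts, otherwise the request falls back to PIO. A small hedged sketch of that predicate (the helper name is ours; a 32-byte rw_threshold is assumed):

#include <linux/scatterlist.h>

static bool segments_fifo_aligned(struct scatterlist *sgl, int nents,
				  unsigned int rw_threshold)
{
	struct scatterlist *sg;
	int i;

	/* rw_threshold is a power of two, so (len & (rw_threshold - 1))
	 * is non-zero exactly when len is not a multiple of it. */
	for_each_sg(sgl, sg, nents, i)
		if (sg_dma_len(sg) & (rw_threshold - 1))
			return false;

	return true;
}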
477 if (!host->use_dma) in davinci_release_dma_channels()
480 dma_release_channel(host->dma_tx); in davinci_release_dma_channels()
481 dma_release_channel(host->dma_rx); in davinci_release_dma_channels()
486 host->dma_tx = dma_request_chan(mmc_dev(host->mmc), "tx"); in davinci_acquire_dma_channels()
487 if (IS_ERR(host->dma_tx)) { in davinci_acquire_dma_channels()
488 dev_err(mmc_dev(host->mmc), "Can't get dma_tx channel\n"); in davinci_acquire_dma_channels()
489 return PTR_ERR(host->dma_tx); in davinci_acquire_dma_channels()
492 host->dma_rx = dma_request_chan(mmc_dev(host->mmc), "rx"); in davinci_acquire_dma_channels()
493 if (IS_ERR(host->dma_rx)) { in davinci_acquire_dma_channels()
494 dev_err(mmc_dev(host->mmc), "Can't get dma_rx channel\n"); in davinci_acquire_dma_channels()
495 dma_release_channel(host->dma_tx); in davinci_acquire_dma_channels()
496 return PTR_ERR(host->dma_rx); in davinci_acquire_dma_channels()
502 /*----------------------------------------------------------------------*/
509 struct mmc_data *data = req->data; in mmc_davinci_prepare_data()
512 if (host->version == MMC_CTLR_VERSION_2) in mmc_davinci_prepare_data()
515 host->data = data; in mmc_davinci_prepare_data()
517 host->data_dir = DAVINCI_MMC_DATADIR_NONE; in mmc_davinci_prepare_data()
518 writel(0, host->base + DAVINCI_MMCBLEN); in mmc_davinci_prepare_data()
519 writel(0, host->base + DAVINCI_MMCNBLK); in mmc_davinci_prepare_data()
523 dev_dbg(mmc_dev(host->mmc), "%s, %d blocks of %d bytes\n", in mmc_davinci_prepare_data()
524 (data->flags & MMC_DATA_WRITE) ? "write" : "read", in mmc_davinci_prepare_data()
525 data->blocks, data->blksz); in mmc_davinci_prepare_data()
526 dev_dbg(mmc_dev(host->mmc), " DTO %d cycles + %d ns\n", in mmc_davinci_prepare_data()
527 data->timeout_clks, data->timeout_ns); in mmc_davinci_prepare_data()
528 timeout = data->timeout_clks + in mmc_davinci_prepare_data()
529 (data->timeout_ns / host->ns_in_one_cycle); in mmc_davinci_prepare_data()
533 writel(timeout, host->base + DAVINCI_MMCTOD); in mmc_davinci_prepare_data()
534 writel(data->blocks, host->base + DAVINCI_MMCNBLK); in mmc_davinci_prepare_data()
535 writel(data->blksz, host->base + DAVINCI_MMCBLEN); in mmc_davinci_prepare_data()
538 if (data->flags & MMC_DATA_WRITE) { in mmc_davinci_prepare_data()
540 host->data_dir = DAVINCI_MMC_DATADIR_WRITE; in mmc_davinci_prepare_data()
542 host->base + DAVINCI_MMCFIFOCTL); in mmc_davinci_prepare_data()
544 host->base + DAVINCI_MMCFIFOCTL); in mmc_davinci_prepare_data()
547 host->data_dir = DAVINCI_MMC_DATADIR_READ; in mmc_davinci_prepare_data()
549 host->base + DAVINCI_MMCFIFOCTL); in mmc_davinci_prepare_data()
551 host->base + DAVINCI_MMCFIFOCTL); in mmc_davinci_prepare_data()
554 host->bytes_left = data->blocks * data->blksz; in mmc_davinci_prepare_data()
564 if (host->use_dma && (host->bytes_left & (rw_threshold - 1)) == 0 in mmc_davinci_prepare_data()
566 /* zero this to ensure we take no PIO paths */ in mmc_davinci_prepare_data()
567 host->bytes_left = 0; in mmc_davinci_prepare_data()
570 host->sg_len = data->sg_len; in mmc_davinci_prepare_data()
571 sg_miter_start(&host->sg_miter, data->sg, data->sg_len, flags); in mmc_davinci_prepare_data()
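The value written to DAVINCI_MMCTOD in mmc_davinci_prepare_data() above is the card's requested data timeout converted into card-clock cycles. A hedged worked example, assuming a 25 MHz card clock (40 ns per cycle) and the 16-bit field width suggested by the 0xFFFF written at init time:

/* timeout_clks = 0, timeout_ns = 100 ms (an illustrative write timeout) */
unsigned int ns_in_one_cycle = 40;                              /* 25 MHz */
unsigned int timeout = 0 + (100 * 1000 * 1000) / ns_in_one_cycle;  /* 2500000 cycles */

if (timeout > 0xffff)           /* more than the register can count */
	timeout = 0xffff;       /* -> written to DAVINCI_MMCTOD */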
575 static void mmc_davinci_request(struct mmc_host *mmc, struct mmc_request *req) in mmc_davinci_request() argument
577 struct mmc_davinci_host *host = mmc_priv(mmc); in mmc_davinci_request()
585 mmcst1 = readl(host->base + DAVINCI_MMCST1); in mmc_davinci_request()
591 dev_err(mmc_dev(host->mmc), "still BUSY? bad ... \n"); in mmc_davinci_request()
592 req->cmd->error = -ETIMEDOUT; in mmc_davinci_request()
593 mmc_request_done(mmc, req); in mmc_davinci_request()
597 host->do_dma = 0; in mmc_davinci_request()
599 mmc_davinci_start_command(host, req->cmd); in mmc_davinci_request()
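The BUSY check in mmc_davinci_request() above guards against a card that is still driving busy after a previous operation, typically a write. A hedged sketch of the poll that precedes the error path shown; the helper name and the 900 ms bound are our assumptions, and MMCST1_BUSY stands for the driver's busy status bit:

static bool davinci_wait_not_busy(struct mmc_davinci_host *host,
				  unsigned int timeout_ms)
{
	unsigned long deadline = jiffies + msecs_to_jiffies(timeout_ms);
	u32 mmcst1;

	do {
		mmcst1 = readl(host->base + DAVINCI_MMCST1);
		if (!(mmcst1 & MMCST1_BUSY))
			return true;
		cpu_relax();
	} while (time_before(jiffies, deadline));

	return false;	/* card still signalling busy: fail with -ETIMEDOUT */
}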
607 mmc_pclk = host->mmc_input_clk; in calculate_freq_for_card()
610 / (2 * mmc_req_freq)) - 1; in calculate_freq_for_card()
621 host->ns_in_one_cycle = (1000000) / (((mmc_pclk in calculate_freq_for_card()
624 host->ns_in_one_cycle = (1000000) / (((mmc_pclk in calculate_freq_for_card()
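The divider arithmetic in calculate_freq_for_card() above picks the largest card clock that does not exceed the request. A hedged stand-alone sketch of the same selection, with assumed example numbers:

static unsigned int pick_push_pull_divisor(unsigned int pclk, unsigned int req)
{
	unsigned int div = 0;

	if (req && pclk > 2 * req)
		div = pclk / (2 * req) - 1;

	/* e.g. pclk = 99 MHz, req = 25 MHz: div = 0 yields 49.5 MHz,
	 * which overshoots, so bump the divisor and land on 24.75 MHz. */
	if (pclk / (2 * (div + 1)) > req)
		div++;

	return div;
}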
630 static void calculate_clk_divider(struct mmc_host *mmc, struct mmc_ios *ios) in calculate_clk_divider() argument
634 struct mmc_davinci_host *host = mmc_priv(mmc); in calculate_clk_divider()
636 if (ios->bus_mode == MMC_BUSMODE_OPENDRAIN) { in calculate_clk_divider()
643 / (2 * MMCSD_INIT_CLOCK)) - 1; in calculate_clk_divider()
648 temp = readl(host->base + DAVINCI_MMCCLK) & ~MMCCLK_CLKRT_MASK; in calculate_clk_divider()
650 writel(temp, host->base + DAVINCI_MMCCLK); in calculate_clk_divider()
653 host->ns_in_one_cycle = (1000000) / (MMCSD_INIT_CLOCK/1000); in calculate_clk_divider()
656 mmc_push_pull_freq = calculate_freq_for_card(host, ios->clock); in calculate_clk_divider()
661 temp = readl(host->base + DAVINCI_MMCCLK) & ~MMCCLK_CLKEN; in calculate_clk_divider()
662 writel(temp, host->base + DAVINCI_MMCCLK); in calculate_clk_divider()
666 temp = readl(host->base + DAVINCI_MMCCLK) & ~MMCCLK_CLKRT_MASK; in calculate_clk_divider()
668 writel(temp, host->base + DAVINCI_MMCCLK); in calculate_clk_divider()
670 writel(temp | MMCCLK_CLKEN, host->base + DAVINCI_MMCCLK); in calculate_clk_divider()
676 static void mmc_davinci_set_ios(struct mmc_host *mmc, struct mmc_ios *ios) in mmc_davinci_set_ios() argument
678 struct mmc_davinci_host *host = mmc_priv(mmc); in mmc_davinci_set_ios()
679 struct platform_device *pdev = to_platform_device(mmc->parent); in mmc_davinci_set_ios()
680 struct davinci_mmc_config *config = pdev->dev.platform_data; in mmc_davinci_set_ios()
682 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_set_ios()
684 ios->clock, ios->bus_mode, ios->power_mode, in mmc_davinci_set_ios()
685 ios->vdd); in mmc_davinci_set_ios()
687 switch (ios->power_mode) { in mmc_davinci_set_ios()
689 if (config && config->set_power) in mmc_davinci_set_ios()
690 config->set_power(pdev->id, false); in mmc_davinci_set_ios()
693 if (config && config->set_power) in mmc_davinci_set_ios()
694 config->set_power(pdev->id, true); in mmc_davinci_set_ios()
698 switch (ios->bus_width) { in mmc_davinci_set_ios()
700 dev_dbg(mmc_dev(host->mmc), "Enabling 8 bit mode\n"); in mmc_davinci_set_ios()
701 writel((readl(host->base + DAVINCI_MMCCTL) & in mmc_davinci_set_ios()
703 host->base + DAVINCI_MMCCTL); in mmc_davinci_set_ios()
706 dev_dbg(mmc_dev(host->mmc), "Enabling 4 bit mode\n"); in mmc_davinci_set_ios()
707 if (host->version == MMC_CTLR_VERSION_2) in mmc_davinci_set_ios()
708 writel((readl(host->base + DAVINCI_MMCCTL) & in mmc_davinci_set_ios()
710 host->base + DAVINCI_MMCCTL); in mmc_davinci_set_ios()
712 writel(readl(host->base + DAVINCI_MMCCTL) | in mmc_davinci_set_ios()
714 host->base + DAVINCI_MMCCTL); in mmc_davinci_set_ios()
717 dev_dbg(mmc_dev(host->mmc), "Enabling 1 bit mode\n"); in mmc_davinci_set_ios()
718 if (host->version == MMC_CTLR_VERSION_2) in mmc_davinci_set_ios()
719 writel(readl(host->base + DAVINCI_MMCCTL) & in mmc_davinci_set_ios()
721 host->base + DAVINCI_MMCCTL); in mmc_davinci_set_ios()
723 writel(readl(host->base + DAVINCI_MMCCTL) & in mmc_davinci_set_ios()
725 host->base + DAVINCI_MMCCTL); in mmc_davinci_set_ios()
729 calculate_clk_divider(mmc, ios); in mmc_davinci_set_ios()
731 host->bus_mode = ios->bus_mode; in mmc_davinci_set_ios()
732 if (ios->power_mode == MMC_POWER_UP) { in mmc_davinci_set_ios()
737 writel(0, host->base + DAVINCI_MMCARGHL); in mmc_davinci_set_ios()
738 writel(MMCCMD_INITCK, host->base + DAVINCI_MMCCMD); in mmc_davinci_set_ios()
740 u32 tmp = readl(host->base + DAVINCI_MMCST0); in mmc_davinci_set_ios()
749 dev_warn(mmc_dev(host->mmc), "powerup timeout\n"); in mmc_davinci_set_ios()
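On MMC_POWER_UP the controller must clock out the initialization cycles (at least 74) that the MMC/SD specification requires before the first command; the MMCCMD_INITCK write above triggers that, and the driver then polls MMCST0 until a response-done indication appears. A hedged sketch of that wait, where the 50 ms bound and the MMCST0_RSPDNE bit name are assumptions filling in the unmatched lines:

unsigned long deadline = jiffies + msecs_to_jiffies(50);
bool timed_out = true;

writel(0, host->base + DAVINCI_MMCARGHL);
writel(MMCCMD_INITCK, host->base + DAVINCI_MMCCMD);

while (time_before(jiffies, deadline)) {
	if (readl(host->base + DAVINCI_MMCST0) & MMCST0_RSPDNE) {
		timed_out = false;
		break;
	}
	cpu_relax();
}

if (timed_out)
	dev_warn(mmc_dev(host->mmc), "powerup timeout\n");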
758 host->data = NULL; in mmc_davinci_xfer_done()
760 if (host->mmc->caps & MMC_CAP_SDIO_IRQ) { in mmc_davinci_xfer_done()
762 * SDIO Interrupt Detection work-around as suggested by in mmc_davinci_xfer_done()
766 if (host->sdio_int && !(readl(host->base + DAVINCI_SDIOST0) & in mmc_davinci_xfer_done()
768 writel(SDIOIST_IOINT, host->base + DAVINCI_SDIOIST); in mmc_davinci_xfer_done()
769 mmc_signal_sdio_irq(host->mmc); in mmc_davinci_xfer_done()
773 if (host->do_dma) { in mmc_davinci_xfer_done()
776 dma_unmap_sg(mmc_dev(host->mmc), data->sg, data->sg_len, in mmc_davinci_xfer_done()
778 host->do_dma = false; in mmc_davinci_xfer_done()
780 host->data_dir = DAVINCI_MMC_DATADIR_NONE; in mmc_davinci_xfer_done()
782 if (!data->stop || (host->cmd && host->cmd->error)) { in mmc_davinci_xfer_done()
783 mmc_request_done(host->mmc, data->mrq); in mmc_davinci_xfer_done()
784 writel(0, host->base + DAVINCI_MMCIM); in mmc_davinci_xfer_done()
785 host->active_request = false; in mmc_davinci_xfer_done()
787 mmc_davinci_start_command(host, data->stop); in mmc_davinci_xfer_done()
793 host->cmd = NULL; in mmc_davinci_cmd_done()
795 if (cmd->flags & MMC_RSP_PRESENT) { in mmc_davinci_cmd_done()
796 if (cmd->flags & MMC_RSP_136) { in mmc_davinci_cmd_done()
798 cmd->resp[3] = readl(host->base + DAVINCI_MMCRSP01); in mmc_davinci_cmd_done()
799 cmd->resp[2] = readl(host->base + DAVINCI_MMCRSP23); in mmc_davinci_cmd_done()
800 cmd->resp[1] = readl(host->base + DAVINCI_MMCRSP45); in mmc_davinci_cmd_done()
801 cmd->resp[0] = readl(host->base + DAVINCI_MMCRSP67); in mmc_davinci_cmd_done()
804 cmd->resp[0] = readl(host->base + DAVINCI_MMCRSP67); in mmc_davinci_cmd_done()
808 if (host->data == NULL || cmd->error) { in mmc_davinci_cmd_done()
809 if (cmd->error == -ETIMEDOUT) in mmc_davinci_cmd_done()
810 cmd->mrq->cmd->retries = 0; in mmc_davinci_cmd_done()
811 mmc_request_done(host->mmc, cmd->mrq); in mmc_davinci_cmd_done()
812 writel(0, host->base + DAVINCI_MMCIM); in mmc_davinci_cmd_done()
813 host->active_request = false; in mmc_davinci_cmd_done()
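The response unpacking in mmc_davinci_cmd_done() above follows the MMC core convention that resp[0] holds the most significant word, so the four reads for a 136-bit (R2) response run from MMCRSP01 up to MMCRSP67. An annotated, hedged restatement:

if (cmd->flags & MMC_RSP_136) {			/* long response, e.g. CID/CSD */
	cmd->resp[3] = readl(host->base + DAVINCI_MMCRSP01);	/* bits  31..0   */
	cmd->resp[2] = readl(host->base + DAVINCI_MMCRSP23);	/* bits  63..32  */
	cmd->resp[1] = readl(host->base + DAVINCI_MMCRSP45);	/* bits  95..64  */
	cmd->resp[0] = readl(host->base + DAVINCI_MMCRSP67);	/* bits 127..96  */
} else if (cmd->flags & MMC_RSP_PRESENT) {	/* short 48-bit response */
	cmd->resp[0] = readl(host->base + DAVINCI_MMCRSP67);
}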
822 temp = readl(host->base + DAVINCI_MMCCTL); in mmc_davinci_reset_ctrl()
828 writel(temp, host->base + DAVINCI_MMCCTL); in mmc_davinci_reset_ctrl()
837 if (!host->do_dma) in davinci_abort_data()
838 sg_miter_stop(&host->sg_miter); in davinci_abort_data()
846 status = readl(host->base + DAVINCI_SDIOIST); in mmc_davinci_sdio_irq()
848 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_sdio_irq()
850 writel(status | SDIOIST_IOINT, host->base + DAVINCI_SDIOIST); in mmc_davinci_sdio_irq()
851 mmc_signal_sdio_irq(host->mmc); in mmc_davinci_sdio_irq()
862 struct mmc_data *data = host->data; in mmc_davinci_irq()
864 if (host->cmd == NULL && host->data == NULL) { in mmc_davinci_irq()
865 status = readl(host->base + DAVINCI_MMCST0); in mmc_davinci_irq()
866 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_irq()
869 writel(0, host->base + DAVINCI_MMCIM); in mmc_davinci_irq()
873 status = readl(host->base + DAVINCI_MMCST0); in mmc_davinci_irq()
881 * non-dma. in mmc_davinci_irq()
883 if (host->bytes_left && (status & (MMCST0_DXRDY | MMCST0_DRRDY))) { in mmc_davinci_irq()
893 im_val = readl(host->base + DAVINCI_MMCIM); in mmc_davinci_irq()
894 writel(0, host->base + DAVINCI_MMCIM); in mmc_davinci_irq()
898 status = readl(host->base + DAVINCI_MMCST0); in mmc_davinci_irq()
900 } while (host->bytes_left && in mmc_davinci_irq()
907 * status is race-prone. in mmc_davinci_irq()
909 writel(im_val, host->base + DAVINCI_MMCIM); in mmc_davinci_irq()
915 if (!host->do_dma) { in mmc_davinci_irq()
916 if (host->bytes_left > 0) in mmc_davinci_irq()
918 * no RX ints are generated in mmc_davinci_irq()
920 davinci_fifo_data_trans(host, host->bytes_left); in mmc_davinci_irq()
921 sg_miter_stop(&host->sg_miter); in mmc_davinci_irq()
924 data->bytes_xfered = data->blocks * data->blksz; in mmc_davinci_irq()
926 dev_err(mmc_dev(host->mmc), in mmc_davinci_irq()
927 "DATDNE with no host->data\n"); in mmc_davinci_irq()
933 data->error = -ETIMEDOUT; in mmc_davinci_irq()
936 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_irq()
945 data->error = -EILSEQ; in mmc_davinci_irq()
951 * case and the two three-bit patterns in various SD specs in mmc_davinci_irq()
955 u32 temp = readb(host->base + DAVINCI_MMCDRSP); in mmc_davinci_irq()
958 data->error = -ETIMEDOUT; in mmc_davinci_irq()
960 dev_dbg(mmc_dev(host->mmc), "data %s %s error\n", in mmc_davinci_irq()
962 (data->error == -ETIMEDOUT) ? "timeout" : "CRC"); in mmc_davinci_irq()
969 if (host->cmd) { in mmc_davinci_irq()
970 dev_dbg(mmc_dev(host->mmc), in mmc_davinci_irq()
972 host->cmd->opcode, qstatus); in mmc_davinci_irq()
973 host->cmd->error = -ETIMEDOUT; in mmc_davinci_irq()
984 dev_dbg(mmc_dev(host->mmc), "Command CRC error\n"); in mmc_davinci_irq()
985 if (host->cmd) { in mmc_davinci_irq()
986 host->cmd->error = -EILSEQ; in mmc_davinci_irq()
993 end_command = host->cmd ? 1 : 0; in mmc_davinci_irq()
997 mmc_davinci_cmd_done(host, host->cmd); in mmc_davinci_irq()
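The fragments in mmc_davinci_irq() above around the "non-dma" and "race-prone" comments are the handler's PIO fast path: with MMCIM masked it keeps moving FIFO bursts while the DXRDY/DRRDY bits stay set, folding every fresh MMCST0 read into qstatus, and only restores the interrupt mask afterwards, since re-enabling MMCIM after clearing status would race with a pending source. A hedged reconstruction of that loop:

if (host->bytes_left && (status & (MMCST0_DXRDY | MMCST0_DRRDY))) {
	u32 im_val = readl(host->base + DAVINCI_MMCIM);

	writel(0, host->base + DAVINCI_MMCIM);	/* mask while polling */

	do {
		davinci_fifo_data_trans(host, rw_threshold);
		status = readl(host->base + DAVINCI_MMCST0);
		qstatus |= status;		/* keep end-of-transfer bits */
	} while (host->bytes_left &&
		 (status & (MMCST0_DXRDY | MMCST0_DRRDY)));

	/* a still-pending source will fire once unmasked */
	writel(im_val, host->base + DAVINCI_MMCIM);
}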
1003 static int mmc_davinci_get_cd(struct mmc_host *mmc) in mmc_davinci_get_cd() argument
1005 struct platform_device *pdev = to_platform_device(mmc->parent); in mmc_davinci_get_cd()
1006 struct davinci_mmc_config *config = pdev->dev.platform_data; in mmc_davinci_get_cd()
1008 if (config && config->get_cd) in mmc_davinci_get_cd()
1009 return config->get_cd(pdev->id); in mmc_davinci_get_cd()
1011 return mmc_gpio_get_cd(mmc); in mmc_davinci_get_cd()
1014 static int mmc_davinci_get_ro(struct mmc_host *mmc) in mmc_davinci_get_ro() argument
1016 struct platform_device *pdev = to_platform_device(mmc->parent); in mmc_davinci_get_ro()
1017 struct davinci_mmc_config *config = pdev->dev.platform_data; in mmc_davinci_get_ro()
1019 if (config && config->get_ro) in mmc_davinci_get_ro()
1020 return config->get_ro(pdev->id); in mmc_davinci_get_ro()
1022 return mmc_gpio_get_ro(mmc); in mmc_davinci_get_ro()
1025 static void mmc_davinci_enable_sdio_irq(struct mmc_host *mmc, int enable) in mmc_davinci_enable_sdio_irq() argument
1027 struct mmc_davinci_host *host = mmc_priv(mmc); in mmc_davinci_enable_sdio_irq()
1030 if (!(readl(host->base + DAVINCI_SDIOST0) & SDIOST0_DAT1_HI)) { in mmc_davinci_enable_sdio_irq()
1031 writel(SDIOIST_IOINT, host->base + DAVINCI_SDIOIST); in mmc_davinci_enable_sdio_irq()
1032 mmc_signal_sdio_irq(host->mmc); in mmc_davinci_enable_sdio_irq()
1034 host->sdio_int = true; in mmc_davinci_enable_sdio_irq()
1035 writel(readl(host->base + DAVINCI_SDIOIEN) | in mmc_davinci_enable_sdio_irq()
1036 SDIOIEN_IOINTEN, host->base + DAVINCI_SDIOIEN); in mmc_davinci_enable_sdio_irq()
1039 host->sdio_int = false; in mmc_davinci_enable_sdio_irq()
1040 writel(readl(host->base + DAVINCI_SDIOIEN) & ~SDIOIEN_IOINTEN, in mmc_davinci_enable_sdio_irq()
1041 host->base + DAVINCI_SDIOIEN); in mmc_davinci_enable_sdio_irq()
1053 /*----------------------------------------------------------------------*/
1061 struct mmc_host *mmc; in mmc_davinci_cpufreq_transition() local
1065 mmc = host->mmc; in mmc_davinci_cpufreq_transition()
1066 mmc_pclk = clk_get_rate(host->clk); in mmc_davinci_cpufreq_transition()
1069 spin_lock_irqsave(&mmc->lock, flags); in mmc_davinci_cpufreq_transition()
1070 host->mmc_input_clk = mmc_pclk; in mmc_davinci_cpufreq_transition()
1071 calculate_clk_divider(mmc, &mmc->ios); in mmc_davinci_cpufreq_transition()
1072 spin_unlock_irqrestore(&mmc->lock, flags); in mmc_davinci_cpufreq_transition()
1080 host->freq_transition.notifier_call = mmc_davinci_cpufreq_transition; in mmc_davinci_cpufreq_register()
1082 return cpufreq_register_notifier(&host->freq_transition, in mmc_davinci_cpufreq_register()
1088 cpufreq_unregister_notifier(&host->freq_transition, in mmc_davinci_cpufreq_deregister()
1106 writel(0, host->base + DAVINCI_MMCCLK); in init_mmcsd_host()
1107 writel(MMCCLK_CLKEN, host->base + DAVINCI_MMCCLK); in init_mmcsd_host()
1109 writel(0x1FFF, host->base + DAVINCI_MMCTOR); in init_mmcsd_host()
1110 writel(0xFFFF, host->base + DAVINCI_MMCTOD); in init_mmcsd_host()
1117 .name = "dm6441-mmc",
1120 .name = "da830-mmc",
1129 .compatible = "ti,dm6441-mmc",
1133 .compatible = "ti,da830-mmc",
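The two id tables above are how the probe path later selects the controller revision (host->version = id_entry->driver_data further down). A hedged sketch of what the platform id table plausibly carries, using the MMC_CTLR_VERSION_* constants from platform_data/mmc-davinci.h; the exact driver_data values are an assumption, since those fields are not among the matched lines:

static const struct platform_device_id davinci_mmc_devtype[] = {
	{ .name = "dm6441-mmc", .driver_data = MMC_CTLR_VERSION_1 },
	{ .name = "da830-mmc",  .driver_data = MMC_CTLR_VERSION_2 },
	{ /* sentinel */ }
};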
1140 static int mmc_davinci_parse_pdata(struct mmc_host *mmc) in mmc_davinci_parse_pdata() argument
1142 struct platform_device *pdev = to_platform_device(mmc->parent); in mmc_davinci_parse_pdata()
1143 struct davinci_mmc_config *pdata = pdev->dev.platform_data; in mmc_davinci_parse_pdata()
1148 return -EINVAL; in mmc_davinci_parse_pdata()
1150 host = mmc_priv(mmc); in mmc_davinci_parse_pdata()
1152 return -EINVAL; in mmc_davinci_parse_pdata()
1154 if (pdata && pdata->nr_sg) in mmc_davinci_parse_pdata()
1155 host->nr_sg = pdata->nr_sg - 1; in mmc_davinci_parse_pdata()
1157 if (pdata && (pdata->wires == 4 || pdata->wires == 0)) in mmc_davinci_parse_pdata()
1158 mmc->caps |= MMC_CAP_4_BIT_DATA; in mmc_davinci_parse_pdata()
1160 if (pdata && (pdata->wires == 8)) in mmc_davinci_parse_pdata()
1161 mmc->caps |= (MMC_CAP_4_BIT_DATA | MMC_CAP_8_BIT_DATA); in mmc_davinci_parse_pdata()
1163 mmc->f_min = 312500; in mmc_davinci_parse_pdata()
1164 mmc->f_max = 25000000; in mmc_davinci_parse_pdata()
1165 if (pdata && pdata->max_freq) in mmc_davinci_parse_pdata()
1166 mmc->f_max = pdata->max_freq; in mmc_davinci_parse_pdata()
1167 if (pdata && pdata->caps) in mmc_davinci_parse_pdata()
1168 mmc->caps |= pdata->caps; in mmc_davinci_parse_pdata()
1171 ret = mmc_gpiod_request_cd(mmc, "cd", 0, false, 0); in mmc_davinci_parse_pdata()
1172 if (ret == -EPROBE_DEFER) in mmc_davinci_parse_pdata()
1175 mmc->caps |= MMC_CAP_NEEDS_POLL; in mmc_davinci_parse_pdata()
1177 ret = mmc_gpiod_request_ro(mmc, "wp", 0, 0); in mmc_davinci_parse_pdata()
1178 if (ret == -EPROBE_DEFER) in mmc_davinci_parse_pdata()
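mmc_davinci_parse_pdata() above maps legacy board platform data onto MMC core capabilities and card-detect/write-protect GPIOs. A hedged example of such platform data as a board file might provide it; the field names follow the pdata accesses in the matched lines, the values are purely illustrative:

#include <linux/platform_data/mmc-davinci.h>
#include <linux/mmc/host.h>

static struct davinci_mmc_config example_mmc_config = {
	.wires		= 4,			/* -> MMC_CAP_4_BIT_DATA */
	.max_freq	= 25000000,		/* caps mmc->f_max */
	.caps		= MMC_CAP_MMC_HIGHSPEED | MMC_CAP_SD_HIGHSPEED,
	.nr_sg		= 16,			/* clamped to MAX_NR_SG in probe */
};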
1187 struct mmc_host *mmc = NULL; in davinci_mmcsd_probe() local
1195 return -ENODEV; in davinci_mmcsd_probe()
1201 mem = devm_request_mem_region(&pdev->dev, r->start, mem_size, in davinci_mmcsd_probe()
1202 pdev->name); in davinci_mmcsd_probe()
1204 return -EBUSY; in davinci_mmcsd_probe()
1206 mmc = mmc_alloc_host(sizeof(struct mmc_davinci_host), &pdev->dev); in davinci_mmcsd_probe()
1207 if (!mmc) in davinci_mmcsd_probe()
1208 return -ENOMEM; in davinci_mmcsd_probe()
1210 host = mmc_priv(mmc); in davinci_mmcsd_probe()
1211 host->mmc = mmc; /* Important */ in davinci_mmcsd_probe()
1213 host->mem_res = mem; in davinci_mmcsd_probe()
1214 host->base = devm_ioremap(&pdev->dev, mem->start, mem_size); in davinci_mmcsd_probe()
1215 if (!host->base) { in davinci_mmcsd_probe()
1216 ret = -ENOMEM; in davinci_mmcsd_probe()
1220 host->clk = devm_clk_get(&pdev->dev, NULL); in davinci_mmcsd_probe()
1221 if (IS_ERR(host->clk)) { in davinci_mmcsd_probe()
1222 ret = PTR_ERR(host->clk); in davinci_mmcsd_probe()
1225 ret = clk_prepare_enable(host->clk); in davinci_mmcsd_probe()
1229 host->mmc_input_clk = clk_get_rate(host->clk); in davinci_mmcsd_probe()
1231 pdev->id_entry = device_get_match_data(&pdev->dev); in davinci_mmcsd_probe()
1232 if (pdev->id_entry) { in davinci_mmcsd_probe()
1233 ret = mmc_of_parse(mmc); in davinci_mmcsd_probe()
1235 dev_err_probe(&pdev->dev, ret, in davinci_mmcsd_probe()
1240 ret = mmc_davinci_parse_pdata(mmc); in davinci_mmcsd_probe()
1242 dev_err(&pdev->dev, in davinci_mmcsd_probe()
1247 if (host->nr_sg > MAX_NR_SG || !host->nr_sg) in davinci_mmcsd_probe()
1248 host->nr_sg = MAX_NR_SG; in davinci_mmcsd_probe()
1252 host->use_dma = use_dma; in davinci_mmcsd_probe()
1253 host->mmc_irq = irq; in davinci_mmcsd_probe()
1254 host->sdio_irq = platform_get_irq_optional(pdev, 1); in davinci_mmcsd_probe()
1256 if (host->use_dma) { in davinci_mmcsd_probe()
1258 if (ret == -EPROBE_DEFER) in davinci_mmcsd_probe()
1261 host->use_dma = 0; in davinci_mmcsd_probe()
1264 mmc->caps |= MMC_CAP_WAIT_WHILE_BUSY; in davinci_mmcsd_probe()
1268 host->version = id_entry->driver_data; in davinci_mmcsd_probe()
1270 mmc->ops = &mmc_davinci_ops; in davinci_mmcsd_probe()
1271 mmc->ocr_avail = MMC_VDD_32_33 | MMC_VDD_33_34; in davinci_mmcsd_probe()
1273 /* With no iommu coalescing pages, each phys_seg is a hw_seg. in davinci_mmcsd_probe()
1277 mmc->max_segs = MAX_NR_SG; in davinci_mmcsd_probe()
1280 mmc->max_seg_size = MAX_CCNT * rw_threshold; in davinci_mmcsd_probe()
1282 /* MMC/SD controller limits for multiblock requests */ in davinci_mmcsd_probe()
1283 mmc->max_blk_size = 4095; /* BLEN is 12 bits */ in davinci_mmcsd_probe()
1284 mmc->max_blk_count = 65535; /* NBLK is 16 bits */ in davinci_mmcsd_probe()
1285 mmc->max_req_size = mmc->max_blk_size * mmc->max_blk_count; in davinci_mmcsd_probe()
1287 dev_dbg(mmc_dev(host->mmc), "max_segs=%d\n", mmc->max_segs); in davinci_mmcsd_probe()
1288 dev_dbg(mmc_dev(host->mmc), "max_blk_size=%d\n", mmc->max_blk_size); in davinci_mmcsd_probe()
1289 dev_dbg(mmc_dev(host->mmc), "max_req_size=%d\n", mmc->max_req_size); in davinci_mmcsd_probe()
1290 dev_dbg(mmc_dev(host->mmc), "max_seg_size=%d\n", mmc->max_seg_size); in davinci_mmcsd_probe()
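The limits set just above multiply out as follows; a hedged worked example, assuming the default 32-byte rw_threshold:

/*
 *   max_seg_size = MAX_CCNT * rw_threshold = 65535 * 32 = 2097120 bytes (just under 2 MiB)
 *   max_req_size = max_blk_size * max_blk_count
 *                = 4095 * 65535             = 268365825 bytes (just under 256 MiB)
 */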
1296 dev_err(&pdev->dev, "failed to register cpufreq\n"); in davinci_mmcsd_probe()
1300 ret = mmc_add_host(mmc); in davinci_mmcsd_probe()
1304 ret = devm_request_irq(&pdev->dev, irq, mmc_davinci_irq, 0, in davinci_mmcsd_probe()
1305 mmc_hostname(mmc), host); in davinci_mmcsd_probe()
1309 if (host->sdio_irq >= 0) { in davinci_mmcsd_probe()
1310 ret = devm_request_irq(&pdev->dev, host->sdio_irq, in davinci_mmcsd_probe()
1312 mmc_hostname(mmc), host); in davinci_mmcsd_probe()
1314 mmc->caps |= MMC_CAP_SDIO_IRQ; in davinci_mmcsd_probe()
1317 rename_region(mem, mmc_hostname(mmc)); in davinci_mmcsd_probe()
1319 if (mmc->caps & MMC_CAP_8_BIT_DATA) in davinci_mmcsd_probe()
1321 else if (mmc->caps & MMC_CAP_4_BIT_DATA) in davinci_mmcsd_probe()
1325 dev_info(mmc_dev(host->mmc), "Using %s, %d-bit mode\n", in davinci_mmcsd_probe()
1326 host->use_dma ? "DMA" : "PIO", bus_width); in davinci_mmcsd_probe()
1331 mmc_remove_host(mmc); in davinci_mmcsd_probe()
1338 clk_disable_unprepare(host->clk); in davinci_mmcsd_probe()
1342 mmc_free_host(mmc); in davinci_mmcsd_probe()
1351 mmc_remove_host(host->mmc); in davinci_mmcsd_remove()
1354 clk_disable_unprepare(host->clk); in davinci_mmcsd_remove()
1355 mmc_free_host(host->mmc); in davinci_mmcsd_remove()
1363 writel(0, host->base + DAVINCI_MMCIM); in davinci_mmcsd_suspend()
1365 clk_disable(host->clk); in davinci_mmcsd_suspend()
1375 ret = clk_enable(host->clk); in davinci_mmcsd_resume()
1410 MODULE_DESCRIPTION("MMC/SD driver for Davinci MMC controller");