Lines matching full:d — identifier cross-reference for the variable d (function arguments and locals) in the Intel LGM DMA driver (apparently drivers/dma/lgm/lgm-dma.c). The left-hand numbers are the source file's line numbers; lines that do not mention d are elided by the search.

279 ldma_update_bits(struct ldma_dev *d, u32 mask, u32 val, u32 ofs)  in ldma_update_bits()  argument
283 old_val = readl(d->base + ofs); in ldma_update_bits()
287 writel(new_val, d->base + ofs); in ldma_update_bits()
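
Lines 280-286 are elided by the search; a minimal reconstruction of the helper, assuming the conventional mask/merge between the listed readl() and writel():

	static void ldma_update_bits(struct ldma_dev *d, u32 mask, u32 val, u32 ofs)
	{
		u32 old_val, new_val;

		old_val = readl(d->base + ofs);
		/* Assumed merge: clear the masked field, then OR in the new value. */
		new_val = (old_val & ~mask) | (val & mask);
		if (new_val != old_val)
			writel(new_val, d->base + ofs);
	}
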
315 static void ldma_dev_reset(struct ldma_dev *d) in ldma_dev_reset() argument
320 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_reset()
321 ldma_update_bits(d, DMA_CTRL_RST, DMA_CTRL_RST, DMA_CTRL); in ldma_dev_reset()
322 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_reset()
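
ldma_dev_reset() is the template that every global *_cfg helper below repeats: take dev_lock, perform one masked update of a shared register, release. A sketch, assuming the elided declarations are just the flags variable:

	static void ldma_dev_reset(struct ldma_dev *d)
	{
		unsigned long flags;

		/* DMA_CTRL is shared across channels; serialize the RMW. */
		spin_lock_irqsave(&d->dev_lock, flags);
		ldma_update_bits(d, DMA_CTRL_RST, DMA_CTRL_RST, DMA_CTRL);
		spin_unlock_irqrestore(&d->dev_lock, flags);
	}
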
325 static void ldma_dev_pkt_arb_cfg(struct ldma_dev *d, bool enable) in ldma_dev_pkt_arb_cfg() argument
331 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_pkt_arb_cfg()
332 ldma_update_bits(d, mask, val, DMA_CTRL); in ldma_dev_pkt_arb_cfg()
333 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_pkt_arb_cfg()
336 static void ldma_dev_sram_desc_cfg(struct ldma_dev *d, bool enable) in ldma_dev_sram_desc_cfg() argument
342 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_sram_desc_cfg()
343 ldma_update_bits(d, mask, val, DMA_CTRL); in ldma_dev_sram_desc_cfg()
344 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_sram_desc_cfg()
347 static void ldma_dev_chan_flow_ctl_cfg(struct ldma_dev *d, bool enable) in ldma_dev_chan_flow_ctl_cfg() argument
352 if (d->inst->type != DMA_TYPE_TX) in ldma_dev_chan_flow_ctl_cfg()
358 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_chan_flow_ctl_cfg()
359 ldma_update_bits(d, mask, val, DMA_CTRL); in ldma_dev_chan_flow_ctl_cfg()
360 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_chan_flow_ctl_cfg()
363 static void ldma_dev_global_polling_enable(struct ldma_dev *d) in ldma_dev_global_polling_enable() argument
369 val |= FIELD_PREP(DMA_CPOLL_CNT, d->pollcnt); in ldma_dev_global_polling_enable()
371 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_global_polling_enable()
372 ldma_update_bits(d, mask, val, DMA_CPOLL); in ldma_dev_global_polling_enable()
373 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_global_polling_enable()
376 static void ldma_dev_desc_fetch_on_demand_cfg(struct ldma_dev *d, bool enable) in ldma_dev_desc_fetch_on_demand_cfg() argument
381 if (d->inst->type == DMA_TYPE_MCPY) in ldma_dev_desc_fetch_on_demand_cfg()
387 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_desc_fetch_on_demand_cfg()
388 ldma_update_bits(d, mask, val, DMA_CTRL); in ldma_dev_desc_fetch_on_demand_cfg()
389 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_desc_fetch_on_demand_cfg()
392 static void ldma_dev_byte_enable_cfg(struct ldma_dev *d, bool enable) in ldma_dev_byte_enable_cfg() argument
398 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_byte_enable_cfg()
399 ldma_update_bits(d, mask, val, DMA_CTRL); in ldma_dev_byte_enable_cfg()
400 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_byte_enable_cfg()
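
The mask/val setup elided in each boolean *_cfg helper plausibly follows one shape; a sketch for the byte-enable case, with DMA_CTRL_ENBE as an assumed bit name:

	static void ldma_dev_byte_enable_cfg(struct ldma_dev *d, bool enable)
	{
		unsigned long flags;
		u32 mask = DMA_CTRL_ENBE;		/* assumed control bit */
		u32 val = enable ? DMA_CTRL_ENBE : 0;

		spin_lock_irqsave(&d->dev_lock, flags);
		ldma_update_bits(d, mask, val, DMA_CTRL);
		spin_unlock_irqrestore(&d->dev_lock, flags);
	}
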
403 static void ldma_dev_orrc_cfg(struct ldma_dev *d) in ldma_dev_orrc_cfg() argument
409 if (d->inst->type == DMA_TYPE_RX) in ldma_dev_orrc_cfg()
413 if (d->inst->orrc > 0 && d->inst->orrc <= DMA_ORRC_MAX_CNT) in ldma_dev_orrc_cfg()
414 val = DMA_ORRC_EN | FIELD_PREP(DMA_ORRC_ORRCNT, d->inst->orrc); in ldma_dev_orrc_cfg()
416 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_orrc_cfg()
417 ldma_update_bits(d, mask, val, DMA_ORRC); in ldma_dev_orrc_cfg()
418 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_orrc_cfg()
421 static void ldma_dev_df_tout_cfg(struct ldma_dev *d, bool enable, int tcnt) in ldma_dev_df_tout_cfg() argument
432 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_df_tout_cfg()
433 ldma_update_bits(d, mask, val, DMA_CTRL); in ldma_dev_df_tout_cfg()
434 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_df_tout_cfg()
437 static void ldma_dev_dburst_wr_cfg(struct ldma_dev *d, bool enable) in ldma_dev_dburst_wr_cfg() argument
442 if (d->inst->type != DMA_TYPE_RX && d->inst->type != DMA_TYPE_MCPY) in ldma_dev_dburst_wr_cfg()
448 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_dburst_wr_cfg()
449 ldma_update_bits(d, mask, val, DMA_CTRL); in ldma_dev_dburst_wr_cfg()
450 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_dburst_wr_cfg()
453 static void ldma_dev_vld_fetch_ack_cfg(struct ldma_dev *d, bool enable) in ldma_dev_vld_fetch_ack_cfg() argument
458 if (d->inst->type != DMA_TYPE_TX) in ldma_dev_vld_fetch_ack_cfg()
464 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_vld_fetch_ack_cfg()
465 ldma_update_bits(d, mask, val, DMA_CTRL); in ldma_dev_vld_fetch_ack_cfg()
466 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_vld_fetch_ack_cfg()
469 static void ldma_dev_drb_cfg(struct ldma_dev *d, int enable) in ldma_dev_drb_cfg() argument
475 spin_lock_irqsave(&d->dev_lock, flags); in ldma_dev_drb_cfg()
476 ldma_update_bits(d, mask, val, DMA_CTRL); in ldma_dev_drb_cfg()
477 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_dev_drb_cfg()
480 static int ldma_dev_cfg(struct ldma_dev *d) in ldma_dev_cfg() argument
484 ldma_dev_pkt_arb_cfg(d, true); in ldma_dev_cfg()
485 ldma_dev_global_polling_enable(d); in ldma_dev_cfg()
487 enable = !!(d->flags & DMA_DFT_DRB); in ldma_dev_cfg()
488 ldma_dev_drb_cfg(d, enable); in ldma_dev_cfg()
490 enable = !!(d->flags & DMA_EN_BYTE_EN); in ldma_dev_cfg()
491 ldma_dev_byte_enable_cfg(d, enable); in ldma_dev_cfg()
493 enable = !!(d->flags & DMA_CHAN_FLOW_CTL); in ldma_dev_cfg()
494 ldma_dev_chan_flow_ctl_cfg(d, enable); in ldma_dev_cfg()
496 enable = !!(d->flags & DMA_DESC_FOD); in ldma_dev_cfg()
497 ldma_dev_desc_fetch_on_demand_cfg(d, enable); in ldma_dev_cfg()
499 enable = !!(d->flags & DMA_DESC_IN_SRAM); in ldma_dev_cfg()
500 ldma_dev_sram_desc_cfg(d, enable); in ldma_dev_cfg()
502 enable = !!(d->flags & DMA_DBURST_WR); in ldma_dev_cfg()
503 ldma_dev_dburst_wr_cfg(d, enable); in ldma_dev_cfg()
505 enable = !!(d->flags & DMA_VALID_DESC_FETCH_ACK); in ldma_dev_cfg()
506 ldma_dev_vld_fetch_ack_cfg(d, enable); in ldma_dev_cfg()
508 if (d->ver > DMA_VER22) { in ldma_dev_cfg()
509 ldma_dev_orrc_cfg(d); in ldma_dev_cfg()
510 ldma_dev_df_tout_cfg(d, true, DMA_DFT_DESC_TCNT); in ldma_dev_cfg()
513 dev_dbg(d->dev, "%s Controller 0x%08x configuration done\n", in ldma_dev_cfg()
514 d->inst->name, readl(d->base + DMA_CTRL)); in ldma_dev_cfg()
521 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_cctrl_cfg() local
526 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_cctrl_cfg()
527 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_cctrl_cfg()
528 reg = readl(d->base + DMA_CCTRL); in ldma_chan_cctrl_cfg()
542 writel(val, d->base + DMA_CCTRL); in ldma_chan_cctrl_cfg()
543 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_cctrl_cfg()
550 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_irq_init() local
564 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_irq_init()
565 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_irq_init()
568 writel(0, d->base + DMA_CIE); in ldma_chan_irq_init()
569 writel(DMA_CI_ALL, d->base + DMA_CIS); in ldma_chan_irq_init()
571 ldma_update_bits(d, cn_bit, 0, enofs); in ldma_chan_irq_init()
572 writel(cn_bit, d->base + crofs); in ldma_chan_irq_init()
573 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_irq_init()
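
ldma_chan_irq_init() quiesces one channel's interrupts: select it, mask its per-channel enables (DMA_CIE), write-1-to-clear any stale status (DMA_CIS), then clear its bit in the global enable and request registers. An annotated sketch of the listed core, where enofs/crofs are the global enable/clear offsets computed in the elided lines:

	spin_lock_irqsave(&d->dev_lock, flags);
	ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS);	/* select channel */

	writel(0, d->base + DMA_CIE);		/* mask all channel interrupts */
	writel(DMA_CI_ALL, d->base + DMA_CIS);	/* ack any stale status */

	ldma_update_bits(d, cn_bit, 0, enofs);	/* global enable bit off */
	writel(cn_bit, d->base + crofs);	/* global request bit cleared */
	spin_unlock_irqrestore(&d->dev_lock, flags);
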
578 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_set_class() local
581 if (d->inst->type == DMA_TYPE_MCPY || val > DMA_MAX_CLASS) in ldma_chan_set_class()
589 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_set_class()
590 ldma_update_bits(d, DMA_CCTRL_CLASS | DMA_CCTRL_CLASSH, class_val, in ldma_chan_set_class()
596 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_on() local
603 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_on()
604 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_on()
605 ldma_update_bits(d, DMA_CCTRL_ON, DMA_CCTRL_ON, DMA_CCTRL); in ldma_chan_on()
606 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_on()
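
Channel registers are windowed: a write to DMA_CS selects which channel DMA_CCTRL (and the other per-channel registers) address. Select and access must share one dev_lock critical section so a concurrent caller cannot retarget the window in between; ldma_chan_on() is the minimal instance:

	spin_lock_irqsave(&d->dev_lock, flags);
	ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS);	/* window select */
	ldma_update_bits(d, DMA_CCTRL_ON, DMA_CCTRL_ON, DMA_CCTRL);
	spin_unlock_irqrestore(&d->dev_lock, flags);
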
615 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_off() local
620 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_off()
621 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_off()
622 ldma_update_bits(d, DMA_CCTRL_ON, 0, DMA_CCTRL); in ldma_chan_off()
623 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_off()
625 ret = readl_poll_timeout_atomic(d->base + DMA_CCTRL, val, in ldma_chan_off()
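
After clearing DMA_CCTRL_ON the hardware may take time to actually stop, so line 625 polls for completion. A sketch of the tail, assuming <linux/iopoll.h> semantics; the delay/timeout values are placeholders:

	ret = readl_poll_timeout_atomic(d->base + DMA_CCTRL, val,
					!(val & DMA_CCTRL_ON), 0, 10000);
	if (ret)
		return ret;	/* channel failed to stop in time */

	return 0;
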
638 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_hw_cfg() local
641 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_desc_hw_cfg()
642 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_desc_hw_cfg()
643 writel(lower_32_bits(desc_base), d->base + DMA_CDBA); in ldma_chan_desc_hw_cfg()
649 ldma_update_bits(d, DMA_CDBA_MSB, in ldma_chan_desc_hw_cfg()
652 writel(desc_num, d->base + DMA_CDLEN); in ldma_chan_desc_hw_cfg()
653 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_desc_hw_cfg()
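
ldma_chan_desc_hw_cfg() programs a descriptor base that can exceed 32 bits: the low word goes to DMA_CDBA and the upper bits into a DMA_CDBA_MSB field (line 649). A sketch, assuming DMA_CDBA_MSB doubles as both register offset and field mask, as the listing suggests:

	spin_lock_irqsave(&d->dev_lock, flags);
	ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS);
	writel(lower_32_bits(desc_base), d->base + DMA_CDBA);
	ldma_update_bits(d, DMA_CDBA_MSB,
			 FIELD_PREP(DMA_CDBA_MSB, upper_32_bits(desc_base)),
			 DMA_CDBA_MSB);
	writel(desc_num, d->base + DMA_CDLEN);
	spin_unlock_irqrestore(&d->dev_lock, flags);
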
662 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_cfg() local
667 dev_err(d->dev, "Channel %d must allocate descriptor first\n", in ldma_chan_desc_cfg()
673 dev_err(d->dev, "Channel %d descriptor number out of range %d\n", in ldma_chan_desc_cfg()
696 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_reset() local
705 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_reset()
706 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_reset()
707 ldma_update_bits(d, DMA_CCTRL_RST, DMA_CCTRL_RST, DMA_CCTRL); in ldma_chan_reset()
708 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_reset()
710 ret = readl_poll_timeout_atomic(d->base + DMA_CCTRL, val, in ldma_chan_reset()
723 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_byte_offset_cfg() local
732 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_byte_offset_cfg()
733 ldma_update_bits(d, mask, val, DMA_C_BOFF); in ldma_chan_byte_offset_cfg()
739 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_data_endian_cfg() local
748 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_data_endian_cfg()
749 ldma_update_bits(d, mask, val, DMA_C_ENDIAN); in ldma_chan_data_endian_cfg()
755 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_desc_endian_cfg() local
764 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_desc_endian_cfg()
765 ldma_update_bits(d, mask, val, DMA_C_ENDIAN); in ldma_chan_desc_endian_cfg()
770 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_hdr_mode_cfg() local
783 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_hdr_mode_cfg()
784 ldma_update_bits(d, mask, val, DMA_C_HDRM); in ldma_chan_hdr_mode_cfg()
789 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_rxwr_np_cfg() local
799 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_rxwr_np_cfg()
800 ldma_update_bits(d, mask, val, DMA_CCTRL); in ldma_chan_rxwr_np_cfg()
805 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_abc_cfg() local
808 if (d->ver < DMA_VER32 || ldma_chan_tx(c)) in ldma_chan_abc_cfg()
814 ldma_update_bits(d, DMA_CS_MASK, c->nr, DMA_CS); in ldma_chan_abc_cfg()
815 ldma_update_bits(d, mask, val, DMA_CCTRL); in ldma_chan_abc_cfg()
821 struct ldma_dev *d; in ldma_port_cfg() local
824 d = p->ldev; in ldma_port_cfg()
828 if (d->ver == DMA_VER22) { in ldma_port_cfg()
849 spin_lock_irqsave(&d->dev_lock, flags); in ldma_port_cfg()
850 writel(p->portid, d->base + DMA_PS); in ldma_port_cfg()
851 writel(reg, d->base + DMA_PCTRL); in ldma_port_cfg()
852 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_port_cfg()
854 reg = readl(d->base + DMA_PCTRL); /* read back */ in ldma_port_cfg()
855 dev_dbg(d->dev, "Port Control 0x%08x configuration done\n", reg); in ldma_port_cfg()
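
Ports are windowed the same way channels are: DMA_PS selects the port, DMA_PCTRL then configures it, and the read-back only feeds the debug print. The version-dependent assembly of reg (lines 829-848) is elided by the search; the programming core is:

	spin_lock_irqsave(&d->dev_lock, flags);
	writel(p->portid, d->base + DMA_PS);	/* port window select */
	writel(reg, d->base + DMA_PCTRL);
	spin_unlock_irqrestore(&d->dev_lock, flags);
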
862 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_cfg() local
873 if (d->ver <= DMA_VER22) in ldma_chan_cfg()
876 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_cfg()
884 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_cfg()
892 static void ldma_dev_init(struct ldma_dev *d) in ldma_dev_init() argument
894 unsigned long ch_mask = (unsigned long)d->channels_mask; in ldma_dev_init()
900 spin_lock_init(&d->dev_lock); in ldma_dev_init()
901 ldma_dev_reset(d); in ldma_dev_init()
902 ldma_dev_cfg(d); in ldma_dev_init()
905 for (i = 0; i < d->port_nrs; i++) { in ldma_dev_init()
906 p = &d->ports[i]; in ldma_dev_init()
911 for_each_set_bit(j, &ch_mask, d->chan_nrs) { in ldma_dev_init()
912 c = &d->chans[j]; in ldma_dev_init()
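
ldma_dev_init() reads as: init the lock, reset, apply global config, then per-port and per-channel config. A plausible reconstruction (the elided per-port/per-channel calls are assumed to be ldma_port_cfg() and ldma_chan_cfg()); note that channels_mask is widened to unsigned long because for_each_set_bit() walks an unsigned long bitmap:

	static void ldma_dev_init(struct ldma_dev *d)
	{
		unsigned long ch_mask = (unsigned long)d->channels_mask;
		struct ldma_port *p;
		struct ldma_chan *c;
		u32 j;
		int i;

		spin_lock_init(&d->dev_lock);
		ldma_dev_reset(d);
		ldma_dev_cfg(d);

		for (i = 0; i < d->port_nrs; i++) {
			p = &d->ports[i];
			ldma_port_cfg(p);	/* assumed per-port call */
		}

		for_each_set_bit(j, &ch_mask, d->chan_nrs) {
			c = &d->chans[j];
			ldma_chan_cfg(c);	/* assumed per-channel call */
		}
	}
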
917 static int ldma_parse_dt(struct ldma_dev *d) in ldma_parse_dt() argument
919 struct fwnode_handle *fwnode = dev_fwnode(d->dev); in ldma_parse_dt()
924 d->flags |= DMA_EN_BYTE_EN; in ldma_parse_dt()
927 d->flags |= DMA_DBURST_WR; in ldma_parse_dt()
930 d->flags |= DMA_DFT_DRB; in ldma_parse_dt()
933 &d->pollcnt)) in ldma_parse_dt()
934 d->pollcnt = DMA_DFT_POLL_CNT; in ldma_parse_dt()
936 if (d->inst->chan_fc) in ldma_parse_dt()
937 d->flags |= DMA_CHAN_FLOW_CTL; in ldma_parse_dt()
939 if (d->inst->desc_fod) in ldma_parse_dt()
940 d->flags |= DMA_DESC_FOD; in ldma_parse_dt()
942 if (d->inst->desc_in_sram) in ldma_parse_dt()
943 d->flags |= DMA_DESC_IN_SRAM; in ldma_parse_dt()
945 if (d->inst->valid_desc_fetch_ack) in ldma_parse_dt()
946 d->flags |= DMA_VALID_DESC_FETCH_ACK; in ldma_parse_dt()
948 if (d->ver > DMA_VER22) { in ldma_parse_dt()
949 if (!d->port_nrs) in ldma_parse_dt()
952 for (i = 0; i < d->port_nrs; i++) { in ldma_parse_dt()
953 p = &d->ports[i]; in ldma_parse_dt()
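
ldma_parse_dt() folds firmware properties and per-instance capabilities into d->flags. A sketch of the property half; the property names are hypothetical, since the listing elides them:

	struct fwnode_handle *fwnode = dev_fwnode(d->dev);

	if (fwnode_property_read_bool(fwnode, "intel,dma-byte-en"))	/* name assumed */
		d->flags |= DMA_EN_BYTE_EN;

	if (fwnode_property_read_bool(fwnode, "intel,dma-dburst-wr"))	/* name assumed */
		d->flags |= DMA_DBURST_WR;

	if (fwnode_property_read_bool(fwnode, "intel,dma-drb"))		/* name assumed */
		d->flags |= DMA_DFT_DRB;

	if (fwnode_property_read_u32(fwnode, "intel,dma-poll-cnt", &d->pollcnt))
		d->pollcnt = DMA_DFT_POLL_CNT;	/* default when property absent */
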
981 dev_err(dev, "sg num %d exceed max %d\n", num, c->desc_num); in dma_alloc_desc_resource()
1004 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_chan_irq_en() local
1007 spin_lock_irqsave(&d->dev_lock, flags); in ldma_chan_irq_en()
1008 writel(c->nr, d->base + DMA_CS); in ldma_chan_irq_en()
1009 writel(DMA_CI_EOP, d->base + DMA_CIE); in ldma_chan_irq_en()
1010 writel(BIT(c->nr), d->base + DMA_IRNEN); in ldma_chan_irq_en()
1011 spin_unlock_irqrestore(&d->dev_lock, flags); in ldma_chan_irq_en()
1017 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_issue_pending() local
1020 if (d->ver == DMA_VER22) { in ldma_issue_pending()
1091 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_tx_status() local
1094 if (d->ver == DMA_VER22) in ldma_tx_status()
1103 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in dma_chan_irq() local
1107 writel(c->nr, d->base + DMA_CS); in dma_chan_irq()
1108 stat = readl(d->base + DMA_CIS); in dma_chan_irq()
1112 writel(readl(d->base + DMA_CIE) & ~DMA_CI_ALL, d->base + DMA_CIE); in dma_chan_irq()
1113 writel(stat, d->base + DMA_CIS); in dma_chan_irq()
1114 queue_work(d->wq, &c->work); in dma_chan_irq()
1119 struct ldma_dev *d = dev_id; in dma_interrupt() local
1124 irncr = readl(d->base + DMA_IRNCR); in dma_interrupt()
1126 dev_err(d->dev, "dummy interrupt\n"); in dma_interrupt()
1130 for_each_set_bit(cid, &irncr, d->chan_nrs) { in dma_interrupt()
1132 writel(readl(d->base + DMA_IRNEN) & ~BIT(cid), d->base + DMA_IRNEN); in dma_interrupt()
1134 writel(readl(d->base + DMA_IRNCR) | BIT(cid), d->base + DMA_IRNCR); in dma_interrupt()
1136 c = &d->chans[cid]; in dma_interrupt()
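
The top-level handler fans out per channel: each set bit in DMA_IRNCR names a requesting channel, which is masked in DMA_IRNEN (re-enabled later from ldma_chan_irq_en()), acked write-1-to-clear, then handed to dma_chan_irq(). A sketch with assumed return values:

	static irqreturn_t dma_interrupt(int irq, void *dev_id)
	{
		struct ldma_dev *d = dev_id;
		struct ldma_chan *c;
		unsigned long irncr;
		u32 cid;

		irncr = readl(d->base + DMA_IRNCR);
		if (!irncr) {
			dev_err(d->dev, "dummy interrupt\n");
			return IRQ_NONE;
		}

		for_each_set_bit(cid, &irncr, d->chan_nrs) {
			/* Mask this channel, then write-1-to-clear its request. */
			writel(readl(d->base + DMA_IRNEN) & ~BIT(cid), d->base + DMA_IRNEN);
			writel(readl(d->base + DMA_IRNCR) | BIT(cid), d->base + DMA_IRNCR);

			c = &d->chans[cid];
			dma_chan_irq(irq, c);	/* per-channel handling, line 1103 */
		}

		return IRQ_HANDLED;
	}
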
1162 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_prep_slave_sg() local
1173 if (d->ver > DMA_VER22) in ldma_prep_slave_sg()
1272 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_alloc_chan_resources() local
1276 if (d->ver > DMA_VER22) { in ldma_alloc_chan_resources()
1299 struct ldma_dev *d = to_ldma_dev(c->vchan.chan.device); in ldma_free_chan_resources() local
1301 if (d->ver == DMA_VER22) { in ldma_free_chan_resources()
1360 struct ldma_dev *d = ofdma->of_dma_data; in update_client_configs() local
1367 if (chan_id >= d->chan_nrs || port_id >= d->port_nrs) in update_client_configs()
1370 p = &d->ports[port_id]; in update_client_configs()
1371 c = &d->chans[chan_id]; in update_client_configs()
1374 if (d->ver == DMA_VER22) in update_client_configs()
1387 struct ldma_dev *d = ofdma->of_dma_data; in ldma_xlate() local
1401 return dma_get_slave_channel(&d->chans[chan_id].vchan.chan); in ldma_xlate()
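
ldma_xlate() is the of_dma translate callback: arg 0 picks the channel, and extra args (handled by update_client_configs(), line 1360) override per-client settings. A sketch under those assumptions:

	static struct dma_chan *ldma_xlate(struct of_phandle_args *spec,
					   struct of_dma *ofdma)
	{
		struct ldma_dev *d = ofdma->of_dma_data;
		u32 chan_id = spec->args[0];	/* arg layout assumed */

		if (!spec->args_count || chan_id >= d->chan_nrs)
			return NULL;

		/* Extra cells carry client config; assumed truthy on success. */
		if (spec->args_count > 1 && !update_client_configs(ofdma, spec))
			return NULL;

		return dma_get_slave_channel(&d->chans[chan_id].vchan.chan);
	}
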
1404 static void ldma_dma_init_v22(int i, struct ldma_dev *d) in ldma_dma_init_v22() argument
1408 c = &d->chans[i]; in ldma_dma_init_v22()
1412 snprintf(c->name, sizeof(c->name), "chan%d", c->nr); in ldma_dma_init_v22()
1415 vchan_init(&c->vchan, &d->dma_dev); in ldma_dma_init_v22()
1418 static void ldma_dma_init_v3X(int i, struct ldma_dev *d) in ldma_dma_init_v3X() argument
1422 c = &d->chans[i]; in ldma_dma_init_v3X()
1436 vchan_init(&c->vchan, &d->dma_dev); in ldma_dma_init_v3X()
1439 static int ldma_init_v22(struct ldma_dev *d, struct platform_device *pdev) in ldma_init_v22() argument
1443 ret = device_property_read_u32(d->dev, "dma-channels", &d->chan_nrs); in ldma_init_v22()
1445 dev_err(d->dev, "unable to read dma-channels property\n"); in ldma_init_v22()
1449 d->irq = platform_get_irq(pdev, 0); in ldma_init_v22()
1450 if (d->irq < 0) in ldma_init_v22()
1451 return d->irq; in ldma_init_v22()
1453 ret = devm_request_irq(&pdev->dev, d->irq, dma_interrupt, 0, in ldma_init_v22()
1454 DRIVER_NAME, d); in ldma_init_v22()
1458 d->wq = alloc_ordered_workqueue("dma_wq", WQ_MEM_RECLAIM | in ldma_init_v22()
1460 if (!d->wq) in ldma_init_v22()
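
The DMA_VER22 path uses a single shared interrupt plus an ordered workqueue for deferred completion work. A sketch of the listed sequence; the workqueue flag cut off at line 1458 is assumed to be WQ_HIGHPRI:

	d->irq = platform_get_irq(pdev, 0);
	if (d->irq < 0)
		return d->irq;

	ret = devm_request_irq(&pdev->dev, d->irq, dma_interrupt, 0,
			       DRIVER_NAME, d);
	if (ret)
		return ret;

	d->wq = alloc_ordered_workqueue("dma_wq", WQ_MEM_RECLAIM | WQ_HIGHPRI);
	if (!d->wq)
		return -ENOMEM;
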
1468 struct ldma_dev *d = data; in ldma_clk_disable() local
1470 clk_disable_unprepare(d->core_clk); in ldma_clk_disable()
1471 reset_control_assert(d->rst); in ldma_clk_disable()
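
ldma_clk_disable() undoes bring-up in reverse order and is registered from probe (line 1604) via devm_add_action_or_reset(), so it also runs automatically if probe fails at any later point. The pairing looks like:

	/* probe side */
	clk_prepare_enable(d->core_clk);
	reset_control_deassert(d->rst);
	ret = devm_add_action_or_reset(dev, ldma_clk_disable, d);

	/* teardown side, invoked by devres on detach or probe failure */
	static void ldma_clk_disable(void *data)
	{
		struct ldma_dev *d = data;

		clk_disable_unprepare(d->core_clk);
		reset_control_assert(d->rst);
	}
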
1571 struct ldma_dev *d; in intel_ldma_probe() local
1575 d = devm_kzalloc(dev, sizeof(*d), GFP_KERNEL); in intel_ldma_probe()
1576 if (!d) in intel_ldma_probe()
1580 d->dev = &pdev->dev; in intel_ldma_probe()
1582 d->inst = device_get_match_data(dev); in intel_ldma_probe()
1583 if (!d->inst) { in intel_ldma_probe()
1588 d->base = devm_platform_ioremap_resource(pdev, 0); in intel_ldma_probe()
1589 if (IS_ERR(d->base)) in intel_ldma_probe()
1590 return PTR_ERR(d->base); in intel_ldma_probe()
1593 d->core_clk = devm_clk_get_optional(dev, NULL); in intel_ldma_probe()
1594 if (IS_ERR(d->core_clk)) in intel_ldma_probe()
1595 return PTR_ERR(d->core_clk); in intel_ldma_probe()
1597 d->rst = devm_reset_control_get_optional(dev, NULL); in intel_ldma_probe()
1598 if (IS_ERR(d->rst)) in intel_ldma_probe()
1599 return PTR_ERR(d->rst); in intel_ldma_probe()
1601 clk_prepare_enable(d->core_clk); in intel_ldma_probe()
1602 reset_control_deassert(d->rst); in intel_ldma_probe()
1604 ret = devm_add_action_or_reset(dev, ldma_clk_disable, d); in intel_ldma_probe()
1606 dev_err(dev, "Failed to devm_add_action_or_reset, %d\n", ret); in intel_ldma_probe()
1610 id = readl(d->base + DMA_ID); in intel_ldma_probe()
1611 d->chan_nrs = FIELD_GET(DMA_ID_CHNR, id); in intel_ldma_probe()
1612 d->port_nrs = FIELD_GET(DMA_ID_PNR, id); in intel_ldma_probe()
1613 d->ver = FIELD_GET(DMA_ID_REV, id); in intel_ldma_probe()
1616 d->flags |= DMA_ADDR_36BIT; in intel_ldma_probe()
1622 d->flags |= DMA_DATA_128BIT; in intel_ldma_probe()
1630 if (d->ver == DMA_VER22) { in intel_ldma_probe()
1631 ret = ldma_init_v22(d, pdev); in intel_ldma_probe()
1636 ret = device_property_read_u32(dev, "dma-channel-mask", &d->channels_mask); in intel_ldma_probe()
1638 d->channels_mask = GENMASK(d->chan_nrs - 1, 0); in intel_ldma_probe()
1640 dma_dev = &d->dma_dev; in intel_ldma_probe()
1649 d->ports = devm_kcalloc(dev, d->port_nrs, sizeof(*p), GFP_KERNEL); in intel_ldma_probe()
1650 if (!d->ports) in intel_ldma_probe()
1654 d->chans = devm_kcalloc(d->dev, d->chan_nrs, sizeof(*c), GFP_KERNEL); in intel_ldma_probe()
1655 if (!d->chans) in intel_ldma_probe()
1658 for (i = 0; i < d->port_nrs; i++) { in intel_ldma_probe()
1659 p = &d->ports[i]; in intel_ldma_probe()
1661 p->ldev = d; in intel_ldma_probe()
1666 ch_mask = (unsigned long)d->channels_mask; in intel_ldma_probe()
1667 for_each_set_bit(j, &ch_mask, d->chan_nrs) { in intel_ldma_probe()
1668 if (d->ver == DMA_VER22) in intel_ldma_probe()
1669 ldma_dma_init_v22(j, d); in intel_ldma_probe()
1671 ldma_dma_init_v3X(j, d); in intel_ldma_probe()
1674 ret = ldma_parse_dt(d); in intel_ldma_probe()
1687 if (d->ver == DMA_VER22) { in intel_ldma_probe()
1698 platform_set_drvdata(pdev, d); in intel_ldma_probe()
1700 ldma_dev_init(d); in intel_ldma_probe()
1708 ret = of_dma_controller_register(pdev->dev.of_node, ldma_xlate, d); in intel_ldma_probe()
1715 dev_info(dev, "Init done - rev: %x, ports: %d channels: %d\n", d->ver, in intel_ldma_probe()
1716 d->port_nrs, d->chan_nrs); in intel_ldma_probe()
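
For reference, lines 1610-1613 decode the hardware self-description that the final dev_info() reports: channel count, port count, and IP revision are packed into DMA_ID and unpacked with the <linux/bitfield.h> helpers:

	id = readl(d->base + DMA_ID);
	d->chan_nrs = FIELD_GET(DMA_ID_CHNR, id);
	d->port_nrs = FIELD_GET(DMA_ID_PNR, id);
	d->ver = FIELD_GET(DMA_ID_REV, id);
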