Lines Matching full:mt76

20 if (phy->mt76->band_idx == MT_BAND2) in mt7996_init_tx_queues()
26 return mt76_connac_init_tx_queues(phy->mt76, idx, n_desc, in mt7996_init_tx_queues()
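The two matches above come from the small TX-queue bring-up helper. A hedged reconstruction of its shape, built only from the matched lines plus the known mt76_connac_init_tx_queues() entry point (parameter names and the WED ring-slot details are assumptions, check dma.c for the authoritative body):

    static int mt7996_init_tx_queues(struct mt7996_phy *phy, int idx,
                                     int n_desc, int ring_base,
                                     struct mtk_wed_device *wed)
    {
            u32 flags = 0;

            if (mtk_wed_device_active(wed)) {
                    /* assumption: the third band's WED TX ring is slot 0 */
                    if (phy->mt76->band_idx == MT_BAND2)
                            flags = MT_WED_Q_TX(0);
                    else
                            flags = MT_WED_Q_TX(idx);
            }

            return mt76_connac_init_tx_queues(phy->mt76, idx, n_desc,
                                              ring_base, wed, flags);
    }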
34 dev = container_of(napi, struct mt7996_dev, mt76.tx_napi); in mt7996_poll_tx()
36 mt76_connac_tx_cleanup(&dev->mt76); in mt7996_poll_tx()
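The tx_napi matches sit in the usual mt76 TX-completion NAPI handler; a hedged sketch of that pattern (the interrupt mask name is an assumption, not taken from the matches):

    static int mt7996_poll_tx(struct napi_struct *napi, int budget)
    {
            struct mt7996_dev *dev;

            dev = container_of(napi, struct mt7996_dev, mt76.tx_napi);

            /* reap completed TX descriptors for all queues */
            mt76_connac_tx_cleanup(&dev->mt76);
            if (napi_complete_done(napi, 0))
                    mt7996_irq_enable(dev, MT_INT_TX_DONE_MCU);

            return 0;
    }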
64 if (is_mt7996(&dev->mt76)) { in mt7996_dma_config()
99 if (is_mt7996(&dev->mt76)) { in mt7996_dma_config()
138 queue = is_mt7996(&dev->mt76) ? MT_RXQ_BAND2_WA : MT_RXQ_BAND1_WA; in __mt7996_dma_prefetch()
143 queue = is_mt7996(&dev->mt76) ? MT_RXQ_BAND2 : MT_RXQ_BAND1; in __mt7996_dma_prefetch()
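is_mt7996() is what steers the tri-band MT7996 onto the BAND2 rings in the two ternaries above, while two-band siblings such as MT7992 fall back to BAND1. A sketch of the helper, assuming the usual mt76 chip-ID check from mt7996.h:

    static inline bool is_mt7996(struct mt76_dev *dev)
    {
            /* assumption: 0x7990 is the MT7996 chip ID */
            return mt76_chip(dev) == 0x7990;
    }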
221 struct mtk_wed_device *wed = &dev->mt76.mmio.wed; in mt7996_dma_start()
363 if (mtk_wed_device_active(&dev->mt76.mmio.wed) && in mt7996_dma_enable()
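mtk_wed_device_active(), used throughout the matches below, is a cheap inline from include/linux/soc/mediatek/mtk_wed.h; roughly (when CONFIG_NET_MEDIATEK_SOC_WED is off, the stub simply returns false):

    static inline bool mtk_wed_device_active(struct mtk_wed_device *dev)
    {
            /* a WED device counts as active once its ops are attached */
            return !!dev->ops;
    }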
378 struct mt76_dev *mdev = &dev->mt76; in mt7996_dma_rro_init()
444 struct mtk_wed_device *wed = &dev->mt76.mmio.wed; in mt7996_dma_init()
445 struct mtk_wed_device *wed_hif2 = &dev->mt76.mmio.wed_hif2; in mt7996_dma_init()
452 mt76_dma_attach(&dev->mt76); in mt7996_dma_init()
469 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM, in mt7996_dma_init()
477 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WA, in mt7996_dma_init()
485 ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL, in mt7996_dma_init()
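The three MCU queue matches (WM, WA, FWDL) follow one pattern; a hedged sketch of the first, with the ID/base macros and ring size taken as mt7996.h-style placeholders rather than verified values:

    /* command ring toward the WM firmware */
    ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM,
                              MT_MCUQ_ID(MT_MCUQ_WM),
                              MT7996_TX_MCU_RING_SIZE,
                              MT_MCUQ_RING_BASE(MT_MCUQ_WM));
    if (ret)
            return ret;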
493 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU], in mt7996_dma_init()
502 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA], in mt7996_dma_init()
512 dev->mt76.q_rx[MT_RXQ_MAIN].flags = MT_WED_Q_RX(0); in mt7996_dma_init()
513 dev->mt76.q_rx[MT_RXQ_MAIN].wed = wed; in mt7996_dma_init()
516 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN], in mt7996_dma_init()
526 dev->mt76.q_rx[MT_RXQ_MAIN_WA].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
527 dev->mt76.q_rx[MT_RXQ_MAIN_WA].wed = wed; in mt7996_dma_init()
530 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN_WA], in mt7996_dma_init()
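The MT_RXQ_MAIN / MT_RXQ_MAIN_WA matches show the recurring WED RX idiom: tag the queue with a WED flag and back-pointer only when offload is live, then allocate the ring either way. A hedged sketch for the data queue (the RX-capability guard and the size macros are assumptions):

    if (mtk_wed_device_active(wed) && mtk_wed_get_rx_capa(wed)) {
            dev->mt76.q_rx[MT_RXQ_MAIN].flags = MT_WED_Q_RX(0);
            dev->mt76.q_rx[MT_RXQ_MAIN].wed = wed;
    }

    ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN],
                           MT_RXQ_ID(MT_RXQ_MAIN),
                           MT7996_RX_RING_SIZE,
                           MT_RX_BUF_SIZE,
                           MT_RXQ_RING_BASE(MT_RXQ_MAIN));
    if (ret)
            return ret;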
541 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND2], in mt7996_dma_init()
553 dev->mt76.q_rx[MT_RXQ_BAND2_WA].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
554 dev->mt76.q_rx[MT_RXQ_BAND2_WA].wed = wed_hif2; in mt7996_dma_init()
557 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND2_WA], in mt7996_dma_init()
567 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1], in mt7996_dma_init()
577 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1_WA], in mt7996_dma_init()
589 dev->mt76.q_rx[MT_RXQ_RRO_BAND0].flags = in mt7996_dma_init()
591 dev->mt76.q_rx[MT_RXQ_RRO_BAND0].wed = wed; in mt7996_dma_init()
592 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_RRO_BAND0], in mt7996_dma_init()
601 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
602 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0].wed = wed; in mt7996_dma_init()
604 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_TXFREE_BAND0], in mt7996_dma_init()
614 dev->mt76.q_rx[MT_RXQ_RRO_BAND2].flags = in mt7996_dma_init()
616 dev->mt76.q_rx[MT_RXQ_RRO_BAND2].wed = wed; in mt7996_dma_init()
617 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_RRO_BAND2], in mt7996_dma_init()
627 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2].flags = MT_WED_Q_TXFREE; in mt7996_dma_init()
628 dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2].wed = wed_hif2; in mt7996_dma_init()
630 ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_TXFREE_BAND2], in mt7996_dma_init()
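The two truncated "flags =" matches (MT_RXQ_RRO_BAND0 and MT_RXQ_RRO_BAND2) assign a composite WED RRO flag that the single-line excerpt cuts off. A hedged completion for the BAND0 case, using the flag macros from mt76.h (treat the exact combination as an assumption):

    dev->mt76.q_rx[MT_RXQ_RRO_BAND0].flags =
            MT_WED_RRO_Q_DATA(0) | MT_QFLAG_WED_RRO_EN;
    dev->mt76.q_rx[MT_RXQ_RRO_BAND0].wed = wed;
    ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_RRO_BAND0],
                           MT_RXQ_ID(MT_RXQ_RRO_BAND0),
                           MT7996_RX_RING_SIZE,
                           MT7996_RX_BUF_SIZE,
                           MT_RXQ_RING_BASE(MT_RXQ_RRO_BAND0));
    if (ret)
            return ret;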
644 netif_napi_add_tx(dev->mt76.tx_napi_dev, &dev->mt76.tx_napi, in mt7996_dma_init()
646 napi_enable(&dev->mt76.tx_napi); in mt7996_dma_init()
655 struct mt76_phy *phy2 = dev->mt76.phys[MT_BAND1]; in mt7996_dma_reset()
656 struct mt76_phy *phy3 = dev->mt76.phys[MT_BAND2]; in mt7996_dma_reset()
680 mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true); in mt7996_dma_reset()
682 mt76_for_each_q_rx(&dev->mt76, i) in mt7996_dma_reset()
683 mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]); in mt7996_dma_reset()
685 mt76_tx_status_check(&dev->mt76, true); in mt7996_dma_reset()
691 if (dev->hif2 && mtk_wed_device_active(&dev->mt76.mmio.wed_hif2)) in mt7996_dma_reset()
692 mtk_wed_device_dma_reset(&dev->mt76.mmio.wed_hif2); in mt7996_dma_reset()
694 if (mtk_wed_device_active(&dev->mt76.mmio.wed)) in mt7996_dma_reset()
695 mtk_wed_device_dma_reset(&dev->mt76.mmio.wed); in mt7996_dma_reset()
698 mt76_wed_dma_reset(&dev->mt76); in mt7996_dma_reset()
702 mt76_dma_reset_tx_queue(&dev->mt76, dev->mphy.q_tx[i]); in mt7996_dma_reset()
704 mt76_dma_reset_tx_queue(&dev->mt76, phy2->q_tx[i]); in mt7996_dma_reset()
706 mt76_dma_reset_tx_queue(&dev->mt76, phy3->q_tx[i]); in mt7996_dma_reset()
710 mt76_queue_reset(dev, dev->mt76.q_mcu[i]); in mt7996_dma_reset()
712 mt76_for_each_q_rx(&dev->mt76, i) { in mt7996_dma_reset()
713 if (mtk_wed_device_active(&dev->mt76.mmio.wed)) in mt7996_dma_reset()
714 if (mt76_queue_is_wed_rro(&dev->mt76.q_rx[i]) || in mt7996_dma_reset()
715 mt76_queue_is_wed_tx_free(&dev->mt76.q_rx[i])) in mt7996_dma_reset()
718 mt76_queue_reset(dev, &dev->mt76.q_rx[i]); in mt7996_dma_reset()
721 mt76_tx_status_check(&dev->mt76, true); in mt7996_dma_reset()
723 mt76_for_each_q_rx(&dev->mt76, i) in mt7996_dma_reset()
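The final mt76_for_each_q_rx match has its loop body elided by the single-line excerpt; in this driver family the tail of the reset path is typically a one-line per-queue RX reset:

    mt76_for_each_q_rx(&dev->mt76, i)
            mt76_queue_rx_reset(dev, i);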
733 mt76_dma_cleanup(&dev->mt76); in mt7996_dma_cleanup()
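The lone cleanup match is the tail of a two-step teardown: quiesce the WFDMA engine first, then free all descriptor rings. A hedged sketch (mt7996_dma_disable() and its reset argument follow the driver's naming convention but are assumptions here):

    void mt7996_dma_cleanup(struct mt7996_dev *dev)
    {
            /* stop the DMA engine before tearing down the rings */
            mt7996_dma_disable(dev, true);

            mt76_dma_cleanup(&dev->mt76);
    }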