Lines matching full:mt76
In mt7915_init_tx_queues():
   14  if (mtk_wed_device_active(&dev->mt76.mmio.wed)) {
   15  if (is_mt798x(&dev->mt76))
   21  wed = &dev->mt76.mmio.wed;
   24  return mt76_connac_init_tx_queues(phy->mt76, idx, n_desc, ring_base,
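Several groups below repeat the test seen at line 14: mtk_wed_device_active() reports whether a MediaTek WED (Wireless Ethernet Dispatch) offload engine is bound to the device, and the driver takes an offload-aware path when it is. A minimal user-space sketch of that gating, with the WED state reduced to a stub flag; the struct and helper names here are illustrative stand-ins, not the driver's:

#include <stdbool.h>
#include <stdio.h>

/* Illustrative stand-in for struct mtk_wed_device; the real
 * mtk_wed_device_active() is provided by the MediaTek WED layer. */
struct wed_sketch { bool active; };

static bool wed_active_sketch(const struct wed_sketch *wed)
{
	return wed->active;
}

int main(void)
{
	struct wed_sketch wed = { .active = true };

	/* Mirrors mt7915_init_tx_queues(): take the offload-aware
	 * queue setup only when a WED engine is attached. */
	if (wed_active_sketch(&wed))
		puts("WED path: hardware-offloaded tx rings");
	else
		puts("plain path: CPU-driven tx rings");
	return 0;
}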
In mt7915_poll_tx():
   32  dev = container_of(napi, struct mt7915_dev, mt76.tx_napi);
   34  mt76_connac_tx_cleanup(&dev->mt76);
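Line 32 is the standard kernel container_of() idiom: the NAPI poll callback only receives a pointer to the embedded napi member, and the driver recovers the enclosing mt7915_dev from it. A runnable user-space sketch of the same pointer arithmetic; the struct names are simplified stand-ins, and the kernel macro adds compile-time type checking on top of this:

#include <stddef.h>
#include <stdio.h>

/* Simplified stand-ins for the driver's nested structures. */
struct mt76_dev_sketch {
	int tx_napi;	/* stands in for struct napi_struct tx_napi */
};

struct mt7915_dev_sketch {
	struct mt76_dev_sketch mt76;
};

/* container_of(), reduced to its pointer-arithmetic core. */
#define container_of(ptr, type, member) \
	((type *)((char *)(ptr) - offsetof(type, member)))

int main(void)
{
	struct mt7915_dev_sketch dev = { .mt76.tx_napi = 42 };
	int *napi = &dev.mt76.tx_napi;

	/* Recover the outer device from the embedded member, exactly
	 * as line 32 does with dev->mt76.tx_napi. */
	struct mt7915_dev_sketch *d =
		container_of(napi, struct mt7915_dev_sketch, mt76.tx_napi);

	printf("%d\n", d->mt76.tx_napi);	/* prints 42 */
	return 0;
}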
In mt7915_dma_config():
   54  if (is_mt7915(&dev->mt76)) {
   87  if (is_mt7916(&dev->mt76) && mtk_wed_device_active(&dev->mt76.mmio.wed)) {
In __mt7915_dma_prefetch():
  139  if (!is_mt7915(&dev->mt76)) {
  154  if (is_mt7915(&dev->mt76)) {
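The is_mt7915()/is_mt7916()/is_mt798x() tests in mt7915_dma_config() and __mt7915_dma_prefetch() gate per-generation differences in ring and register layout. A self-contained sketch of that predicate pattern; the chip-ID values follow MediaTek's part numbering, but the bodies are illustrative, not copied from the driver:

#include <stdbool.h>
#include <stdio.h>

/* Illustrative stand-in for struct mt76_dev; the real driver derives
 * the chip ID from the bus (PCI/device tree) at probe time. */
struct mt76_dev_sketch {
	unsigned int chip_id;
};

static bool is_mt7915(const struct mt76_dev_sketch *dev)
{
	return dev->chip_id == 0x7915;
}

static bool is_mt7916(const struct mt76_dev_sketch *dev)
{
	return dev->chip_id == 0x7916;
}

/* MT7981 and MT7986 share a layout, hence one combined predicate. */
static bool is_mt798x(const struct mt76_dev_sketch *dev)
{
	return dev->chip_id == 0x7981 || dev->chip_id == 0x7986;
}

int main(void)
{
	struct mt76_dev_sketch dev = { .chip_id = 0x7986 };

	/* Mirrors the gating at lines 54, 87, 139, and 154. */
	if (!is_mt7915(&dev) && !is_mt7916(&dev) && is_mt798x(&dev))
		puts("MT798x ring/register layout");
	return 0;
}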
In mt7915_dma_disable():
  174  struct mt76_dev *mdev = &dev->mt76;
In mt7915_dma_start():
  257  struct mt76_dev *mdev = &dev->mt76;
  303  if (!dev->phy.mt76->band_idx)
  306  if (dev->dbdc_support || dev->phy.mt76->band_idx)
  309  if (mtk_wed_device_active(&dev->mt76.mmio.wed) && wed_reset) {
  314  if (!is_mt798x(&dev->mt76))
  323  mtk_wed_device_start(&dev->mt76.mmio.wed, wed_irq_mask);
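Lines 303 and 306 encode a convention that recurs at lines 510 and 549 below: band_idx == 0 marks the primary band, while dbdc_support (dual-band dual-concurrent) or a nonzero band_idx means a second set of rings must also be driven. A sketch of that gating with hypothetical stand-in types:

#include <stdbool.h>
#include <stdio.h>

/* Hypothetical stand-ins for the mt76_phy/mt7915_dev fields used
 * at lines 303 and 306. */
struct phy_sketch { unsigned int band_idx; };

struct dev_sketch {
	struct phy_sketch phy;
	bool dbdc_support;
};

static void start_band0_rings(void) { puts("band 0 rings started"); }
static void start_band1_rings(void) { puts("band 1 rings started"); }

int main(void)
{
	struct dev_sketch dev = { .phy.band_idx = 0, .dbdc_support = true };

	/* band_idx == 0: this device owns the primary band's rings. */
	if (!dev.phy.band_idx)
		start_band0_rings();

	/* DBDC, or a device representing the secondary band: the
	 * band-1 rings must run as well. */
	if (dev.dbdc_support || dev.phy.band_idx)
		start_band1_rings();
	return 0;
}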
In mt7915_dma_enable():
  336  struct mt76_dev *mdev = &dev->mt76;
In mt7915_dma_init():
  410  struct mt76_dev *mdev = &dev->mt76;
  417  mt76_dma_attach(&dev->mt76);
  445  MT_TXQ_ID(dev->phy.mt76->band_idx),
  453  MT_TXQ_ID(phy2->mt76->band_idx),
  461  ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WM,
  469  ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_WA,
  477  ret = mt76_init_mcu_queue(&dev->mt76, MT_MCUQ_FWDL,
  485  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU],
  503  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MCU_WA],
  510  if (!dev->phy.mt76->band_idx) {
  515  dev->mt76.rx_token_size += MT7915_RX_RING_SIZE;
  519  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN],
  542  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_MAIN_WA],
  549  if (dev->dbdc_support || dev->phy.mt76->band_idx) {
  554  dev->mt76.rx_token_size += MT7915_RX_RING_SIZE;
  559  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1],
  568  ret = mt76_queue_alloc(dev, &dev->mt76.q_rx[MT_RXQ_BAND1_WA],
  581  netif_napi_add_tx(dev->mt76.tx_napi_dev, &dev->mt76.tx_napi,
  583  napi_enable(&dev->mt76.tx_napi);
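Every ret = mt76_init_mcu_queue()/mt76_queue_alloc() hit above sits in the same early-return chain: allocate one ring, test ret, and bail out before touching the next. A generic sketch of that structure; the allocator names here are hypothetical placeholders:

#include <stdio.h>

/* Hypothetical allocators standing in for mt76_init_mcu_queue() and
 * mt76_queue_alloc(); each returns 0 on success or a negative errno. */
static int alloc_mcu_queue(void) { return 0; }
static int alloc_rx_queue(void)  { return 0; }
static int attach_tx_napi(void)  { return 0; }

static int dma_init_sketch(void)
{
	int ret;

	/* mt7915_dma_init() repeats this shape after every ring:
	 * allocate, test, return early, so no later step runs
	 * against a missing queue. */
	ret = alloc_mcu_queue();
	if (ret)
		return ret;

	ret = alloc_rx_queue();
	if (ret)
		return ret;

	/* Only once every ring exists is the tx NAPI context added
	 * and enabled (netif_napi_add_tx()/napi_enable() at lines
	 * 581 and 583). */
	return attach_tx_napi();
}

int main(void)
{
	printf("dma_init_sketch() = %d\n", dma_init_sketch());
	return 0;
}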
In mt7915_dma_reset():
  592  struct mt76_phy *mphy_ext = dev->mt76.phys[MT_BAND1];
  593  struct mtk_wed_device *wed = &dev->mt76.mmio.wed;
  597  for (i = 0; i < ARRAY_SIZE(dev->mt76.phy.q_tx); i++) {
  603  for (i = 0; i < ARRAY_SIZE(dev->mt76.q_mcu); i++)
  604  mt76_queue_tx_cleanup(dev, dev->mt76.q_mcu[i], true);
  606  mt76_for_each_q_rx(&dev->mt76, i)
  607  mt76_queue_rx_cleanup(dev, &dev->mt76.q_rx[i]);
  617  mt76_wed_dma_reset(&dev->mt76);
  621  mt76_dma_reset_tx_queue(&dev->mt76, dev->mphy.q_tx[i]);
  623  mt76_dma_reset_tx_queue(&dev->mt76, mphy_ext->q_tx[i]);
  627  mt76_queue_reset(dev, dev->mt76.q_mcu[i]);
  629  mt76_for_each_q_rx(&dev->mt76, i) {
  630  if (mt76_queue_is_wed_tx_free(&dev->mt76.q_rx[i]))
  633  mt76_queue_reset(dev, &dev->mt76.q_rx[i]);
  636  mt76_tx_status_check(&dev->mt76, true);
  638  mt76_for_each_q_rx(&dev->mt76, i)
  641  if (mtk_wed_device_active(wed) && is_mt7915(&dev->mt76))
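The reset path above is two-phase: lines 597-607 drain every tx, MCU, and rx queue first, and only then do lines 621-633 reset the descriptor rings. A reduced sketch of that cleanup-before-reset ordering; the queue type and helpers are illustrative:

#include <stddef.h>
#include <stdio.h>

#define ARRAY_SIZE(a) (sizeof(a) / sizeof((a)[0]))

/* Illustrative queue type; the real code walks dev->mt76.q_mcu and
 * dev->mt76.q_rx with the same ARRAY_SIZE()/for-each pattern. */
struct queue_sketch { int pending; };

static void queue_cleanup(struct queue_sketch *q) { q->pending = 0; }
static void queue_reset(struct queue_sketch *q)   { q->pending = -1; }

int main(void)
{
	struct queue_sketch q_mcu[3] = {{ 0 }}, q_rx[4] = {{ 0 }};
	size_t i;

	/* Phase 1: drain every queue (mt76_queue_tx_cleanup() and
	 * mt76_queue_rx_cleanup() at lines 603-607). */
	for (i = 0; i < ARRAY_SIZE(q_mcu); i++)
		queue_cleanup(&q_mcu[i]);
	for (i = 0; i < ARRAY_SIZE(q_rx); i++)
		queue_cleanup(&q_rx[i]);

	/* Phase 2: reset descriptor state only after draining
	 * (mt76_queue_reset()/mt76_dma_reset_tx_queue() at lines
	 * 621-633). */
	for (i = 0; i < ARRAY_SIZE(q_mcu); i++)
		queue_reset(&q_mcu[i]);
	for (i = 0; i < ARRAY_SIZE(q_rx); i++)
		queue_reset(&q_rx[i]);

	puts("queues drained and reset");
	return 0;
}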
In mt7915_dma_cleanup():
  654  mt76_dma_cleanup(&dev->mt76);