Lines Matching +full:dma +full:- +full:poll +full:- +full:cnt
1 // SPDX-License-Identifier: GPL-2.0 OR BSD-3-Clause
25 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_get_phy_offset_by_link_speed()
26 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_get_phy_offset_by_link_speed()
41 return -EFAULT; in rtw89_pci_get_phy_offset_by_link_speed()
65 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_dma_recalc()
66 u32 cnt, cur_rp, wp, rp, len; in rtw89_pci_dma_recalc() local
68 rp = bd_ring->rp; in rtw89_pci_dma_recalc()
69 wp = bd_ring->wp; in rtw89_pci_dma_recalc()
70 len = bd_ring->len; in rtw89_pci_dma_recalc()
74 cnt = cur_rp >= rp ? cur_rp - rp : len - (rp - cur_rp); in rtw89_pci_dma_recalc()
76 if (info->rx_ring_eq_is_full) in rtw89_pci_dma_recalc()
79 cnt = cur_rp >= wp ? cur_rp - wp : len - (wp - cur_rp); in rtw89_pci_dma_recalc()
82 bd_ring->rp = cur_rp; in rtw89_pci_dma_recalc()
84 return cnt; in rtw89_pci_dma_recalc()
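The matched lines from rtw89_pci_dma_recalc() measure how far the hardware index has moved around a circular descriptor ring relative to the cached read pointer (TX) or write pointer (RX). A minimal standalone sketch of that modular distance, using an illustrative name (ring_distance) rather than the driver's structures:

#include <stdio.h>

/* Circular distance from index `from` to index `to` in a ring of `len`
 * slots; mirrors the cur >= old ? cur - old : len - (old - cur) pattern
 * in the matched lines.
 */
static unsigned int ring_distance(unsigned int from, unsigned int to,
				  unsigned int len)
{
	return to >= from ? to - from : len - (from - to);
}

int main(void)
{
	printf("%u\n", ring_distance(250, 4, 256)); /* hardware wrapped: 10 */
	printf("%u\n", ring_distance(4, 250, 256)); /* no wrap: 246 */
	return 0;
}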
90 struct rtw89_pci_dma_ring *bd_ring = &tx_ring->bd_ring; in rtw89_pci_txbd_recalc()
91 u32 addr_idx = bd_ring->addr.idx; in rtw89_pci_txbd_recalc()
92 u32 cnt, idx; in rtw89_pci_txbd_recalc() local
95 cnt = rtw89_pci_dma_recalc(rtwdev, bd_ring, idx, true); in rtw89_pci_txbd_recalc()
97 return cnt; in rtw89_pci_txbd_recalc()
102 u32 cnt, bool release_all) in rtw89_pci_release_fwcmd() argument
108 while (cnt--) { in rtw89_pci_release_fwcmd()
109 skb = skb_dequeue(&rtwpci->h2c_queue); in rtw89_pci_release_fwcmd()
111 rtw89_err(rtwdev, "failed to pre-release fwcmd\n"); in rtw89_pci_release_fwcmd()
114 skb_queue_tail(&rtwpci->h2c_release_queue, skb); in rtw89_pci_release_fwcmd()
117 qlen = skb_queue_len(&rtwpci->h2c_release_queue); in rtw89_pci_release_fwcmd()
119 qlen = qlen > RTW89_PCI_MULTITAG ? qlen - RTW89_PCI_MULTITAG : 0; in rtw89_pci_release_fwcmd()
121 while (qlen--) { in rtw89_pci_release_fwcmd()
122 skb = skb_dequeue(&rtwpci->h2c_release_queue); in rtw89_pci_release_fwcmd()
128 dma_unmap_single(&rtwpci->pdev->dev, tx_data->dma, skb->len, in rtw89_pci_release_fwcmd()
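rtw89_pci_release_fwcmd() first parks completed H2C skbs on h2c_release_queue, then frees only the entries older than the newest RTW89_PCI_MULTITAG ones unless release_all is set. A hedged sketch of that retention arithmetic; KEEP_LAST is a stand-in, not the driver's constant value:

#include <stdio.h>

#define KEEP_LAST 8 /* stand-in for RTW89_PCI_MULTITAG */

/* Number of queued entries that may be freed now, keeping the newest
 * KEEP_LAST around unless the caller asks to release everything.
 */
static unsigned int releasable(unsigned int queued, int release_all)
{
	if (release_all)
		return queued;
	return queued > KEEP_LAST ? queued - KEEP_LAST : 0;
}

int main(void)
{
	printf("%u\n", releasable(3, 0));  /* 0: all entries still recent */
	printf("%u\n", releasable(20, 0)); /* 12: free the oldest twelve */
	printf("%u\n", releasable(20, 1)); /* 20: flush everything */
	return 0;
}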
137 struct rtw89_pci_tx_ring *tx_ring = &rtwpci->tx_rings[RTW89_TXCH_CH12]; in rtw89_pci_reclaim_tx_fwcmd()
138 u32 cnt; in rtw89_pci_reclaim_tx_fwcmd() local
140 cnt = rtw89_pci_txbd_recalc(rtwdev, tx_ring); in rtw89_pci_reclaim_tx_fwcmd()
141 if (!cnt) in rtw89_pci_reclaim_tx_fwcmd()
143 rtw89_pci_release_fwcmd(rtwdev, rtwpci, cnt, false); in rtw89_pci_reclaim_tx_fwcmd()
149 struct rtw89_pci_dma_ring *bd_ring = &rx_ring->bd_ring; in rtw89_pci_rxbd_recalc()
150 u32 addr_idx = bd_ring->addr.idx; in rtw89_pci_rxbd_recalc()
151 u32 cnt, idx; in rtw89_pci_rxbd_recalc() local
154 cnt = rtw89_pci_dma_recalc(rtwdev, bd_ring, idx, false); in rtw89_pci_rxbd_recalc()
156 return cnt; in rtw89_pci_rxbd_recalc()
163 dma_addr_t dma; in rtw89_pci_sync_skb_for_cpu() local
166 dma = rx_info->dma; in rtw89_pci_sync_skb_for_cpu()
167 dma_sync_single_for_cpu(rtwdev->dev, dma, RTW89_PCI_RX_BUF_SIZE, in rtw89_pci_sync_skb_for_cpu()
175 dma_addr_t dma; in rtw89_pci_sync_skb_for_device() local
178 dma = rx_info->dma; in rtw89_pci_sync_skb_for_device()
179 dma_sync_single_for_device(rtwdev->dev, dma, RTW89_PCI_RX_BUF_SIZE, in rtw89_pci_sync_skb_for_device()
190 rxbd_info = (struct rtw89_pci_rxbd_info *)skb->data; in rtw89_pci_rxbd_info_update()
191 info = rxbd_info->dword; in rtw89_pci_rxbd_info_update()
193 rx_info->fs = le32_get_bits(info, RTW89_PCI_RXBD_FS); in rtw89_pci_rxbd_info_update()
194 rx_info->ls = le32_get_bits(info, RTW89_PCI_RXBD_LS); in rtw89_pci_rxbd_info_update()
195 rx_info->len = le32_get_bits(info, RTW89_PCI_RXBD_WRITE_SIZE); in rtw89_pci_rxbd_info_update()
196 rx_info->tag = le32_get_bits(info, RTW89_PCI_RXBD_TAG); in rtw89_pci_rxbd_info_update()
204 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_validate_rx_tag()
207 if (!info->check_rx_tag) in rtw89_pci_validate_rx_tag()
211 if (rx_ring->target_rx_tag == 0) in rtw89_pci_validate_rx_tag()
214 target_rx_tag = rx_ring->target_rx_tag; in rtw89_pci_validate_rx_tag()
216 if (rx_info->tag != target_rx_tag) { in rtw89_pci_validate_rx_tag()
218 rx_info->tag, target_rx_tag); in rtw89_pci_validate_rx_tag()
219 return -EAGAIN; in rtw89_pci_validate_rx_tag()
239 if (ret != -EAGAIN) in rtw89_pci_sync_skb_for_device_and_validate_rx_info()
241 } while (rx_tag_retry--); in rtw89_pci_sync_skb_for_device_and_validate_rx_info()
244 rx_ring->target_rx_tag = rx_info->tag + 1; in rtw89_pci_sync_skb_for_device_and_validate_rx_info()
251 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ctrl_txdma_ch_ax()
252 const struct rtw89_reg_def *dma_stop1 = &info->dma_stop1; in rtw89_pci_ctrl_txdma_ch_ax()
253 const struct rtw89_reg_def *dma_stop2 = &info->dma_stop2; in rtw89_pci_ctrl_txdma_ch_ax()
256 rtw89_write32_clr(rtwdev, dma_stop1->addr, dma_stop1->mask); in rtw89_pci_ctrl_txdma_ch_ax()
257 if (dma_stop2->addr) in rtw89_pci_ctrl_txdma_ch_ax()
258 rtw89_write32_clr(rtwdev, dma_stop2->addr, dma_stop2->mask); in rtw89_pci_ctrl_txdma_ch_ax()
260 rtw89_write32_set(rtwdev, dma_stop1->addr, dma_stop1->mask); in rtw89_pci_ctrl_txdma_ch_ax()
261 if (dma_stop2->addr) in rtw89_pci_ctrl_txdma_ch_ax()
262 rtw89_write32_set(rtwdev, dma_stop2->addr, dma_stop2->mask); in rtw89_pci_ctrl_txdma_ch_ax()
268 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ctrl_txdma_fw_ch_ax()
269 const struct rtw89_reg_def *dma_stop1 = &info->dma_stop1; in rtw89_pci_ctrl_txdma_fw_ch_ax()
272 rtw89_write32_clr(rtwdev, dma_stop1->addr, B_AX_STOP_CH12); in rtw89_pci_ctrl_txdma_fw_ch_ax()
274 rtw89_write32_set(rtwdev, dma_stop1->addr, B_AX_STOP_CH12); in rtw89_pci_ctrl_txdma_fw_ch_ax()
284 u32 copy_len = rx_info->len - offset; in rtw89_skb_put_rx_data()
289 rx_info->len, desc_info->pkt_size, offset, fs, ls); in rtw89_skb_put_rx_data()
291 skb->data, rx_info->len); in rtw89_skb_put_rx_data()
292 /* length of a single segment skb is desc_info->pkt_size */ in rtw89_skb_put_rx_data()
294 copy_len = desc_info->pkt_size; in rtw89_skb_put_rx_data()
301 skb_put_data(new, skb->data + offset, copy_len); in rtw89_skb_put_rx_data()
309 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_get_rx_skb_idx()
310 u32 wp = bd_ring->wp; in rtw89_pci_get_rx_skb_idx()
312 if (!info->rx_ring_eq_is_full) in rtw89_pci_get_rx_skb_idx()
315 if (++wp >= bd_ring->len) in rtw89_pci_get_rx_skb_idx()
324 struct rtw89_rx_desc_info *desc_info = &rx_ring->diliver_desc; in rtw89_pci_rxbd_deliver_skbs()
325 struct rtw89_pci_dma_ring *bd_ring = &rx_ring->bd_ring; in rtw89_pci_rxbd_deliver_skbs()
326 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_rxbd_deliver_skbs()
327 struct sk_buff *new = rx_ring->diliver_skb; in rtw89_pci_rxbd_deliver_skbs()
333 u32 cnt = 1; in rtw89_pci_rxbd_deliver_skbs() local
338 skb = rx_ring->buf[skb_idx]; in rtw89_pci_rxbd_deliver_skbs()
343 bd_ring->wp, ret); in rtw89_pci_rxbd_deliver_skbs()
348 fs = info->no_rxbd_fs ? !new : rx_info->fs; in rtw89_pci_rxbd_deliver_skbs()
349 ls = rx_info->ls; in rtw89_pci_rxbd_deliver_skbs()
353 "unexpected fs/ls=%d/%d tag=%u len=%u new->len=%u\n", in rtw89_pci_rxbd_deliver_skbs()
354 fs, ls, rx_info->tag, rx_info->len, new ? new->len : 0); in rtw89_pci_rxbd_deliver_skbs()
362 if (desc_info->ready) { in rtw89_pci_rxbd_deliver_skbs()
367 rtw89_chip_query_rxdesc(rtwdev, desc_info, skb->data, rxinfo_size); in rtw89_pci_rxbd_deliver_skbs()
369 new = rtw89_alloc_skb_for_rx(rtwdev, desc_info->pkt_size); in rtw89_pci_rxbd_deliver_skbs()
373 rx_ring->diliver_skb = new; in rtw89_pci_rxbd_deliver_skbs()
376 offset = desc_info->offset + desc_info->rxd_len; in rtw89_pci_rxbd_deliver_skbs()
389 if (!desc_info->ready) { in rtw89_pci_rxbd_deliver_skbs()
395 rx_ring->diliver_skb = NULL; in rtw89_pci_rxbd_deliver_skbs()
396 desc_info->ready = false; in rtw89_pci_rxbd_deliver_skbs()
399 return cnt; in rtw89_pci_rxbd_deliver_skbs()
407 rx_ring->diliver_skb = NULL; in rtw89_pci_rxbd_deliver_skbs()
408 desc_info->ready = false; in rtw89_pci_rxbd_deliver_skbs()
410 return cnt; in rtw89_pci_rxbd_deliver_skbs()
415 u32 cnt) in rtw89_pci_rxbd_deliver() argument
417 struct rtw89_pci_dma_ring *bd_ring = &rx_ring->bd_ring; in rtw89_pci_rxbd_deliver()
420 while (cnt && rtwdev->napi_budget_countdown > 0) { in rtw89_pci_rxbd_deliver()
426 rtw89_pci_rxbd_increase(rx_ring, cnt); in rtw89_pci_rxbd_deliver()
430 cnt -= rx_cnt; in rtw89_pci_rxbd_deliver()
433 rtw89_write16(rtwdev, bd_ring->addr.idx, bd_ring->wp); in rtw89_pci_rxbd_deliver()
440 int countdown = rtwdev->napi_budget_countdown; in rtw89_pci_poll_rxq_dma()
441 u32 cnt; in rtw89_pci_poll_rxq_dma() local
443 rx_ring = &rtwpci->rx_rings[RTW89_RXCH_RXQ]; in rtw89_pci_poll_rxq_dma()
445 cnt = rtw89_pci_rxbd_recalc(rtwdev, rx_ring); in rtw89_pci_poll_rxq_dma()
446 if (!cnt) in rtw89_pci_poll_rxq_dma()
449 cnt = min_t(u32, budget, cnt); in rtw89_pci_poll_rxq_dma()
451 rtw89_pci_rxbd_deliver(rtwdev, rx_ring, cnt); in rtw89_pci_poll_rxq_dma()
454 if (rtwdev->napi_budget_countdown <= 0) in rtw89_pci_poll_rxq_dma()
457 return budget - countdown; in rtw89_pci_poll_rxq_dma()
472 if (info->flags & IEEE80211_TX_CTL_NO_ACK) in rtw89_pci_tx_status()
473 info->flags |= IEEE80211_TX_STAT_NOACK_TRANSMITTED; in rtw89_pci_tx_status()
475 info->flags |= IEEE80211_TX_STAT_ACK; in rtw89_pci_tx_status()
476 tx_ring->tx_acked++; in rtw89_pci_tx_status()
478 if (info->flags & IEEE80211_TX_CTL_REQ_TX_STATUS) in rtw89_pci_tx_status()
483 tx_ring->tx_retry_lmt++; in rtw89_pci_tx_status()
486 tx_ring->tx_life_time++; in rtw89_pci_tx_status()
489 tx_ring->tx_mac_id_drop++; in rtw89_pci_tx_status()
497 ieee80211_tx_status_ni(rtwdev->hw, skb); in rtw89_pci_tx_status()
503 u32 cnt; in rtw89_pci_reclaim_txbd() local
505 cnt = rtw89_pci_txbd_recalc(rtwdev, tx_ring); in rtw89_pci_reclaim_txbd()
506 while (cnt--) { in rtw89_pci_reclaim_txbd()
507 txwd = list_first_entry_or_null(&tx_ring->busy_pages, struct rtw89_pci_tx_wd, list); in rtw89_pci_reclaim_txbd()
513 list_del_init(&txwd->list); in rtw89_pci_reclaim_txbd()
516 if (skb_queue_len(&txwd->queue) == 0) in rtw89_pci_reclaim_txbd()
524 struct rtw89_pci_tx_wd_ring *wd_ring = &tx_ring->wd_ring; in rtw89_pci_release_busy_txwd()
528 for (i = 0; i < wd_ring->page_num; i++) { in rtw89_pci_release_busy_txwd()
529 txwd = list_first_entry_or_null(&tx_ring->busy_pages, struct rtw89_pci_tx_wd, list); in rtw89_pci_release_busy_txwd()
533 list_del_init(&txwd->list); in rtw89_pci_release_busy_txwd()
542 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_release_txwd_skb()
545 u8 txch = tx_ring->txch; in rtw89_pci_release_txwd_skb()
547 if (!list_empty(&txwd->list)) { in rtw89_pci_release_txwd_skb()
552 if (!rtwpci->low_power && !list_empty(&txwd->list)) in rtw89_pci_release_txwd_skb()
557 skb_queue_walk_safe(&txwd->queue, skb, tmp) { in rtw89_pci_release_txwd_skb()
558 skb_unlink(skb, &txwd->queue); in rtw89_pci_release_txwd_skb()
561 dma_unmap_single(&rtwpci->pdev->dev, tx_data->dma, skb->len, in rtw89_pci_release_txwd_skb()
567 if (list_empty(&txwd->list)) in rtw89_pci_release_txwd_skb()
574 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_release_rpp()
581 seq = le32_get_bits(rpp->dword, RTW89_PCI_RPP_SEQ); in rtw89_pci_release_rpp()
582 qsel = le32_get_bits(rpp->dword, RTW89_PCI_RPP_QSEL); in rtw89_pci_release_rpp()
583 tx_status = le32_get_bits(rpp->dword, RTW89_PCI_RPP_TX_STATUS); in rtw89_pci_release_rpp()
591 tx_ring = &rtwpci->tx_rings[txch]; in rtw89_pci_release_rpp()
592 wd_ring = &tx_ring->wd_ring; in rtw89_pci_release_rpp()
593 txwd = &wd_ring->pages[seq]; in rtw89_pci_release_rpp()
601 struct rtw89_pci_tx_wd_ring *wd_ring = &tx_ring->wd_ring; in rtw89_pci_release_pending_txwd_skb()
605 for (i = 0; i < wd_ring->page_num; i++) { in rtw89_pci_release_pending_txwd_skb()
606 txwd = &wd_ring->pages[i]; in rtw89_pci_release_pending_txwd_skb()
608 if (!list_empty(&txwd->list)) in rtw89_pci_release_pending_txwd_skb()
619 struct rtw89_pci_dma_ring *bd_ring = &rx_ring->bd_ring; in rtw89_pci_release_tx_skbs()
624 u32 cnt = 0; in rtw89_pci_release_tx_skbs() local
632 skb = rx_ring->buf[skb_idx]; in rtw89_pci_release_tx_skbs()
637 bd_ring->wp, ret); in rtw89_pci_release_tx_skbs()
642 if (!rx_info->fs || !rx_info->ls) { in rtw89_pci_release_tx_skbs()
644 return cnt; in rtw89_pci_release_tx_skbs()
647 rtw89_chip_query_rxdesc(rtwdev, &desc_info, skb->data, rxinfo_size); in rtw89_pci_release_tx_skbs()
651 for (; offset + rpp_size <= rx_info->len; offset += rpp_size) { in rtw89_pci_release_tx_skbs()
652 rpp = (struct rtw89_pci_rpp_fmt *)(skb->data + offset); in rtw89_pci_release_tx_skbs()
658 cnt++; in rtw89_pci_release_tx_skbs()
660 return cnt; in rtw89_pci_release_tx_skbs()
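rtw89_pci_release_tx_skbs() walks fixed-size release-report (RPP) records packed back to back in one RX buffer, stopping once the next record would overrun the reported length (in the driver the walk starts after the RX descriptor, not at offset 0). A standalone sketch of that bounds-checked walk over a made-up record layout, not the real struct rtw89_pci_rpp_fmt:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct fake_rpp {             /* placeholder layout, not the driver's */
	uint16_t seq;
	uint16_t status;
};

/* Visit every whole record that fits inside buf[0..len). */
static unsigned int walk_records(const uint8_t *buf, size_t len)
{
	const size_t rec = sizeof(struct fake_rpp);
	struct fake_rpp rpp;
	unsigned int cnt = 0;
	size_t offset;

	for (offset = 0; offset + rec <= len; offset += rec) {
		memcpy(&rpp, buf + offset, rec); /* avoid unaligned reads */
		printf("seq=%u status=%u\n",
		       (unsigned int)rpp.seq, (unsigned int)rpp.status);
		cnt++;
	}
	return cnt;
}

int main(void)
{
	uint8_t buf[10] = { 1, 0, 0, 0, 2, 0, 1, 0, 0xff, 0xff };

	/* ten bytes hold two four-byte records; the two-byte tail is ignored */
	return walk_records(buf, sizeof(buf)) == 2 ? 0 : 1;
}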
669 u32 cnt) in rtw89_pci_release_tx() argument
671 struct rtw89_pci_dma_ring *bd_ring = &rx_ring->bd_ring; in rtw89_pci_release_tx()
674 while (cnt) { in rtw89_pci_release_tx()
675 release_cnt = rtw89_pci_release_tx_skbs(rtwdev, rx_ring, cnt); in rtw89_pci_release_tx()
680 rtw89_pci_rxbd_increase(rx_ring, cnt); in rtw89_pci_release_tx()
684 cnt -= release_cnt; in rtw89_pci_release_tx()
687 rtw89_write16(rtwdev, bd_ring->addr.idx, bd_ring->wp); in rtw89_pci_release_tx()
694 u32 cnt; in rtw89_pci_poll_rpq_dma() local
697 rx_ring = &rtwpci->rx_rings[RTW89_RXCH_RPQ]; in rtw89_pci_poll_rpq_dma()
699 spin_lock_bh(&rtwpci->trx_lock); in rtw89_pci_poll_rpq_dma()
701 cnt = rtw89_pci_rxbd_recalc(rtwdev, rx_ring); in rtw89_pci_poll_rpq_dma()
702 if (cnt == 0) in rtw89_pci_poll_rpq_dma()
705 rtw89_pci_release_tx(rtwdev, rx_ring, cnt); in rtw89_pci_poll_rpq_dma()
708 spin_unlock_bh(&rtwpci->trx_lock); in rtw89_pci_poll_rpq_dma()
711 work_done = min_t(int, cnt, budget); in rtw89_pci_poll_rpq_dma()
712 rtwdev->napi_budget_countdown -= work_done; in rtw89_pci_poll_rpq_dma()
727 rx_ring = &rtwpci->rx_rings[i]; in rtw89_pci_isr_rxd_unavail()
728 bd_ring = &rx_ring->bd_ring; in rtw89_pci_isr_rxd_unavail()
730 reg_idx = rtw89_read32(rtwdev, bd_ring->addr.idx); in rtw89_pci_isr_rxd_unavail()
733 hw_idx_next = (hw_idx + 1) % bd_ring->len; in rtw89_pci_isr_rxd_unavail()
740 i, reg_idx, bd_ring->len); in rtw89_pci_isr_rxd_unavail()
748 isrs->halt_c2h_isrs = rtw89_read32(rtwdev, R_AX_HISR0) & rtwpci->halt_c2h_intrs; in rtw89_pci_recognize_intrs()
749 isrs->isrs[0] = rtw89_read32(rtwdev, R_AX_PCIE_HISR00) & rtwpci->intrs[0]; in rtw89_pci_recognize_intrs()
750 isrs->isrs[1] = rtw89_read32(rtwdev, R_AX_PCIE_HISR10) & rtwpci->intrs[1]; in rtw89_pci_recognize_intrs()
752 rtw89_write32(rtwdev, R_AX_HISR0, isrs->halt_c2h_isrs); in rtw89_pci_recognize_intrs()
753 rtw89_write32(rtwdev, R_AX_PCIE_HISR00, isrs->isrs[0]); in rtw89_pci_recognize_intrs()
754 rtw89_write32(rtwdev, R_AX_PCIE_HISR10, isrs->isrs[1]); in rtw89_pci_recognize_intrs()
762 isrs->ind_isrs = rtw89_read32(rtwdev, R_AX_PCIE_HISR00_V1) & rtwpci->ind_intrs; in rtw89_pci_recognize_intrs_v1()
763 isrs->halt_c2h_isrs = isrs->ind_isrs & B_AX_HS0ISR_IND_INT_EN ? in rtw89_pci_recognize_intrs_v1()
764 rtw89_read32(rtwdev, R_AX_HISR0) & rtwpci->halt_c2h_intrs : 0; in rtw89_pci_recognize_intrs_v1()
765 isrs->isrs[0] = isrs->ind_isrs & B_AX_HCI_AXIDMA_INT_EN ? in rtw89_pci_recognize_intrs_v1()
766 rtw89_read32(rtwdev, R_AX_HAXI_HISR00) & rtwpci->intrs[0] : 0; in rtw89_pci_recognize_intrs_v1()
767 isrs->isrs[1] = isrs->ind_isrs & B_AX_HS1ISR_IND_INT_EN ? in rtw89_pci_recognize_intrs_v1()
768 rtw89_read32(rtwdev, R_AX_HISR1) & rtwpci->intrs[1] : 0; in rtw89_pci_recognize_intrs_v1()
770 if (isrs->halt_c2h_isrs) in rtw89_pci_recognize_intrs_v1()
771 rtw89_write32(rtwdev, R_AX_HISR0, isrs->halt_c2h_isrs); in rtw89_pci_recognize_intrs_v1()
772 if (isrs->isrs[0]) in rtw89_pci_recognize_intrs_v1()
773 rtw89_write32(rtwdev, R_AX_HAXI_HISR00, isrs->isrs[0]); in rtw89_pci_recognize_intrs_v1()
774 if (isrs->isrs[1]) in rtw89_pci_recognize_intrs_v1()
775 rtw89_write32(rtwdev, R_AX_HISR1, isrs->isrs[1]); in rtw89_pci_recognize_intrs_v1()
783 isrs->ind_isrs = rtw89_read32(rtwdev, R_BE_PCIE_HISR) & rtwpci->ind_intrs; in rtw89_pci_recognize_intrs_v2()
784 isrs->halt_c2h_isrs = isrs->ind_isrs & B_BE_HS0ISR_IND_INT ? in rtw89_pci_recognize_intrs_v2()
785 rtw89_read32(rtwdev, R_BE_HISR0) & rtwpci->halt_c2h_intrs : 0; in rtw89_pci_recognize_intrs_v2()
786 isrs->isrs[0] = isrs->ind_isrs & B_BE_HCI_AXIDMA_INT ? in rtw89_pci_recognize_intrs_v2()
787 rtw89_read32(rtwdev, R_BE_HAXI_HISR00) & rtwpci->intrs[0] : 0; in rtw89_pci_recognize_intrs_v2()
788 isrs->isrs[1] = rtw89_read32(rtwdev, R_BE_PCIE_DMA_ISR) & rtwpci->intrs[1]; in rtw89_pci_recognize_intrs_v2()
790 if (isrs->halt_c2h_isrs) in rtw89_pci_recognize_intrs_v2()
791 rtw89_write32(rtwdev, R_BE_HISR0, isrs->halt_c2h_isrs); in rtw89_pci_recognize_intrs_v2()
792 if (isrs->isrs[0]) in rtw89_pci_recognize_intrs_v2()
793 rtw89_write32(rtwdev, R_BE_HAXI_HISR00, isrs->isrs[0]); in rtw89_pci_recognize_intrs_v2()
794 if (isrs->isrs[1]) in rtw89_pci_recognize_intrs_v2()
795 rtw89_write32(rtwdev, R_BE_PCIE_DMA_ISR, isrs->isrs[1]); in rtw89_pci_recognize_intrs_v2()
796 rtw89_write32(rtwdev, R_BE_PCIE_HISR, isrs->ind_isrs); in rtw89_pci_recognize_intrs_v2()
802 rtw89_write32(rtwdev, R_AX_HIMR0, rtwpci->halt_c2h_intrs); in rtw89_pci_enable_intr()
803 rtw89_write32(rtwdev, R_AX_PCIE_HIMR00, rtwpci->intrs[0]); in rtw89_pci_enable_intr()
804 rtw89_write32(rtwdev, R_AX_PCIE_HIMR10, rtwpci->intrs[1]); in rtw89_pci_enable_intr()
818 rtw89_write32(rtwdev, R_AX_PCIE_HIMR00_V1, rtwpci->ind_intrs); in rtw89_pci_enable_intr_v1()
819 rtw89_write32(rtwdev, R_AX_HIMR0, rtwpci->halt_c2h_intrs); in rtw89_pci_enable_intr_v1()
820 rtw89_write32(rtwdev, R_AX_HAXI_HIMR00, rtwpci->intrs[0]); in rtw89_pci_enable_intr_v1()
821 rtw89_write32(rtwdev, R_AX_HIMR1, rtwpci->intrs[1]); in rtw89_pci_enable_intr_v1()
833 rtw89_write32(rtwdev, R_BE_HIMR0, rtwpci->halt_c2h_intrs); in rtw89_pci_enable_intr_v2()
834 rtw89_write32(rtwdev, R_BE_HAXI_HIMR00, rtwpci->intrs[0]); in rtw89_pci_enable_intr_v2()
835 rtw89_write32(rtwdev, R_BE_PCIE_DMA_IMR_0_V1, rtwpci->intrs[1]); in rtw89_pci_enable_intr_v2()
836 rtw89_write32(rtwdev, R_BE_PCIE_HIMR0, rtwpci->ind_intrs); in rtw89_pci_enable_intr_v2()
849 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_recovery_start()
852 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_ops_recovery_start()
856 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_ops_recovery_start()
861 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_recovery_complete()
864 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_ops_recovery_complete()
868 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_ops_recovery_complete()
873 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_low_power_interrupt_handler()
877 rtwdev->napi_budget_countdown = budget; in rtw89_pci_low_power_interrupt_handler()
886 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_interrupt_threadfn()
887 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_interrupt_threadfn()
888 const struct rtw89_pci_gen_def *gen_def = info->gen_def; in rtw89_pci_interrupt_threadfn()
892 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_interrupt_threadfn()
894 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_interrupt_threadfn()
896 if (unlikely(isrs.isrs[0] & gen_def->isr_rdu)) in rtw89_pci_interrupt_threadfn()
899 if (unlikely(isrs.halt_c2h_isrs & gen_def->isr_halt_c2h)) in rtw89_pci_interrupt_threadfn()
902 if (unlikely(isrs.halt_c2h_isrs & gen_def->isr_wdt_timeout)) in rtw89_pci_interrupt_threadfn()
905 if (unlikely(rtwpci->under_recovery)) in rtw89_pci_interrupt_threadfn()
908 if (unlikely(rtwpci->low_power)) { in rtw89_pci_interrupt_threadfn()
913 if (likely(rtwpci->running)) { in rtw89_pci_interrupt_threadfn()
915 napi_schedule(&rtwdev->napi); in rtw89_pci_interrupt_threadfn()
922 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_interrupt_threadfn()
923 if (likely(rtwpci->running)) in rtw89_pci_interrupt_threadfn()
925 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_interrupt_threadfn()
932 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_interrupt_handler()
936 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_interrupt_handler()
941 if (unlikely(!rtwpci->running)) { in rtw89_pci_interrupt_handler()
948 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_interrupt_handler()
1065 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_get_txch_addrs()
1068 return -EINVAL; in rtw89_pci_get_txch_addrs()
1070 *addr = &info->dma_addr_set->tx[txch]; in rtw89_pci_get_txch_addrs()
1079 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_get_rxch_addrs()
1082 return -EINVAL; in rtw89_pci_get_rxch_addrs()
1084 *addr = &info->dma_addr_set->rx[rxch]; in rtw89_pci_get_rxch_addrs()
1091 struct rtw89_pci_dma_ring *bd_ring = &ring->bd_ring; in rtw89_pci_get_avail_txbd_num()
1094 if (bd_ring->rp > bd_ring->wp) in rtw89_pci_get_avail_txbd_num()
1095 return bd_ring->rp - bd_ring->wp - 1; in rtw89_pci_get_avail_txbd_num()
1097 return bd_ring->len - (bd_ring->wp - bd_ring->rp) - 1; in rtw89_pci_get_avail_txbd_num()
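rtw89_pci_get_avail_txbd_num() counts the free TX buffer descriptors while always leaving one slot unused, so a full ring can be told apart from an empty one (wp == rp means empty). The same arithmetic as a self-contained sketch with illustrative names:

#include <assert.h>

/* Free slots in a `len`-entry descriptor ring, reserving one slot so
 * that wp == rp always means "empty".
 */
static unsigned int ring_space(unsigned int rp, unsigned int wp,
			       unsigned int len)
{
	if (rp > wp)
		return rp - wp - 1;
	return len - (wp - rp) - 1;
}

int main(void)
{
	assert(ring_space(0, 0, 64) == 63);  /* empty: len - 1 usable */
	assert(ring_space(10, 9, 64) == 0);  /* wp just behind rp: full */
	assert(ring_space(5, 20, 64) == 48); /* producer ahead of consumer */
	return 0;
}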
1103 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in __rtw89_pci_check_and_reclaim_tx_fwcmd_resource()
1104 struct rtw89_pci_tx_ring *tx_ring = &rtwpci->tx_rings[RTW89_TXCH_CH12]; in __rtw89_pci_check_and_reclaim_tx_fwcmd_resource()
1105 u32 cnt; in __rtw89_pci_check_and_reclaim_tx_fwcmd_resource() local
1107 spin_lock_bh(&rtwpci->trx_lock); in __rtw89_pci_check_and_reclaim_tx_fwcmd_resource()
1109 cnt = rtw89_pci_get_avail_txbd_num(tx_ring); in __rtw89_pci_check_and_reclaim_tx_fwcmd_resource()
1110 spin_unlock_bh(&rtwpci->trx_lock); in __rtw89_pci_check_and_reclaim_tx_fwcmd_resource()
1112 return cnt; in __rtw89_pci_check_and_reclaim_tx_fwcmd_resource()
1119 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1120 struct rtw89_pci_tx_ring *tx_ring = &rtwpci->tx_rings[txch]; in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1121 struct rtw89_pci_tx_wd_ring *wd_ring = &tx_ring->wd_ring; in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1122 u32 cnt; in __rtw89_pci_check_and_reclaim_tx_resource_noio() local
1124 spin_lock_bh(&rtwpci->trx_lock); in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1125 cnt = rtw89_pci_get_avail_txbd_num(tx_ring); in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1127 cnt = min(cnt, wd_ring->curr_num); in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1128 spin_unlock_bh(&rtwpci->trx_lock); in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1130 return cnt; in __rtw89_pci_check_and_reclaim_tx_resource_noio()
1136 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in __rtw89_pci_check_and_reclaim_tx_resource()
1137 struct rtw89_pci_tx_ring *tx_ring = &rtwpci->tx_rings[txch]; in __rtw89_pci_check_and_reclaim_tx_resource()
1138 struct rtw89_pci_tx_wd_ring *wd_ring = &tx_ring->wd_ring; in __rtw89_pci_check_and_reclaim_tx_resource()
1139 const struct rtw89_chip_info *chip = rtwdev->chip; in __rtw89_pci_check_and_reclaim_tx_resource()
1143 u32 cnt; in __rtw89_pci_check_and_reclaim_tx_resource() local
1145 rx_ring = &rtwpci->rx_rings[RTW89_RXCH_RPQ]; in __rtw89_pci_check_and_reclaim_tx_resource()
1147 spin_lock_bh(&rtwpci->trx_lock); in __rtw89_pci_check_and_reclaim_tx_resource()
1149 wd_cnt = wd_ring->curr_num; in __rtw89_pci_check_and_reclaim_tx_resource()
1152 cnt = rtw89_pci_rxbd_recalc(rtwdev, rx_ring); in __rtw89_pci_check_and_reclaim_tx_resource()
1153 if (cnt) in __rtw89_pci_check_and_reclaim_tx_resource()
1154 rtw89_pci_release_tx(rtwdev, rx_ring, cnt); in __rtw89_pci_check_and_reclaim_tx_resource()
1164 wd_cnt = wd_ring->curr_num; in __rtw89_pci_check_and_reclaim_tx_resource()
1171 if (rtwpci->low_power || chip->small_fifo_size) in __rtw89_pci_check_and_reclaim_tx_resource()
1182 spin_unlock_bh(&rtwpci->trx_lock); in __rtw89_pci_check_and_reclaim_tx_resource()
1190 if (rtwdev->hci.paused) in rtw89_pci_check_and_reclaim_tx_resource()
1201 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in __rtw89_pci_tx_kick_off()
1202 struct rtw89_pci_dma_ring *bd_ring = &tx_ring->bd_ring; in __rtw89_pci_tx_kick_off()
1205 spin_lock_bh(&rtwpci->trx_lock); in __rtw89_pci_tx_kick_off()
1207 addr = bd_ring->addr.idx; in __rtw89_pci_tx_kick_off()
1208 host_idx = bd_ring->wp; in __rtw89_pci_tx_kick_off()
1211 spin_unlock_bh(&rtwpci->trx_lock); in __rtw89_pci_tx_kick_off()
1217 struct rtw89_pci_dma_ring *bd_ring = &tx_ring->bd_ring; in rtw89_pci_tx_bd_ring_update()
1220 len = bd_ring->len; in rtw89_pci_tx_bd_ring_update()
1221 host_idx = bd_ring->wp + n_txbd; in rtw89_pci_tx_bd_ring_update()
1222 host_idx = host_idx < len ? host_idx : host_idx - len; in rtw89_pci_tx_bd_ring_update()
1224 bd_ring->wp = host_idx; in rtw89_pci_tx_bd_ring_update()
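rtw89_pci_tx_bd_ring_update() advances the software write pointer by the number of buffer descriptors just queued, subtracting the ring length once instead of taking a modulo. A small sketch under the same assumption that the increment never exceeds the ring length:

#include <assert.h>

/* Advance a ring write pointer by `n` entries, wrapping at most once
 * (assumes n <= len, mirroring the single subtraction in the driver).
 */
static unsigned int ring_advance(unsigned int wp, unsigned int n,
				 unsigned int len)
{
	unsigned int idx = wp + n;

	return idx < len ? idx : idx - len;
}

int main(void)
{
	assert(ring_advance(60, 3, 64) == 63);
	assert(ring_advance(60, 8, 64) == 4);
	return 0;
}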
1229 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_tx_kick_off()
1230 struct rtw89_pci_tx_ring *tx_ring = &rtwpci->tx_rings[txch]; in rtw89_pci_ops_tx_kick_off()
1232 if (rtwdev->hci.paused) { in rtw89_pci_ops_tx_kick_off()
1233 set_bit(txch, rtwpci->kick_map); in rtw89_pci_ops_tx_kick_off()
1242 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_tx_kick_off_pending()
1247 if (!test_and_clear_bit(txch, rtwpci->kick_map)) in rtw89_pci_tx_kick_off_pending()
1250 tx_ring = &rtwpci->tx_rings[txch]; in rtw89_pci_tx_kick_off_pending()
1257 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in __pci_flush_txch()
1258 struct rtw89_pci_tx_ring *tx_ring = &rtwpci->tx_rings[txch]; in __pci_flush_txch()
1259 struct rtw89_pci_dma_ring *bd_ring = &tx_ring->bd_ring; in __pci_flush_txch()
1269 cur_idx = rtw89_read32(rtwdev, bd_ring->addr.idx); in __pci_flush_txch()
1271 if (cur_rp == bd_ring->wp) in __pci_flush_txch()
1284 const struct rtw89_pci_info *info = rtwdev->pci_info; in __rtw89_pci_ops_flush_txchs()
1291 if (info->tx_dma_ch_mask & BIT(i)) in __rtw89_pci_ops_flush_txchs()
1302 __rtw89_pci_ops_flush_txchs(rtwdev, BIT(RTW89_TXCH_NUM) - 1, drop); in rtw89_pci_ops_flush_queues()
1307 dma_addr_t dma, u8 *add_info_nr) in rtw89_pci_fill_txaddr_info() argument
1312 txaddr_info->length = cpu_to_le16(total_len); in rtw89_pci_fill_txaddr_info()
1314 option |= le16_encode_bits(upper_32_bits(dma), RTW89_PCI_ADDR_HIGH_MASK); in rtw89_pci_fill_txaddr_info()
1315 txaddr_info->option = option; in rtw89_pci_fill_txaddr_info()
1316 txaddr_info->dma = cpu_to_le32(dma); in rtw89_pci_fill_txaddr_info()
1326 dma_addr_t dma, u8 *add_info_nr) in rtw89_pci_fill_txaddr_info_v1() argument
1337 remain -= len; in rtw89_pci_fill_txaddr_info_v1()
1342 length_option |= u16_encode_bits(upper_32_bits(dma), in rtw89_pci_fill_txaddr_info_v1()
1344 txaddr_info->length_opt = cpu_to_le16(length_option); in rtw89_pci_fill_txaddr_info_v1()
1345 txaddr_info->dma_low_lsb = cpu_to_le16(FIELD_GET(GENMASK(15, 0), dma)); in rtw89_pci_fill_txaddr_info_v1()
1346 txaddr_info->dma_low_msb = cpu_to_le16(FIELD_GET(GENMASK(31, 16), dma)); in rtw89_pci_fill_txaddr_info_v1()
1348 dma += len; in rtw89_pci_fill_txaddr_info_v1()
1366 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_txwd_submit()
1367 const struct rtw89_chip_info *chip = rtwdev->chip; in rtw89_pci_txwd_submit()
1368 struct rtw89_tx_desc_info *desc_info = &tx_req->desc_info; in rtw89_pci_txwd_submit()
1371 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_txwd_submit()
1372 struct sk_buff *skb = tx_req->skb; in rtw89_pci_txwd_submit()
1375 bool en_wd_info = desc_info->en_wd_info; in rtw89_pci_txwd_submit()
1379 dma_addr_t dma; in rtw89_pci_txwd_submit() local
1382 dma = dma_map_single(&pdev->dev, skb->data, skb->len, DMA_TO_DEVICE); in rtw89_pci_txwd_submit()
1383 if (dma_mapping_error(&pdev->dev, dma)) { in rtw89_pci_txwd_submit()
1384 rtw89_err(rtwdev, "failed to map skb dma data\n"); in rtw89_pci_txwd_submit()
1385 ret = -EBUSY; in rtw89_pci_txwd_submit()
1389 tx_data->dma = dma; in rtw89_pci_txwd_submit()
1390 rcu_assign_pointer(skb_data->wait, NULL); in rtw89_pci_txwd_submit()
1393 txwd_len = chip->txwd_body_size; in rtw89_pci_txwd_submit()
1394 txwd_len += en_wd_info ? chip->txwd_info_size : 0; in rtw89_pci_txwd_submit()
1396 txwp_info = txwd->vaddr + txwd_len; in rtw89_pci_txwd_submit()
1397 txwp_info->seq0 = cpu_to_le16(txwd->seq | RTW89_PCI_TXWP_VALID); in rtw89_pci_txwd_submit()
1398 txwp_info->seq1 = 0; in rtw89_pci_txwd_submit()
1399 txwp_info->seq2 = 0; in rtw89_pci_txwd_submit()
1400 txwp_info->seq3 = 0; in rtw89_pci_txwd_submit()
1402 tx_ring->tx_cnt++; in rtw89_pci_txwd_submit()
1403 txaddr_info_addr = txwd->vaddr + txwd_len + txwp_len; in rtw89_pci_txwd_submit()
1405 rtw89_chip_fill_txaddr_info(rtwdev, txaddr_info_addr, skb->len, in rtw89_pci_txwd_submit()
1406 dma, &desc_info->addr_info_nr); in rtw89_pci_txwd_submit()
1408 txwd->len = txwd_len + txwp_len + txaddr_info_len; in rtw89_pci_txwd_submit()
1410 rtw89_chip_fill_txdesc(rtwdev, desc_info, txwd->vaddr); in rtw89_pci_txwd_submit()
1412 skb_queue_tail(&txwd->queue, skb); in rtw89_pci_txwd_submit()
1425 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_fwcmd_submit()
1426 const struct rtw89_chip_info *chip = rtwdev->chip; in rtw89_pci_fwcmd_submit()
1427 struct rtw89_tx_desc_info *desc_info = &tx_req->desc_info; in rtw89_pci_fwcmd_submit()
1429 int txdesc_size = chip->h2c_desc_size; in rtw89_pci_fwcmd_submit()
1430 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_fwcmd_submit()
1431 struct sk_buff *skb = tx_req->skb; in rtw89_pci_fwcmd_submit()
1433 dma_addr_t dma; in rtw89_pci_fwcmd_submit() local
1440 dma = dma_map_single(&pdev->dev, skb->data, skb->len, DMA_TO_DEVICE); in rtw89_pci_fwcmd_submit()
1441 if (dma_mapping_error(&pdev->dev, dma)) { in rtw89_pci_fwcmd_submit()
1442 rtw89_err(rtwdev, "failed to map fwcmd dma data\n"); in rtw89_pci_fwcmd_submit()
1443 return -EBUSY; in rtw89_pci_fwcmd_submit()
1446 tx_data->dma = dma; in rtw89_pci_fwcmd_submit()
1448 opt |= le16_encode_bits(upper_32_bits(dma), RTW89_PCI_TXBD_OPT_DMA_HI); in rtw89_pci_fwcmd_submit()
1449 txbd->opt = opt; in rtw89_pci_fwcmd_submit()
1450 txbd->length = cpu_to_le16(skb->len); in rtw89_pci_fwcmd_submit()
1451 txbd->dma = cpu_to_le32(tx_data->dma); in rtw89_pci_fwcmd_submit()
1452 skb_queue_tail(&rtwpci->h2c_queue, skb); in rtw89_pci_fwcmd_submit()
1472 if (tx_ring->txch == RTW89_TXCH_CH12) in rtw89_pci_txbd_submit()
1478 ret = -ENOSPC; in rtw89_pci_txbd_submit()
1484 rtw89_err(rtwdev, "failed to submit TXWD %d\n", txwd->seq); in rtw89_pci_txbd_submit()
1488 list_add_tail(&txwd->list, &tx_ring->busy_pages); in rtw89_pci_txbd_submit()
1491 opt |= le16_encode_bits(upper_32_bits(txwd->paddr), RTW89_PCI_TXBD_OPT_DMA_HI); in rtw89_pci_txbd_submit()
1492 txbd->opt = opt; in rtw89_pci_txbd_submit()
1493 txbd->length = cpu_to_le16(txwd->len); in rtw89_pci_txbd_submit()
1494 txbd->dma = cpu_to_le32(txwd->paddr); in rtw89_pci_txbd_submit()
1509 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_tx_write()
1515 /* check the tx type and dma channel for fw cmd queue */ in rtw89_pci_tx_write()
1517 tx_req->tx_type == RTW89_CORE_TX_TYPE_FWCMD) && in rtw89_pci_tx_write()
1519 tx_req->tx_type != RTW89_CORE_TX_TYPE_FWCMD)) { in rtw89_pci_tx_write()
1520 rtw89_err(rtwdev, "only fw cmd uses dma channel 12\n"); in rtw89_pci_tx_write()
1521 return -EINVAL; in rtw89_pci_tx_write()
1524 tx_ring = &rtwpci->tx_rings[txch]; in rtw89_pci_tx_write()
1525 spin_lock_bh(&rtwpci->trx_lock); in rtw89_pci_tx_write()
1530 ret = -ENOSPC; in rtw89_pci_tx_write()
1541 spin_unlock_bh(&rtwpci->trx_lock); in rtw89_pci_tx_write()
1545 spin_unlock_bh(&rtwpci->trx_lock); in rtw89_pci_tx_write()
1551 struct rtw89_tx_desc_info *desc_info = &tx_req->desc_info; in rtw89_pci_ops_tx_write()
1554 ret = rtw89_pci_tx_write(rtwdev, tx_req, desc_info->ch_dma); in rtw89_pci_ops_tx_write()
1556 rtw89_err(rtwdev, "failed to TX Queue %d\n", desc_info->ch_dma); in rtw89_pci_ops_tx_write()
1593 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_init_wp_16sel()
1594 u32 addr = info->wp_sel_addr; in rtw89_pci_init_wp_16sel()
1598 if (!info->wp_sel_addr) in rtw89_pci_init_wp_16sel()
1612 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_reset_trx_rings()
1613 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_reset_trx_rings()
1614 const struct rtw89_pci_bd_ram *bd_ram_table = *info->bd_ram_table; in rtw89_pci_reset_trx_rings()
1627 if (info->tx_dma_ch_mask & BIT(i)) in rtw89_pci_reset_trx_rings()
1630 tx_ring = &rtwpci->tx_rings[i]; in rtw89_pci_reset_trx_rings()
1631 bd_ring = &tx_ring->bd_ring; in rtw89_pci_reset_trx_rings()
1633 addr_num = bd_ring->addr.num; in rtw89_pci_reset_trx_rings()
1634 addr_bdram = bd_ring->addr.bdram; in rtw89_pci_reset_trx_rings()
1635 addr_desa_l = bd_ring->addr.desa_l; in rtw89_pci_reset_trx_rings()
1636 bd_ring->wp = 0; in rtw89_pci_reset_trx_rings()
1637 bd_ring->rp = 0; in rtw89_pci_reset_trx_rings()
1639 rtw89_write16(rtwdev, addr_num, bd_ring->len); in rtw89_pci_reset_trx_rings()
1641 val32 = FIELD_PREP(BDRAM_SIDX_MASK, bd_ram->start_idx) | in rtw89_pci_reset_trx_rings()
1642 FIELD_PREP(BDRAM_MAX_MASK, bd_ram->max_num) | in rtw89_pci_reset_trx_rings()
1643 FIELD_PREP(BDRAM_MIN_MASK, bd_ram->min_num); in rtw89_pci_reset_trx_rings()
1647 rtw89_write32(rtwdev, addr_desa_l, bd_ring->dma); in rtw89_pci_reset_trx_rings()
1648 rtw89_write32(rtwdev, addr_desa_l + 4, upper_32_bits(bd_ring->dma)); in rtw89_pci_reset_trx_rings()
1652 rx_ring = &rtwpci->rx_rings[i]; in rtw89_pci_reset_trx_rings()
1653 bd_ring = &rx_ring->bd_ring; in rtw89_pci_reset_trx_rings()
1654 addr_num = bd_ring->addr.num; in rtw89_pci_reset_trx_rings()
1655 addr_idx = bd_ring->addr.idx; in rtw89_pci_reset_trx_rings()
1656 addr_desa_l = bd_ring->addr.desa_l; in rtw89_pci_reset_trx_rings()
1657 if (info->rx_ring_eq_is_full) in rtw89_pci_reset_trx_rings()
1658 bd_ring->wp = bd_ring->len - 1; in rtw89_pci_reset_trx_rings()
1660 bd_ring->wp = 0; in rtw89_pci_reset_trx_rings()
1661 bd_ring->rp = 0; in rtw89_pci_reset_trx_rings()
1662 rx_ring->diliver_skb = NULL; in rtw89_pci_reset_trx_rings()
1663 rx_ring->diliver_desc.ready = false; in rtw89_pci_reset_trx_rings()
1664 rx_ring->target_rx_tag = 0; in rtw89_pci_reset_trx_rings()
1666 rtw89_write16(rtwdev, addr_num, bd_ring->len); in rtw89_pci_reset_trx_rings()
1667 rtw89_write32(rtwdev, addr_desa_l, bd_ring->dma); in rtw89_pci_reset_trx_rings()
1668 rtw89_write32(rtwdev, addr_desa_l + 4, upper_32_bits(bd_ring->dma)); in rtw89_pci_reset_trx_rings()
1670 if (info->rx_ring_eq_is_full) in rtw89_pci_reset_trx_rings()
1671 rtw89_write16(rtwdev, addr_idx, bd_ring->wp); in rtw89_pci_reset_trx_rings()
1686 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_reset()
1687 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ops_reset()
1692 spin_lock_bh(&rtwpci->trx_lock); in rtw89_pci_ops_reset()
1694 if (info->tx_dma_ch_mask & BIT(txch)) in rtw89_pci_ops_reset()
1698 skb_queue_len(&rtwpci->h2c_queue), true); in rtw89_pci_ops_reset()
1701 rtw89_pci_release_tx_ring(rtwdev, &rtwpci->tx_rings[txch]); in rtw89_pci_ops_reset()
1703 spin_unlock_bh(&rtwpci->trx_lock); in rtw89_pci_ops_reset()
1708 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_enable_intr_lock()
1711 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_enable_intr_lock()
1712 rtwpci->running = true; in rtw89_pci_enable_intr_lock()
1714 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_enable_intr_lock()
1719 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_disable_intr_lock()
1722 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_disable_intr_lock()
1723 rtwpci->running = false; in rtw89_pci_disable_intr_lock()
1725 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_disable_intr_lock()
1738 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_stop()
1739 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_ops_stop()
1742 synchronize_irq(pdev->irq); in rtw89_pci_ops_stop()
1748 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_pause()
1749 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_ops_pause()
1753 synchronize_irq(pdev->irq); in rtw89_pci_ops_pause()
1754 if (test_bit(RTW89_FLAG_NAPI_RUNNING, rtwdev->flags)) in rtw89_pci_ops_pause()
1755 napi_synchronize(&rtwdev->napi); in rtw89_pci_ops_pause()
1765 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_switch_bd_idx_addr()
1766 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_switch_bd_idx_addr()
1767 const struct rtw89_pci_bd_idx_addr *bd_idx_addr = info->bd_idx_addr_low_power; in rtw89_pci_switch_bd_idx_addr()
1768 const struct rtw89_pci_ch_dma_addr_set *dma_addr_set = info->dma_addr_set; in rtw89_pci_switch_bd_idx_addr()
1777 tx_ring = &rtwpci->tx_rings[i]; in rtw89_pci_switch_bd_idx_addr()
1778 tx_ring->bd_ring.addr.idx = low_power ? in rtw89_pci_switch_bd_idx_addr()
1779 bd_idx_addr->tx_bd_addrs[i] : in rtw89_pci_switch_bd_idx_addr()
1780 dma_addr_set->tx[i].idx; in rtw89_pci_switch_bd_idx_addr()
1784 rx_ring = &rtwpci->rx_rings[i]; in rtw89_pci_switch_bd_idx_addr()
1785 rx_ring->bd_ring.addr.idx = low_power ? in rtw89_pci_switch_bd_idx_addr()
1786 bd_idx_addr->rx_bd_addrs[i] : in rtw89_pci_switch_bd_idx_addr()
1787 dma_addr_set->rx[i].idx; in rtw89_pci_switch_bd_idx_addr()
1795 WARN(!rtwdev->hci.paused, "HCI isn't paused\n"); in rtw89_pci_ops_switch_mode()
1806 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_read32_cmac()
1807 u32 val = readl(rtwpci->mmap + addr); in rtw89_pci_ops_read32_cmac()
1818 val = readl(rtwpci->mmap + addr); in rtw89_pci_ops_read32_cmac()
1826 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_read8()
1830 return readb(rtwpci->mmap + addr); in rtw89_pci_ops_read8()
1840 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_read16()
1844 return readw(rtwpci->mmap + addr); in rtw89_pci_ops_read16()
1854 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_read32()
1857 return readl(rtwpci->mmap + addr); in rtw89_pci_ops_read32()
1864 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_write8()
1866 writeb(data, rtwpci->mmap + addr); in rtw89_pci_ops_write8()
1871 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_write16()
1873 writew(data, rtwpci->mmap + addr); in rtw89_pci_ops_write16()
1878 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_ops_write32()
1880 writel(data, rtwpci->mmap + addr); in rtw89_pci_ops_write32()
1885 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ctrl_dma_trx()
1888 rtw89_write32_set(rtwdev, info->init_cfg_reg, in rtw89_pci_ctrl_dma_trx()
1889 info->rxhci_en_bit | info->txhci_en_bit); in rtw89_pci_ctrl_dma_trx()
1891 rtw89_write32_clr(rtwdev, info->init_cfg_reg, in rtw89_pci_ctrl_dma_trx()
1892 info->rxhci_en_bit | info->txhci_en_bit); in rtw89_pci_ctrl_dma_trx()
1897 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ctrl_dma_io()
1898 const struct rtw89_reg_def *reg = &info->dma_io_stop; in rtw89_pci_ctrl_dma_io()
1901 rtw89_write32_clr(rtwdev, reg->addr, reg->mask); in rtw89_pci_ctrl_dma_io()
1903 rtw89_write32_set(rtwdev, reg->addr, reg->mask); in rtw89_pci_ctrl_dma_io()
1934 return -EINVAL; in rtw89_pci_check_mdio()
2075 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_write_config_byte()
2076 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_write_config_byte()
2077 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_write_config_byte()
2093 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_read_config_byte()
2094 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_read_config_byte()
2095 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_read_config_byte()
2172 return -EINVAL; in __get_target()
2216 return -EOPNOTSUPP; in rtw89_pci_auto_refclk_cal()
2267 mgn_set = tar * INTF_INTGRA_HOSTREF_V1 / INTF_INTGRA_MINREF_V1 - tar; in rtw89_pci_auto_refclk_cal()
2345 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_deglitch_setting()
2377 if (rtwdev->chip->chip_id != RTL8852C) in rtw89_pci_disable_eq_ax()
2441 if (!test_bit(RTW89_QUIRK_PCI_BER, rtwdev->quirks)) in rtw89_pci_ber()
2455 if (rtwdev->chip->chip_id != RTL8852A) in rtw89_pci_rxdma_prefth()
2463 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_l1off_pwroff()
2475 if (rtwdev->chip->chip_id != RTL8852A) in rtw89_pci_l2_rxen_lat()
2493 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_aphy_pwrcut()
2503 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_hci_ldo()
2510 } else if (rtwdev->chip->chip_id == RTL8852C) { in rtw89_pci_hci_ldo()
2535 if (rtwdev->chip->chip_id != RTL8852C) in rtw89_pci_autoload_hang()
2544 if (!(rtwdev->chip->chip_id == RTL8852C && rtwdev->hal.cv == CHIP_CAV)) in rtw89_pci_l12_vmain()
2552 if (!(rtwdev->chip->chip_id == RTL8852C && rtwdev->hal.cv == CHIP_CAV)) in rtw89_pci_gen2_force_ib()
2564 if (rtwdev->chip->chip_id != RTL8852C) in rtw89_pci_l1_ent_lat()
2572 if (rtwdev->chip->chip_id != RTL8852C) in rtw89_pci_wd_exit_l1()
2580 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_set_sic()
2589 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_set_lbc()
2592 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_set_lbc()
2596 if (info->lbc_en == MAC_AX_PCIE_ENABLE) { in rtw89_pci_set_lbc()
2597 lbc = u32_replace_bits(lbc, info->lbc_tmr, B_AX_LBC_TIMER); in rtw89_pci_set_lbc()
2608 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_set_io_rcy()
2611 if (rtwdev->chip->chip_id != RTL8852C) in rtw89_pci_set_io_rcy()
2614 if (info->io_rcy_en == MAC_AX_PCIE_ENABLE) { in rtw89_pci_set_io_rcy()
2616 info->io_rcy_tmr); in rtw89_pci_set_io_rcy()
2635 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_set_dbg()
2641 if (rtwdev->chip->chip_id == RTL8852A) in rtw89_pci_set_dbg()
2648 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_set_keep_reg()
2657 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_clr_idx_all_ax()
2658 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_clr_idx_all_ax()
2662 u32 rxbd_rwptr_clr = info->rxbd_rwptr_clr_reg; in rtw89_pci_clr_idx_all_ax()
2663 u32 txbd_rwptr_clr2 = info->txbd_rwptr_clr2_reg; in rtw89_pci_clr_idx_all_ax()
2668 /* clear DMA indexes */ in rtw89_pci_clr_idx_all_ax()
2679 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_poll_txdma_ch_idle_ax()
2680 u32 dma_busy1 = info->dma_busy1.addr; in rtw89_pci_poll_txdma_ch_idle_ax()
2681 u32 dma_busy2 = info->dma_busy2_reg; in rtw89_pci_poll_txdma_ch_idle_ax()
2685 check = info->dma_busy1.mask; in rtw89_pci_poll_txdma_ch_idle_ax()
2707 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_poll_rxdma_ch_idle_ax()
2708 u32 dma_busy3 = info->dma_busy3_reg; in rtw89_pci_poll_rxdma_ch_idle_ax()
2743 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_mode_op()
2744 enum mac_ax_bd_trunc_mode txbd_trunc_mode = info->txbd_trunc_mode; in rtw89_pci_mode_op()
2745 enum mac_ax_bd_trunc_mode rxbd_trunc_mode = info->rxbd_trunc_mode; in rtw89_pci_mode_op()
2746 enum mac_ax_rxbd_mode rxbd_mode = info->rxbd_mode; in rtw89_pci_mode_op()
2747 enum mac_ax_tag_mode tag_mode = info->tag_mode; in rtw89_pci_mode_op()
2748 enum mac_ax_wd_dma_intvl wd_dma_idle_intvl = info->wd_dma_idle_intvl; in rtw89_pci_mode_op()
2749 enum mac_ax_wd_dma_intvl wd_dma_act_intvl = info->wd_dma_act_intvl; in rtw89_pci_mode_op()
2750 enum mac_ax_tx_burst tx_burst = info->tx_burst; in rtw89_pci_mode_op()
2751 enum mac_ax_rx_burst rx_burst = info->rx_burst; in rtw89_pci_mode_op()
2752 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_mode_op()
2753 u8 cv = rtwdev->hal.cv; in rtw89_pci_mode_op()
2773 rtw89_write32_clr(rtwdev, info->init_cfg_reg, info->rxbd_mode_bit); in rtw89_pci_mode_op()
2775 rtw89_write32_set(rtwdev, info->init_cfg_reg, info->rxbd_mode_bit); in rtw89_pci_mode_op()
2802 rtw89_write32_mask(rtwdev, info->exp_ctrl_reg, info->max_tag_num_mask, in rtw89_pci_mode_op()
2803 info->multi_tag_num); in rtw89_pci_mode_op()
2832 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ops_deinit()
2836 if (rtwdev->chip->chip_id == RTL8852A) { in rtw89_pci_ops_deinit()
2840 info->ltr_set(rtwdev, false); in rtw89_pci_ops_deinit()
2849 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ops_mac_pre_init_ax()
2890 rtw89_write32_set(rtwdev, info->dma_stop1.addr, B_AX_STOP_WPDMA); in rtw89_pci_ops_mac_pre_init_ax()
2892 /* stop DMA activities */ in rtw89_pci_ops_mac_pre_init_ax()
2897 rtw89_err(rtwdev, "[ERR] poll pcie dma all idle\n"); in rtw89_pci_ops_mac_pre_init_ax()
2917 /* start DMA activities */ in rtw89_pci_ops_mac_pre_init_ax()
2939 return -EINVAL; in rtw89_pci_ltr_set()
2942 return -EINVAL; in rtw89_pci_ltr_set()
2945 return -EINVAL; in rtw89_pci_ltr_set()
2948 return -EINVAL; in rtw89_pci_ltr_set()
2972 return -EINVAL; in rtw89_pci_ltr_set_v1()
2975 return -EINVAL; in rtw89_pci_ltr_set_v1()
2978 return -EINVAL; in rtw89_pci_ltr_set_v1()
2981 return -EINVAL; in rtw89_pci_ltr_set_v1()
2984 return -EINVAL; in rtw89_pci_ltr_set_v1()
3014 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ops_mac_post_init_ax()
3015 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_ops_mac_post_init_ax()
3018 ret = info->ltr_set(rtwdev, true); in rtw89_pci_ops_mac_post_init_ax()
3028 /* ADDR info 8-byte mode */ in rtw89_pci_ops_mac_post_init_ax()
3034 /* enable DMA for all queues */ in rtw89_pci_ops_mac_post_init_ax()
3038 rtw89_write32_clr(rtwdev, info->dma_stop1.addr, in rtw89_pci_ops_mac_post_init_ax()
3047 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_claim_device()
3057 pci_set_drvdata(pdev, rtwdev->hw); in rtw89_pci_claim_device()
3059 rtwpci->pdev = pdev; in rtw89_pci_claim_device()
3072 const struct rtw89_chip_info *chip = rtwdev->chip; in rtw89_pci_chip_is_manual_dac()
3074 switch (chip->chip_id) { in rtw89_pci_chip_is_manual_dac()
3087 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_is_dac_compatible_bridge()
3088 struct pci_dev *bridge = pci_upstream_bridge(rtwpci->pdev); in rtw89_pci_is_dac_compatible_bridge()
3096 switch (bridge->vendor) { in rtw89_pci_is_dac_compatible_bridge()
3100 if (bridge->device == 0x2806) in rtw89_pci_is_dac_compatible_bridge()
3110 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_cfg_dac()
3112 if (!rtwpci->enable_dac) in rtw89_pci_cfg_dac()
3124 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_setup_mapping()
3138 ret = dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(36)); in rtw89_pci_setup_mapping()
3140 rtwpci->enable_dac = true; in rtw89_pci_setup_mapping()
3143 ret = dma_set_mask_and_coherent(&pdev->dev, DMA_BIT_MASK(32)); in rtw89_pci_setup_mapping()
3146 "failed to set dma and consistent mask to 32/36-bit\n"); in rtw89_pci_setup_mapping()
3153 rtwpci->mmap = pci_iomap(pdev, bar_id, resource_len); in rtw89_pci_setup_mapping()
3154 if (!rtwpci->mmap) { in rtw89_pci_setup_mapping()
3156 ret = -EIO; in rtw89_pci_setup_mapping()
3171 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_clear_mapping()
3173 if (rtwpci->mmap) { in rtw89_pci_clear_mapping()
3174 pci_iounmap(pdev, rtwpci->mmap); in rtw89_pci_clear_mapping()
3183 struct rtw89_pci_tx_wd_ring *wd_ring = &tx_ring->wd_ring; in rtw89_pci_free_tx_wd_ring()
3184 u8 *head = wd_ring->head; in rtw89_pci_free_tx_wd_ring()
3185 dma_addr_t dma = wd_ring->dma; in rtw89_pci_free_tx_wd_ring() local
3186 u32 page_size = wd_ring->page_size; in rtw89_pci_free_tx_wd_ring()
3187 u32 page_num = wd_ring->page_num; in rtw89_pci_free_tx_wd_ring()
3190 dma_free_coherent(&pdev->dev, ring_sz, head, dma); in rtw89_pci_free_tx_wd_ring()
3191 wd_ring->head = NULL; in rtw89_pci_free_tx_wd_ring()
3200 dma_addr_t dma; in rtw89_pci_free_tx_ring() local
3202 head = tx_ring->bd_ring.head; in rtw89_pci_free_tx_ring()
3203 dma = tx_ring->bd_ring.dma; in rtw89_pci_free_tx_ring()
3204 ring_sz = tx_ring->bd_ring.desc_size * tx_ring->bd_ring.len; in rtw89_pci_free_tx_ring()
3205 dma_free_coherent(&pdev->dev, ring_sz, head, dma); in rtw89_pci_free_tx_ring()
3207 tx_ring->bd_ring.head = NULL; in rtw89_pci_free_tx_ring()
3213 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_free_tx_rings()
3214 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_free_tx_rings()
3219 if (info->tx_dma_ch_mask & BIT(i)) in rtw89_pci_free_tx_rings()
3221 tx_ring = &rtwpci->tx_rings[i]; in rtw89_pci_free_tx_rings()
3233 dma_addr_t dma; in rtw89_pci_free_rx_ring() local
3236 int ring_sz = rx_ring->bd_ring.desc_size * rx_ring->bd_ring.len; in rtw89_pci_free_rx_ring()
3239 buf_sz = rx_ring->buf_sz; in rtw89_pci_free_rx_ring()
3240 for (i = 0; i < rx_ring->bd_ring.len; i++) { in rtw89_pci_free_rx_ring()
3241 skb = rx_ring->buf[i]; in rtw89_pci_free_rx_ring()
3246 dma = rx_info->dma; in rtw89_pci_free_rx_ring()
3247 dma_unmap_single(&pdev->dev, dma, buf_sz, DMA_FROM_DEVICE); in rtw89_pci_free_rx_ring()
3249 rx_ring->buf[i] = NULL; in rtw89_pci_free_rx_ring()
3252 head = rx_ring->bd_ring.head; in rtw89_pci_free_rx_ring()
3253 dma = rx_ring->bd_ring.dma; in rtw89_pci_free_rx_ring()
3254 dma_free_coherent(&pdev->dev, ring_sz, head, dma); in rtw89_pci_free_rx_ring()
3256 rx_ring->bd_ring.head = NULL; in rtw89_pci_free_rx_ring()
3262 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_free_rx_rings()
3267 rx_ring = &rtwpci->rx_rings[i]; in rtw89_pci_free_rx_rings()
3285 dma_addr_t dma; in rtw89_pci_init_rx_bd() local
3288 return -EINVAL; in rtw89_pci_init_rx_bd()
3290 dma = dma_map_single(&pdev->dev, skb->data, buf_sz, DMA_FROM_DEVICE); in rtw89_pci_init_rx_bd()
3291 if (dma_mapping_error(&pdev->dev, dma)) in rtw89_pci_init_rx_bd()
3292 return -EBUSY; in rtw89_pci_init_rx_bd()
3298 rx_bd->buf_size = cpu_to_le16(buf_sz); in rtw89_pci_init_rx_bd()
3299 rx_bd->dma = cpu_to_le32(dma); in rtw89_pci_init_rx_bd()
3300 rx_bd->opt = le16_encode_bits(upper_32_bits(dma), RTW89_PCI_RXBD_OPT_DMA_HI); in rtw89_pci_init_rx_bd()
3301 rx_info->dma = dma; in rtw89_pci_init_rx_bd()
3311 struct rtw89_pci_tx_wd_ring *wd_ring = &tx_ring->wd_ring; in rtw89_pci_alloc_tx_wd_ring()
3313 dma_addr_t dma; in rtw89_pci_alloc_tx_wd_ring() local
3327 head = dma_alloc_coherent(&pdev->dev, ring_sz, &dma, GFP_KERNEL); in rtw89_pci_alloc_tx_wd_ring()
3329 return -ENOMEM; in rtw89_pci_alloc_tx_wd_ring()
3331 INIT_LIST_HEAD(&wd_ring->free_pages); in rtw89_pci_alloc_tx_wd_ring()
3332 wd_ring->head = head; in rtw89_pci_alloc_tx_wd_ring()
3333 wd_ring->dma = dma; in rtw89_pci_alloc_tx_wd_ring()
3334 wd_ring->page_size = page_size; in rtw89_pci_alloc_tx_wd_ring()
3335 wd_ring->page_num = page_num; in rtw89_pci_alloc_tx_wd_ring()
3339 txwd = &wd_ring->pages[i]; in rtw89_pci_alloc_tx_wd_ring()
3340 cur_paddr = dma + page_offset; in rtw89_pci_alloc_tx_wd_ring()
3343 skb_queue_head_init(&txwd->queue); in rtw89_pci_alloc_tx_wd_ring()
3344 INIT_LIST_HEAD(&txwd->list); in rtw89_pci_alloc_tx_wd_ring()
3345 txwd->paddr = cur_paddr; in rtw89_pci_alloc_tx_wd_ring()
3346 txwd->vaddr = cur_vaddr; in rtw89_pci_alloc_tx_wd_ring()
3347 txwd->len = page_size; in rtw89_pci_alloc_tx_wd_ring()
3348 txwd->seq = i; in rtw89_pci_alloc_tx_wd_ring()
3366 dma_addr_t dma; in rtw89_pci_alloc_tx_ring() local
3381 head = dma_alloc_coherent(&pdev->dev, ring_sz, &dma, GFP_KERNEL); in rtw89_pci_alloc_tx_ring()
3383 ret = -ENOMEM; in rtw89_pci_alloc_tx_ring()
3387 INIT_LIST_HEAD(&tx_ring->busy_pages); in rtw89_pci_alloc_tx_ring()
3388 tx_ring->bd_ring.head = head; in rtw89_pci_alloc_tx_ring()
3389 tx_ring->bd_ring.dma = dma; in rtw89_pci_alloc_tx_ring()
3390 tx_ring->bd_ring.len = len; in rtw89_pci_alloc_tx_ring()
3391 tx_ring->bd_ring.desc_size = desc_size; in rtw89_pci_alloc_tx_ring()
3392 tx_ring->bd_ring.addr = *txch_addr; in rtw89_pci_alloc_tx_ring()
3393 tx_ring->bd_ring.wp = 0; in rtw89_pci_alloc_tx_ring()
3394 tx_ring->bd_ring.rp = 0; in rtw89_pci_alloc_tx_ring()
3395 tx_ring->txch = txch; in rtw89_pci_alloc_tx_ring()
3408 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_alloc_tx_rings()
3409 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_alloc_tx_rings()
3417 if (info->tx_dma_ch_mask & BIT(i)) in rtw89_pci_alloc_tx_rings()
3419 tx_ring = &rtwpci->tx_rings[i]; in rtw89_pci_alloc_tx_rings()
3435 tx_ring = &rtwpci->tx_rings[i]; in rtw89_pci_alloc_tx_rings()
3447 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_alloc_rx_ring()
3451 dma_addr_t dma; in rtw89_pci_alloc_rx_ring() local
3463 head = dma_alloc_coherent(&pdev->dev, ring_sz, &dma, GFP_KERNEL); in rtw89_pci_alloc_rx_ring()
3465 ret = -ENOMEM; in rtw89_pci_alloc_rx_ring()
3469 rx_ring->bd_ring.head = head; in rtw89_pci_alloc_rx_ring()
3470 rx_ring->bd_ring.dma = dma; in rtw89_pci_alloc_rx_ring()
3471 rx_ring->bd_ring.len = len; in rtw89_pci_alloc_rx_ring()
3472 rx_ring->bd_ring.desc_size = desc_size; in rtw89_pci_alloc_rx_ring()
3473 rx_ring->bd_ring.addr = *rxch_addr; in rtw89_pci_alloc_rx_ring()
3474 if (info->rx_ring_eq_is_full) in rtw89_pci_alloc_rx_ring()
3475 rx_ring->bd_ring.wp = len - 1; in rtw89_pci_alloc_rx_ring()
3477 rx_ring->bd_ring.wp = 0; in rtw89_pci_alloc_rx_ring()
3478 rx_ring->bd_ring.rp = 0; in rtw89_pci_alloc_rx_ring()
3479 rx_ring->buf_sz = buf_sz; in rtw89_pci_alloc_rx_ring()
3480 rx_ring->diliver_skb = NULL; in rtw89_pci_alloc_rx_ring()
3481 rx_ring->diliver_desc.ready = false; in rtw89_pci_alloc_rx_ring()
3482 rx_ring->target_rx_tag = 0; in rtw89_pci_alloc_rx_ring()
3487 ret = -ENOMEM; in rtw89_pci_alloc_rx_ring()
3491 memset(skb->data, 0, buf_sz); in rtw89_pci_alloc_rx_ring()
3492 rx_ring->buf[i] = skb; in rtw89_pci_alloc_rx_ring()
3498 rx_ring->buf[i] = NULL; in rtw89_pci_alloc_rx_ring()
3508 skb = rx_ring->buf[i]; in rtw89_pci_alloc_rx_ring()
3511 dma = *((dma_addr_t *)skb->cb); in rtw89_pci_alloc_rx_ring()
3512 dma_unmap_single(&pdev->dev, dma, buf_sz, DMA_FROM_DEVICE); in rtw89_pci_alloc_rx_ring()
3514 rx_ring->buf[i] = NULL; in rtw89_pci_alloc_rx_ring()
3517 head = rx_ring->bd_ring.head; in rtw89_pci_alloc_rx_ring()
3518 dma = rx_ring->bd_ring.dma; in rtw89_pci_alloc_rx_ring()
3519 dma_free_coherent(&pdev->dev, ring_sz, head, dma); in rtw89_pci_alloc_rx_ring()
3521 rx_ring->bd_ring.head = NULL; in rtw89_pci_alloc_rx_ring()
3529 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_alloc_rx_rings()
3537 rx_ring = &rtwpci->rx_rings[i]; in rtw89_pci_alloc_rx_rings()
3553 rx_ring = &rtwpci->rx_rings[i]; in rtw89_pci_alloc_rx_rings()
3567 rtw89_err(rtwdev, "failed to alloc dma tx rings\n"); in rtw89_pci_alloc_trx_rings()
3573 rtw89_err(rtwdev, "failed to alloc dma rx rings\n"); in rtw89_pci_alloc_trx_rings()
3588 skb_queue_head_init(&rtwpci->h2c_queue); in rtw89_pci_h2c_init()
3589 skb_queue_head_init(&rtwpci->h2c_release_queue); in rtw89_pci_h2c_init()
3595 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_setup_resource()
3612 spin_lock_init(&rtwpci->irq_lock); in rtw89_pci_setup_resource()
3613 spin_lock_init(&rtwpci->trx_lock); in rtw89_pci_setup_resource()
3626 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_clear_resource()
3631 skb_queue_len(&rtwpci->h2c_queue), true); in rtw89_pci_clear_resource()
3636 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_config_intr_mask()
3637 const struct rtw89_chip_info *chip = rtwdev->chip; in rtw89_pci_config_intr_mask()
3640 if (chip->chip_id == RTL8851B) in rtw89_pci_config_intr_mask()
3643 rtwpci->halt_c2h_intrs = B_AX_HALT_C2H_INT_EN | 0; in rtw89_pci_config_intr_mask()
3645 if (rtwpci->under_recovery) { in rtw89_pci_config_intr_mask()
3646 rtwpci->intrs[0] = hs0isr_ind_int_en; in rtw89_pci_config_intr_mask()
3647 rtwpci->intrs[1] = 0; in rtw89_pci_config_intr_mask()
3649 rtwpci->intrs[0] = B_AX_TXDMA_STUCK_INT_EN | in rtw89_pci_config_intr_mask()
3658 rtwpci->intrs[1] = B_AX_HC10ISR_IND_INT_EN; in rtw89_pci_config_intr_mask()
3665 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_recovery_intr_mask_v1()
3667 rtwpci->ind_intrs = B_AX_HS0ISR_IND_INT_EN; in rtw89_pci_recovery_intr_mask_v1()
3668 rtwpci->halt_c2h_intrs = B_AX_HALT_C2H_INT_EN | B_AX_WDT_TIMEOUT_INT_EN; in rtw89_pci_recovery_intr_mask_v1()
3669 rtwpci->intrs[0] = 0; in rtw89_pci_recovery_intr_mask_v1()
3670 rtwpci->intrs[1] = 0; in rtw89_pci_recovery_intr_mask_v1()
3675 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_default_intr_mask_v1()
3677 rtwpci->ind_intrs = B_AX_HCI_AXIDMA_INT_EN | in rtw89_pci_default_intr_mask_v1()
3680 rtwpci->halt_c2h_intrs = B_AX_HALT_C2H_INT_EN | B_AX_WDT_TIMEOUT_INT_EN; in rtw89_pci_default_intr_mask_v1()
3681 rtwpci->intrs[0] = B_AX_TXDMA_STUCK_INT_EN | in rtw89_pci_default_intr_mask_v1()
3688 rtwpci->intrs[1] = B_AX_GPIO18_INT_EN; in rtw89_pci_default_intr_mask_v1()
3693 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_low_power_intr_mask_v1()
3695 rtwpci->ind_intrs = B_AX_HS1ISR_IND_INT_EN | in rtw89_pci_low_power_intr_mask_v1()
3697 rtwpci->halt_c2h_intrs = B_AX_HALT_C2H_INT_EN | B_AX_WDT_TIMEOUT_INT_EN; in rtw89_pci_low_power_intr_mask_v1()
3698 rtwpci->intrs[0] = 0; in rtw89_pci_low_power_intr_mask_v1()
3699 rtwpci->intrs[1] = B_AX_GPIO18_INT_EN; in rtw89_pci_low_power_intr_mask_v1()
3704 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_config_intr_mask_v1()
3706 if (rtwpci->under_recovery) in rtw89_pci_config_intr_mask_v1()
3708 else if (rtwpci->low_power) in rtw89_pci_config_intr_mask_v1()
3717 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_recovery_intr_mask_v2()
3719 rtwpci->ind_intrs = B_BE_HS0_IND_INT_EN0; in rtw89_pci_recovery_intr_mask_v2()
3720 rtwpci->halt_c2h_intrs = B_BE_HALT_C2H_INT_EN | B_BE_WDT_TIMEOUT_INT_EN; in rtw89_pci_recovery_intr_mask_v2()
3721 rtwpci->intrs[0] = 0; in rtw89_pci_recovery_intr_mask_v2()
3722 rtwpci->intrs[1] = 0; in rtw89_pci_recovery_intr_mask_v2()
3727 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_default_intr_mask_v2()
3729 rtwpci->ind_intrs = B_BE_HCI_AXIDMA_INT_EN0 | in rtw89_pci_default_intr_mask_v2()
3731 rtwpci->halt_c2h_intrs = B_BE_HALT_C2H_INT_EN | B_BE_WDT_TIMEOUT_INT_EN; in rtw89_pci_default_intr_mask_v2()
3732 rtwpci->intrs[0] = B_BE_RDU_CH1_INT_IMR_V1 | in rtw89_pci_default_intr_mask_v2()
3734 rtwpci->intrs[1] = B_BE_PCIE_RX_RX0P2_IMR0_V1 | in rtw89_pci_default_intr_mask_v2()
3740 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_low_power_intr_mask_v2()
3742 rtwpci->ind_intrs = B_BE_HS0_IND_INT_EN0 | in rtw89_pci_low_power_intr_mask_v2()
3744 rtwpci->halt_c2h_intrs = B_BE_HALT_C2H_INT_EN | B_BE_WDT_TIMEOUT_INT_EN; in rtw89_pci_low_power_intr_mask_v2()
3745 rtwpci->intrs[0] = 0; in rtw89_pci_low_power_intr_mask_v2()
3746 rtwpci->intrs[1] = B_BE_PCIE_RX_RX0P2_IMR0_V1 | in rtw89_pci_low_power_intr_mask_v2()
3752 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_config_intr_mask_v2()
3754 if (rtwpci->under_recovery) in rtw89_pci_config_intr_mask_v2()
3756 else if (rtwpci->low_power) in rtw89_pci_config_intr_mask_v2()
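
The interrupt-mask helpers above repeat one pattern per hardware generation: a recovery mask, a low-power mask and a default mask, selected by a small dispatcher that checks under_recovery first and low_power second. The sketch below is a standalone model of that selection only; the bit values are placeholders, not the driver's B_AX_ or B_BE_ definitions.

/* Standalone model of the interrupt-mask selection shown above; the bit
 * values below are placeholders.
 */
#include <stdbool.h>
#include <stdint.h>

#define DEMO_HALT_C2H   0x01u
#define DEMO_WDT        0x02u
#define DEMO_IND_HS0    0x04u
#define DEMO_IND_AXIDMA 0x08u
#define DEMO_RXDMA      0x10u

struct demo_intr_state {
        bool under_recovery;
        bool low_power;
        uint32_t ind_intrs;
        uint32_t halt_c2h_intrs;
        uint32_t intrs[2];
};

static void demo_recovery_mask(struct demo_intr_state *s)
{
        s->ind_intrs = DEMO_IND_HS0;
        s->halt_c2h_intrs = DEMO_HALT_C2H | DEMO_WDT;
        s->intrs[0] = 0;
        s->intrs[1] = 0;
}

static void demo_low_power_mask(struct demo_intr_state *s)
{
        s->ind_intrs = DEMO_IND_HS0;
        s->halt_c2h_intrs = DEMO_HALT_C2H | DEMO_WDT;
        s->intrs[0] = 0;
        s->intrs[1] = DEMO_RXDMA;
}

static void demo_default_mask(struct demo_intr_state *s)
{
        s->ind_intrs = DEMO_IND_AXIDMA | DEMO_IND_HS0;
        s->halt_c2h_intrs = DEMO_HALT_C2H | DEMO_WDT;
        s->intrs[0] = DEMO_RXDMA;
        s->intrs[1] = 0;
}

static void demo_config_intr_mask(struct demo_intr_state *s)
{
        if (s->under_recovery)
                demo_recovery_mask(s);
        else if (s->low_power)
                demo_low_power_mask(s);
        else
                demo_default_mask(s);
}
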
3776 ret = devm_request_threaded_irq(rtwdev->dev, pdev->irq, in rtw89_pci_request_irq()
3798 devm_free_irq(rtwdev->dev, pdev->irq, rtwdev); in rtw89_pci_free_irq()
3816 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_filter_out()
3817 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_filter_out()
3822 if (rtwdev->chip->chip_id != RTL8852C) in rtw89_pci_filter_out()
3861 return -EOPNOTSUPP; in rtw89_pci_filter_out()
3871 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_clkreq_set()
3872 const struct rtw89_pci_gen_def *gen_def = info->gen_def; in rtw89_pci_clkreq_set()
3877 gen_def->clkreq_set(rtwdev, enable); in rtw89_pci_clkreq_set()
3882 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_clkreq_set_ax()
3916 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_aspm_set()
3917 const struct rtw89_pci_gen_def *gen_def = info->gen_def; in rtw89_pci_aspm_set()
3922 gen_def->aspm_set(rtwdev, enable); in rtw89_pci_aspm_set()
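
rtw89_pci_clkreq_set() and rtw89_pci_aspm_set() above (and rtw89_pci_l1ss_set() further down) forward to a per-generation hook taken from info->gen_def instead of branching on chip IDs at every call site. The sketch below models that ops-table dispatch with made-up names; the NULL-pointer guard is added only for this example and is not claimed to be the driver's actual early-return condition.

/* Standalone model of the gen_def dispatch; names are illustrative. */
#include <stdbool.h>
#include <stdio.h>

struct demo_gen_def {
        void (*clkreq_set)(bool enable);
        void (*aspm_set)(bool enable);
};

static void demo_clkreq_set_ax(bool enable)
{
        printf("AX gen: clkreq %s\n", enable ? "enabled" : "disabled");
}

static const struct demo_gen_def demo_gen_ax = {
        .clkreq_set = demo_clkreq_set_ax,
        /* .aspm_set left NULL to exercise the guard below */
};

static void demo_clkreq_set(const struct demo_gen_def *gen, bool enable)
{
        if (gen->clkreq_set)
                gen->clkreq_set(enable);
}

static void demo_aspm_set(const struct demo_gen_def *gen, bool enable)
{
        if (gen->aspm_set)
                gen->aspm_set(enable);
}

/* usage: demo_clkreq_set(&demo_gen_ax, true); */
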
3927 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_aspm_set_ax()
3966 enum rtw89_chip_gen chip_gen = rtwdev->chip->chip_gen; in rtw89_pci_recalc_int_mit()
3967 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_recalc_int_mit()
3968 struct rtw89_traffic_stats *stats = &rtwdev->stats; in rtw89_pci_recalc_int_mit()
3969 enum rtw89_tfc_lv tx_tfc_lv = stats->tx_tfc_lv; in rtw89_pci_recalc_int_mit()
3970 enum rtw89_tfc_lv rx_tfc_lv = stats->rx_tfc_lv; in rtw89_pci_recalc_int_mit()
3973 if (rtwdev->scanning || in rtw89_pci_recalc_int_mit()
3986 rtw89_write32(rtwdev, info->mit_addr, val); in rtw89_pci_recalc_int_mit()
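
rtw89_pci_recalc_int_mit() above derives an interrupt-mitigation value from the current TX/RX traffic level and writes it to info->mit_addr, leaving mitigation off while scanning or under light traffic. The sketch below models only that decision; the traffic-level names and the returned register value are placeholders, not the driver's encoding.

/* Standalone model of the mitigation decision; values are placeholders. */
#include <stdbool.h>
#include <stdint.h>

enum demo_tfc_lv { DEMO_TFC_IDLE, DEMO_TFC_LOW, DEMO_TFC_MID, DEMO_TFC_HIGH };

static uint32_t demo_recalc_int_mit(bool scanning,
                                    enum demo_tfc_lv tx_lv,
                                    enum demo_tfc_lv rx_lv)
{
        if (scanning)
                return 0;                /* no batching while scanning */

        if (tx_lv < DEMO_TFC_HIGH && rx_lv < DEMO_TFC_HIGH)
                return 0;                /* light traffic: no batching */

        return 0x2800;                   /* placeholder "batch interrupts" value */
}
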
3991 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_link_cfg()
3992 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_link_cfg()
4005 * settings (ex. CLKREQ# not Bi-Direction), it could lead to device in rtw89_pci_link_cfg()
4027 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_l1ss_set()
4028 const struct rtw89_pci_gen_def *gen_def = info->gen_def; in rtw89_pci_l1ss_set()
4033 gen_def->l1ss_set(rtwdev, enable); in rtw89_pci_l1ss_set()
4038 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_l1ss_set_ax()
4070 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_l1ss_cfg()
4071 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_l1ss_cfg()
4089 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_cpl_timeout_cfg()
4090 struct pci_dev *pdev = rtwpci->pdev; in rtw89_pci_cpl_timeout_cfg()
4108 return -EINVAL; in rtw89_pci_poll_io_idle_ax()
4118 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_lv1rst_stop_dma_ax()
4147 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_lv1rst_start_dma_ax()
4165 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_ops_mac_lv1_recovery()
4166 const struct rtw89_pci_gen_def *gen_def = info->gen_def; in rtw89_pci_ops_mac_lv1_recovery()
4171 ret = gen_def->lv1rst_stop_dma(rtwdev); in rtw89_pci_ops_mac_lv1_recovery()
4173 rtw89_err(rtwdev, "lv1 rcvy pci stop dma fail\n"); in rtw89_pci_ops_mac_lv1_recovery()
4178 ret = gen_def->lv1rst_start_dma(rtwdev); in rtw89_pci_ops_mac_lv1_recovery()
4180 rtw89_err(rtwdev, "lv1 rcvy pci start dma fail\n"); in rtw89_pci_ops_mac_lv1_recovery()
4184 return -EINVAL; in rtw89_pci_ops_mac_lv1_recovery()
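
rtw89_pci_ops_mac_lv1_recovery() above drives level-1 recovery in two phases through the gen_def hooks: stop PCI DMA before the MAC is reset, restart it afterwards, and reject any other step with -EINVAL. A minimal standalone model of that dispatch, with stubbed stop/start helpers and illustrative step names, is shown below.

/* Minimal standalone model of the two-phase recovery dispatch. */
#include <errno.h>
#include <stdio.h>

enum demo_lv1_step { DEMO_LV1_STOP_DMA, DEMO_LV1_START_DMA };

static int demo_stop_dma(void)
{
        printf("lv1 recovery: stop pci dma\n");
        return 0;
}

static int demo_start_dma(void)
{
        printf("lv1 recovery: start pci dma\n");
        return 0;
}

static int demo_mac_lv1_recovery(enum demo_lv1_step step)
{
        switch (step) {
        case DEMO_LV1_STOP_DMA:
                return demo_stop_dma();
        case DEMO_LV1_START_DMA:
                return demo_start_dma();
        default:
                return -EINVAL;          /* unknown recovery step */
        }
}
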
4192 if (rtwdev->chip->chip_gen == RTW89_CHIP_BE) in rtw89_pci_ops_dump_err_status()
4195 if (rtwdev->chip->chip_id == RTL8852C) { in rtw89_pci_ops_dump_err_status()
4213 struct rtw89_pci *rtwpci = (struct rtw89_pci *)rtwdev->priv; in rtw89_pci_napi_poll()
4214 const struct rtw89_pci_info *info = rtwdev->pci_info; in rtw89_pci_napi_poll()
4215 const struct rtw89_pci_gen_def *gen_def = info->gen_def; in rtw89_pci_napi_poll()
4219 rtwdev->napi_budget_countdown = budget; in rtw89_pci_napi_poll()
4221 rtw89_write32(rtwdev, gen_def->isr_clear_rpq.addr, gen_def->isr_clear_rpq.data); in rtw89_pci_napi_poll()
4222 work_done = rtw89_pci_poll_rpq_dma(rtwdev, rtwpci, rtwdev->napi_budget_countdown); in rtw89_pci_napi_poll()
4226 rtw89_write32(rtwdev, gen_def->isr_clear_rxq.addr, gen_def->isr_clear_rxq.data); in rtw89_pci_napi_poll()
4227 work_done += rtw89_pci_poll_rxq_dma(rtwdev, rtwpci, rtwdev->napi_budget_countdown); in rtw89_pci_napi_poll()
4229 spin_lock_irqsave(&rtwpci->irq_lock, flags); in rtw89_pci_napi_poll()
4230 if (likely(rtwpci->running)) in rtw89_pci_napi_poll()
4232 spin_unlock_irqrestore(&rtwpci->irq_lock, flags); in rtw89_pci_napi_poll()
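
The NAPI poll above clears the RPQ/RXQ indicator registers, services the release-point queue first, bails out if that alone consumed the whole budget, then services the RX queue, and only re-enables interrupts when total work stayed under budget and the device is still marked running under the irq lock. The sketch below models just the budget accounting with stubbed queue handlers; it is not the driver's code.

/* Standalone model of the budget accounting; queue handlers are stubs. */
#include <stdbool.h>

static int demo_poll_rpq(int budget) { return budget / 4; }
static int demo_poll_rxq(int budget) { return budget / 4; }

static int demo_napi_poll(int budget, bool *reenable_irq)
{
        int work_done;

        *reenable_irq = false;

        work_done = demo_poll_rpq(budget);
        if (work_done == budget)
                return work_done;        /* budget gone: stay in polling mode */

        work_done += demo_poll_rxq(budget - work_done);
        if (work_done < budget)
                *reenable_irq = true;    /* models napi_complete_done() + enable irq */

        return work_done;
}
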
4249 if (ssid_quirks->vendor == 0 && ssid_quirks->device == 0) in rtw89_check_pci_ssid_quirks()
4252 if (ssid_quirks->vendor != pdev->vendor || in rtw89_check_pci_ssid_quirks()
4253 ssid_quirks->device != pdev->device || in rtw89_check_pci_ssid_quirks()
4254 ssid_quirks->subsystem_vendor != pdev->subsystem_vendor || in rtw89_check_pci_ssid_quirks()
4255 ssid_quirks->subsystem_device != pdev->subsystem_device) in rtw89_check_pci_ssid_quirks()
4258 bitmap_or(rtwdev->quirks, rtwdev->quirks, &ssid_quirks->bitmap, in rtw89_check_pci_ssid_quirks()
4260 rtwdev->custid = ssid_quirks->custid; in rtw89_check_pci_ssid_quirks()
4265 (int)sizeof(rtwdev->quirks), rtwdev->quirks, rtwdev->custid); in rtw89_check_pci_ssid_quirks()
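
rtw89_check_pci_ssid_quirks() above walks a quirk table terminated by an all-zero vendor/device entry, requires vendor, device and both subsystem IDs to match, and then ORs the entry's quirk bitmap into rtwdev->quirks. The standalone sketch below reproduces that matching logic with illustrative types and simply returns the bitmap instead of calling bitmap_or().

/* Standalone model of the quirk-table match; types are illustrative. */
#include <stdint.h>

struct demo_ssid_quirk {
        uint16_t vendor, device;
        uint16_t subsystem_vendor, subsystem_device;
        uint64_t bitmap;                 /* quirk bits to apply on a match */
};

struct demo_pci_ids {
        uint16_t vendor, device;
        uint16_t subsystem_vendor, subsystem_device;
};

static uint64_t demo_match_ssid_quirks(const struct demo_pci_ids *id,
                                       const struct demo_ssid_quirk *q)
{
        /* table ends with an all-zero vendor/device entry */
        for (; !(q->vendor == 0 && q->device == 0); q++) {
                if (q->vendor != id->vendor ||
                    q->device != id->device ||
                    q->subsystem_vendor != id->subsystem_vendor ||
                    q->subsystem_device != id->subsystem_device)
                        continue;
                return q->bitmap;        /* caller ORs this into its quirk set */
        }
        return 0;
}
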
4271 struct rtw89_dev *rtwdev = hw->priv; in rtw89_pci_suspend()
4272 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_suspend()
4292 if (rtwdev->chip->chip_id == RTL8852C) in rtw89_pci_l2_hci_ldo()
4317 struct rtw89_dev *rtwdev = hw->priv; in rtw89_pci_resume()
4318 enum rtw89_core_chip_id chip_id = rtwdev->chip->chip_id; in rtw89_pci_resume()
4425 info = (const struct rtw89_driver_info *)id->driver_data; in rtw89_pci_probe()
4427 rtwdev = rtw89_alloc_ieee80211_hw(&pdev->dev, in rtw89_pci_probe()
4429 info->chip, info->variant); in rtw89_pci_probe()
4431 dev_err(&pdev->dev, "failed to allocate hw\n"); in rtw89_pci_probe()
4432 return -ENOMEM; in rtw89_pci_probe()
4435 pci_info = info->bus.pci; in rtw89_pci_probe()
4437 rtwdev->pci_info = info->bus.pci; in rtw89_pci_probe()
4438 rtwdev->hci.ops = &rtw89_pci_ops; in rtw89_pci_probe()
4439 rtwdev->hci.type = RTW89_HCI_TYPE_PCIE; in rtw89_pci_probe()
4440 rtwdev->hci.rpwm_addr = pci_info->rpwm_addr; in rtw89_pci_probe()
4441 rtwdev->hci.cpwm_addr = pci_info->cpwm_addr; in rtw89_pci_probe()
4443 rtw89_check_quirks(rtwdev, info->quirks); in rtw89_pci_probe()
4444 rtw89_check_pci_ssid_quirks(rtwdev, pdev, pci_info->ssid_quirks); in rtw89_pci_probe()
4446 SET_IEEE80211_DEV(rtwdev->hw, &pdev->dev); in rtw89_pci_probe()
4492 set_bit(RTW89_FLAG_PROBE_DONE, rtwdev->flags); in rtw89_pci_probe()
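
In rtw89_pci_probe() above, the PCI ID table's driver_data points at a per-chip driver-info structure, and probe copies its bus-specific part into the freshly allocated device before any hardware access. The sketch below models only that wiring with made-up structure names; the real probe additionally sets the HCI ops, applies quirks, maps BARs and registers with mac80211.

/* Standalone model of the driver_data wiring at probe time; all names and
 * fields here are made up for illustration.
 */
#include <stdint.h>
#include <stdlib.h>

struct demo_pci_info {
        uint32_t rpwm_addr;
        uint32_t cpwm_addr;
};

struct demo_driver_info {
        const struct demo_pci_info *pci;
};

struct demo_dev {
        const struct demo_pci_info *pci_info;
        uint32_t rpwm_addr;
        uint32_t cpwm_addr;
};

static struct demo_dev *demo_probe(uintptr_t driver_data)
{
        const struct demo_driver_info *info =
                (const struct demo_driver_info *)driver_data;
        struct demo_dev *dev = calloc(1, sizeof(*dev));

        if (!dev)
                return NULL;             /* models the -ENOMEM path */

        dev->pci_info = info->pci;       /* bus-specific parameters from the id table */
        dev->rpwm_addr = info->pci->rpwm_addr;
        dev->cpwm_addr = info->pci->cpwm_addr;
        return dev;
}
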
4518 rtwdev = hw->priv; in rtw89_pci_remove()