/* Excerpts from the MediaTek mt76 wireless driver core (mac80211.c).
 * Non-contiguous lines only; elided code is marked with "...".
 */
// SPDX-License-Identifier: ISC

/* 6 GHz channel table (mt76_channels_6ghz[]), sub-band markers: */
	/* UNII-5 */
	...
	/* UNII-6 */
	...
	/* UNII-7 */
	...
	/* UNII-8 */

/* mt76_led_init() */
	struct mt76_dev *dev = phy->dev;
	struct ieee80211_hw *hw = phy->hw;
	struct device_node *np = dev->dev->of_node;

	if (!phy->leds.cdev.brightness_set && !phy->leds.cdev.blink_set)
	...
			dev_info(dev->dev,
	...
		if (phy == &dev->phy) {
	...
			if (!of_property_read_u32(np, "led-sources", &led_pin))
				phy->leds.pin = led_pin;

			phy->leds.al =
				of_property_read_bool(np, "led-active-low");
	...
	snprintf(phy->leds.name, sizeof(phy->leds.name), "mt76-%s",
		 wiphy_name(hw->wiphy));

	phy->leds.cdev.name = phy->leds.name;
	phy->leds.cdev.default_trigger =
	...
	dev_info(dev->dev,
		 "registering led '%s'\n", phy->leds.name);

	return led_classdev_register(dev->dev, &phy->leds.cdev);

/* mt76_led_cleanup() */
	if (!phy->leds.cdev.brightness_set && !phy->leds.cdev.blink_set)
	...
	led_classdev_unregister(&phy->leds.cdev);

/* mt76_init_stream_cap() */
	struct ieee80211_sta_ht_cap *ht_cap = &sband->ht_cap;
	int i, nstream = hweight8(phy->antenna_mask);
	...
		ht_cap->cap |= IEEE80211_HT_CAP_TX_STBC;
	...
		ht_cap->cap &= ~IEEE80211_HT_CAP_TX_STBC;
	...
		ht_cap->mcs.rx_mask[i] = i < nstream ? 0xff : 0;
	...
	vht_cap = &sband->vht_cap;
	...
		vht_cap->cap |= IEEE80211_VHT_CAP_TXSTBC;
	...
		vht_cap->cap &= ~IEEE80211_VHT_CAP_TXSTBC;
	vht_cap->cap |= IEEE80211_VHT_CAP_TX_ANTENNA_PATTERN |
	...
	vht_cap->vht_mcs.rx_mcs_map = cpu_to_le16(mcs_map);
	vht_cap->vht_mcs.tx_mcs_map = cpu_to_le16(mcs_map);
	if (ieee80211_hw_check(phy->hw, SUPPORTS_VHT_EXT_NSS_BW))
		vht_cap->vht_mcs.tx_highest |=
	...

/* mt76_set_stream_caps() */
	if (phy->cap.has_2ghz)
		mt76_init_stream_cap(phy, &phy->sband_2g.sband, false);
	if (phy->cap.has_5ghz)
		mt76_init_stream_cap(phy, &phy->sband_5g.sband, vht);
	if (phy->cap.has_6ghz)
		mt76_init_stream_cap(phy, &phy->sband_6g.sband, vht);

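/*
 * Illustrative sketch, not part of this file: a chipset driver typically
 * refreshes the advertised HT/VHT stream capabilities after its antenna
 * configuration changes, e.g. from a .set_antenna style callback. The
 * callback name and exact mac80211 prototype below are assumptions; only
 * mt76_set_stream_caps() and the mt76_phy fields come from the code above.
 */
static int my_set_antenna(struct ieee80211_hw *hw, u32 tx_ant, u32 rx_ant)
{
	struct mt76_phy *mphy = hw->priv;

	if (!tx_ant || tx_ant != rx_ant)
		return -EINVAL;

	mphy->antenna_mask = tx_ant;	/* consumed by mt76_init_stream_cap() */
	mphy->chainmask = tx_ant;

	mt76_set_stream_caps(mphy, true);	/* rebuild per-band MCS masks */

	return 0;
}
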
/* mt76_init_sband(), parameter list excerpt: */
		struct ieee80211_rate *rates, int n_rates,
	...
	struct ieee80211_supported_band *sband = &msband->sband;
	...
	struct mt76_dev *dev = phy->dev;
	...
	chanlist = devm_kmemdup(dev->dev, chan, size, GFP_KERNEL);
	...
		return -ENOMEM;

	msband->chan = devm_kcalloc(dev->dev, n_chan, sizeof(*msband->chan),
	...
	if (!msband->chan)
		return -ENOMEM;

	sband->channels = chanlist;
	sband->n_channels = n_chan;
	sband->bitrates = rates;
	sband->n_bitrates = n_rates;
	...
	ht_cap = &sband->ht_cap;
	ht_cap->ht_supported = true;
	ht_cap->cap |= IEEE80211_HT_CAP_SUP_WIDTH_20_40 |
	...
	ht_cap->mcs.tx_params = IEEE80211_HT_MCS_TX_DEFINED;
	ht_cap->ampdu_factor = IEEE80211_HT_MAX_AMPDU_64K;
	...
	vht_cap = &sband->vht_cap;
	vht_cap->vht_supported = true;
	vht_cap->cap |= IEEE80211_VHT_CAP_RXLDPC |
	...

mt76_init_sband_2g(struct mt76_phy *phy, struct ieee80211_rate *rates,
	...
	phy->hw->wiphy->bands[NL80211_BAND_2GHZ] = &phy->sband_2g.sband;

	return mt76_init_sband(phy, &phy->sband_2g, mt76_channels_2ghz,
			       ARRAY_SIZE(mt76_channels_2ghz), rates,
	...

mt76_init_sband_5g(struct mt76_phy *phy, struct ieee80211_rate *rates,
	...
	phy->hw->wiphy->bands[NL80211_BAND_5GHZ] = &phy->sband_5g.sband;

	return mt76_init_sband(phy, &phy->sband_5g, mt76_channels_5ghz,
			       ARRAY_SIZE(mt76_channels_5ghz), rates,
	...

mt76_init_sband_6g(struct mt76_phy *phy, struct ieee80211_rate *rates,
	...
	phy->hw->wiphy->bands[NL80211_BAND_6GHZ] = &phy->sband_6g.sband;

	return mt76_init_sband(phy, &phy->sband_6g, mt76_channels_6ghz,
			       ARRAY_SIZE(mt76_channels_6ghz), rates,
	...

/* mt76_check_sband() */
	struct ieee80211_supported_band *sband = &msband->sband;
	...
	for (i = 0; i < sband->n_channels; i++) {
		if (sband->channels[i].flags & IEEE80211_CHAN_DISABLED)
	...
		cfg80211_chandef_create(&phy->chandef, &sband->channels[0],
	...
		phy->chan_state = &msband->chan[0];
		phy->dev->band_phys[band] = phy;
	...
	sband->n_channels = 0;
	if (phy->hw->wiphy->bands[band] == sband)
		phy->hw->wiphy->bands[band] = NULL;

/* mt76_phy_init() */
	struct mt76_dev *dev = phy->dev;
	struct wiphy *wiphy = hw->wiphy;

	INIT_LIST_HEAD(&phy->tx_list);
	spin_lock_init(&phy->tx_lock);
	INIT_DELAYED_WORK(&phy->roc_work, mt76_roc_complete_work);

	if ((void *)phy != hw->priv)
	...
	SET_IEEE80211_DEV(hw, dev->dev);
	SET_IEEE80211_PERM_ADDR(hw, phy->macaddr);

	wiphy->features |= NL80211_FEATURE_ACTIVE_MONITOR |
	...
	wiphy->flags |= WIPHY_FLAG_HAS_CHANNEL_SWITCH |
	...
	wiphy->available_antennas_tx = phy->antenna_mask;
	wiphy->available_antennas_rx = phy->antenna_mask;

	wiphy->sar_capa = &mt76_sar_capa;
	phy->frp = devm_kcalloc(dev->dev, wiphy->sar_capa->num_freq_ranges,
	...
	if (!phy->frp)
		return -ENOMEM;

	hw->txq_data_size = sizeof(struct mt76_txq);
	hw->uapsd_max_sp_len = IEEE80211_WMM_IE_STA_QOSINFO_SP_ALL;

	if (!hw->max_tx_fragments)
		hw->max_tx_fragments = 16;
	...
	if (!(dev->drv->drv_flags & MT_DRV_AMSDU_OFFLOAD) &&
	    hw->max_tx_fragments > 1) {
	...

/* mt76_alloc_radio_phy() */
	struct ieee80211_hw *hw = dev->phy.hw;
	...
	phy = devm_kzalloc(dev->dev, size + phy_size, GFP_KERNEL);
	...
	phy->dev = dev;
	phy->hw = hw;
	phy->priv = (void *)phy + phy_size;
	phy->band_idx = band_idx;

/* mt76_alloc_phy() */
	phy = hw->priv;
	phy->dev = dev;
	phy->hw = hw;
	phy->priv = hw->priv + phy_size;
	phy->band_idx = band_idx;

	hw->wiphy->flags |= WIPHY_FLAG_IBSS_RSN;
	hw->wiphy->interface_modes =
	...

/* mt76_register_phy(), parameter list excerpt: */
		      struct ieee80211_rate *rates, int n_rates)
	...
	ret = mt76_phy_init(phy, phy->hw);
	...
	if (phy->cap.has_2ghz) {
		ret = mt76_init_sband_2g(phy, rates, n_rates);
	...
	if (phy->cap.has_5ghz) {
		ret = mt76_init_sband_5g(phy, rates + 4, n_rates - 4, vht);
	...
	if (phy->cap.has_6ghz) {
		ret = mt76_init_sband_6g(phy, rates + 4, n_rates - 4);
	...
	wiphy_read_of_freq_limits(phy->hw->wiphy);
	mt76_check_sband(phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(phy, &phy->sband_5g, NL80211_BAND_5GHZ);
	mt76_check_sband(phy, &phy->sband_6g, NL80211_BAND_6GHZ);
	...
	if ((void *)phy == phy->hw->priv) {
		ret = ieee80211_register_hw(phy->hw);
	...
	set_bit(MT76_STATE_REGISTERED, &phy->state);
	phy->dev->phys[phy->band_idx] = phy;

/* mt76_unregister_phy() */
	struct mt76_dev *dev = phy->dev;
	...
	if (!test_bit(MT76_STATE_REGISTERED, &phy->state))
	...
	ieee80211_unregister_hw(phy->hw);
	dev->phys[phy->band_idx] = NULL;

/* mt76_create_page_pool() */
		.dev = dev->dma_dev,
	...
	int idx = is_qrx ? q - dev->q_rx : -1;
	...
	if (idx >= 0 && idx < ARRAY_SIZE(dev->napi))
		pp_params.napi = &dev->napi[idx];
	...
	q->page_pool = page_pool_create(&pp_params);
	if (IS_ERR(q->page_pool)) {
		int err = PTR_ERR(q->page_pool);
	...
		q->page_pool = NULL;

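/*
 * Note on the fragment above: each RX queue gets its own page_pool for
 * buffer recycling, and when the queue maps to one of the device's NAPI
 * contexts the pool is bound to it via pp_params.napi, which lets
 * page_pool use its in-softirq fast-path recycling for that queue.
 */
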
/* mt76_alloc_device() */
	dev = hw->priv;
	dev->hw = hw;
	dev->dev = pdev;
	dev->drv = drv_ops;
	dev->dma_dev = pdev;

	phy = &dev->phy;
	phy->dev = dev;
	phy->hw = hw;
	phy->band_idx = MT_BAND0;
	dev->phys[phy->band_idx] = phy;

	spin_lock_init(&dev->rx_lock);
	spin_lock_init(&dev->lock);
	spin_lock_init(&dev->cc_lock);
	spin_lock_init(&dev->status_lock);
	spin_lock_init(&dev->wed_lock);
	mutex_init(&dev->mutex);
	init_waitqueue_head(&dev->tx_wait);

	skb_queue_head_init(&dev->mcu.res_q);
	init_waitqueue_head(&dev->mcu.wait);
	mutex_init(&dev->mcu.mutex);
	dev->tx_worker.fn = mt76_tx_worker;

	hw->wiphy->flags |= WIPHY_FLAG_IBSS_RSN;
	hw->wiphy->interface_modes =
	...
	spin_lock_init(&dev->token_lock);
	idr_init(&dev->token);

	spin_lock_init(&dev->rx_token_lock);
	idr_init(&dev->rx_token);

	INIT_LIST_HEAD(&dev->wcid_list);
	INIT_LIST_HEAD(&dev->sta_poll_list);
	spin_lock_init(&dev->sta_poll_lock);

	INIT_LIST_HEAD(&dev->txwi_cache);
	INIT_LIST_HEAD(&dev->rxwi_cache);
	dev->token_size = dev->drv->token_size;
	INIT_DELAYED_WORK(&dev->scan_work, mt76_scan_work);

	for (i = 0; i < ARRAY_SIZE(dev->q_rx); i++)
		skb_queue_head_init(&dev->rx_skb[i]);

	dev->wq = alloc_ordered_workqueue("mt76", 0);
	if (!dev->wq) {
	...

/* mt76_register_device(), parameter list excerpt: */
			 struct ieee80211_rate *rates, int n_rates)
	...
	struct ieee80211_hw *hw = dev->hw;
	struct mt76_phy *phy = &dev->phy;
	...
	dev_set_drvdata(dev->dev, dev);
	mt76_wcid_init(&dev->global_wcid, phy->band_idx);
	...
	if (phy->cap.has_2ghz) {
		ret = mt76_init_sband_2g(phy, rates, n_rates);
	...
	if (phy->cap.has_5ghz) {
		ret = mt76_init_sband_5g(phy, rates + 4, n_rates - 4, vht);
	...
	if (phy->cap.has_6ghz) {
		ret = mt76_init_sband_6g(phy, rates + 4, n_rates - 4);
	...
	wiphy_read_of_freq_limits(hw->wiphy);
	mt76_check_sband(&dev->phy, &phy->sband_2g, NL80211_BAND_2GHZ);
	mt76_check_sband(&dev->phy, &phy->sband_5g, NL80211_BAND_5GHZ);
	mt76_check_sband(&dev->phy, &phy->sband_6g, NL80211_BAND_6GHZ);
	...
	WARN_ON(mt76_worker_setup(hw, &dev->tx_worker, NULL, "tx"));
	set_bit(MT76_STATE_REGISTERED, &phy->state);
	sched_set_fifo_low(dev->tx_worker.task);

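/*
 * Illustrative sketch, not part of this file: callers pass a legacy bitrate
 * table whose first four entries are the 2.4 GHz 11b (CCK) rates, which is
 * why the 5/6 GHz sbands above are initialized with "rates + 4, n_rates - 4".
 * The my_dev layout and the hw_value numbering below are assumptions;
 * .bitrate is in units of 100 kbit/s as defined by struct ieee80211_rate.
 */
static struct ieee80211_rate my_rates[] = {
	{ .bitrate = 10,  .hw_value = 0 },	/* 1 Mbit/s, CCK */
	{ .bitrate = 20,  .hw_value = 1 },	/* 2 Mbit/s, CCK */
	{ .bitrate = 55,  .hw_value = 2 },	/* 5.5 Mbit/s, CCK */
	{ .bitrate = 110, .hw_value = 3 },	/* 11 Mbit/s, CCK */
	{ .bitrate = 60,  .hw_value = 4 },	/* 6 Mbit/s, OFDM */
	{ .bitrate = 90,  .hw_value = 5 },
	{ .bitrate = 120, .hw_value = 6 },
	{ .bitrate = 180, .hw_value = 7 },
	{ .bitrate = 240, .hw_value = 8 },
	{ .bitrate = 360, .hw_value = 9 },
	{ .bitrate = 480, .hw_value = 10 },
	{ .bitrate = 540, .hw_value = 11 },	/* 54 Mbit/s, OFDM */
};

static int my_driver_register(struct my_dev *dev)
{
	/* vht=true additionally advertises VHT caps on the 5 GHz sband */
	return mt76_register_device(&dev->mt76, true, my_rates,
				    ARRAY_SIZE(my_rates));
}
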
/* mt76_unregister_device() */
	struct ieee80211_hw *hw = dev->hw;

	if (!test_bit(MT76_STATE_REGISTERED, &dev->phy.state))
	...
		mt76_led_cleanup(&dev->phy);
	...
	mt76_wcid_cleanup(dev, &dev->global_wcid);

/* mt76_free_device() */
	mt76_worker_teardown(&dev->tx_worker);
	if (dev->wq) {
		destroy_workqueue(dev->wq);
		dev->wq = NULL;
	...
	ieee80211_free_hw(dev->hw);

/* mt76_vif_phy() */
	struct mt76_vif_link *mlink = (struct mt76_vif_link *)vif->drv_priv;
	...
	if (!hw->wiphy->n_radio)
		return hw->priv;

	if (!mlink->ctx)
	...
	ctx = (struct mt76_chanctx *)mlink->ctx->drv_priv;
	return ctx->phy;

/* mt76_rx_release_amsdu() */
	struct sk_buff *skb = phy->rx_amsdu[q].head;
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_dev *dev = phy->dev;

	phy->rx_amsdu[q].head = NULL;
	phy->rx_amsdu[q].tail = NULL;
	...
	 * A single MSDU can be parsed as A-MSDU when the unauthenticated A-MSDU
	...
	if (skb_shinfo(skb)->frag_list) {
	...
		if (!(status->flag & RX_FLAG_8023)) {
	...
			if ((status->flag &
	...
		if (ether_addr_equal(skb->data + offset, rfc1042_header)) {
	...
	__skb_queue_tail(&dev->rx_skb[q], skb);

/* mt76_rx_release_burst() */
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;

	if (phy->rx_amsdu[q].head &&
	    (!status->amsdu || status->first_amsdu ||
	     status->seqno != phy->rx_amsdu[q].seqno))
	...
	if (!phy->rx_amsdu[q].head) {
		phy->rx_amsdu[q].tail = &skb_shinfo(skb)->frag_list;
		phy->rx_amsdu[q].seqno = status->seqno;
		phy->rx_amsdu[q].head = skb;
	...
		*phy->rx_amsdu[q].tail = skb;
		phy->rx_amsdu[q].tail = &skb->next;
	...
	if (!status->amsdu || status->last_amsdu)

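/*
 * Reading the two fragments above together: subframes of one hardware-
 * reported A-MSDU burst (same rx sequence number) are chained onto the head
 * skb's frag_list; the burst is flushed to dev->rx_skb[q] when the last
 * subframe arrives or when an unrelated frame shows up, and
 * mt76_rx_release_amsdu() drops bursts whose first subframe looks like a
 * spoofed single MSDU (an LLC/SNAP header where the DA should be).
 */
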
/* mt76_rx() */
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_phy *phy = mt76_dev_phy(dev, status->phy_idx);

	if (!test_bit(MT76_STATE_RUNNING, &phy->state)) {
	...
	if (phy->test.state == MT76_TM_STATE_RX_FRAMES) {
		phy->test.rx_stats.packets[q]++;
		if (status->flag & RX_FLAG_FAILED_FCS_CRC)
			phy->test.rx_stats.fcs_error[q]++;

/* mt76_has_tx_pending() */
		q = phy->q_tx[i];
		if (q && q->queued)

/* mt76_channel_state() */
	if (c->band == NL80211_BAND_2GHZ)
		msband = &phy->sband_2g;
	else if (c->band == NL80211_BAND_6GHZ)
		msband = &phy->sband_6g;
	...
		msband = &phy->sband_5g;

	idx = c - &msband->sband.channels[0];
	return &msband->chan[idx];

/* mt76_update_survey_active_time() */
	struct mt76_channel_state *state = phy->chan_state;

	state->cc_active += ktime_to_us(ktime_sub(time,
						  phy->survey_time));
	phy->survey_time = time;

/* mt76_update_survey() */
	struct mt76_dev *dev = phy->dev;
	...
	if (dev->drv->update_survey)
		dev->drv->update_survey(phy);
	...
	if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME) {
		struct mt76_channel_state *state = phy->chan_state;

		spin_lock_bh(&dev->cc_lock);
		state->cc_bss_rx += dev->cur_cc_bss_rx;
		dev->cur_cc_bss_rx = 0;
		spin_unlock_bh(&dev->cc_lock);

/* __mt76_set_channel() */
	struct mt76_dev *dev = phy->dev;
	...
	set_bit(MT76_RESET, &phy->state);

	mt76_worker_disable(&dev->tx_worker);
	wait_event_timeout(dev->tx_wait, !mt76_has_tx_pending(phy), timeout);
	...
	if (phy->chandef.chan->center_freq != chandef->chan->center_freq ||
	    phy->chandef.width != chandef->width)
		phy->dfs_state = MT_DFS_STATE_UNKNOWN;

	phy->chandef = *chandef;
	phy->chan_state = mt76_channel_state(phy, chandef->chan);
	phy->offchannel = offchannel;
	...
		phy->main_chandef = *chandef;

	if (chandef->chan != phy->main_chandef.chan)
		memset(phy->chan_state, 0, sizeof(*phy->chan_state));

	ret = dev->drv->set_channel(phy);

	clear_bit(MT76_RESET, &phy->state);
	mt76_worker_enable(&dev->tx_worker);
	mt76_worker_schedule(&dev->tx_worker);

/* mt76_set_channel() */
	struct mt76_dev *dev = phy->dev;
	...
	cancel_delayed_work_sync(&phy->mac_work);

	mutex_lock(&dev->mutex);
	...
	mutex_unlock(&dev->mutex);

/* mt76_update_channel() */
	struct ieee80211_hw *hw = phy->hw;
	struct cfg80211_chan_def *chandef = &hw->conf.chandef;
	bool offchannel = hw->conf.flags & IEEE80211_CONF_OFFCHANNEL;

	phy->radar_enabled = hw->conf.radar_enabled;

/* mt76_get_survey_sband() */
	if (*idx < phy->sband_2g.sband.n_channels)
		return &phy->sband_2g;

	*idx -= phy->sband_2g.sband.n_channels;
	if (*idx < phy->sband_5g.sband.n_channels)
		return &phy->sband_5g;

	*idx -= phy->sband_5g.sband.n_channels;
	if (*idx < phy->sband_6g.sband.n_channels)
		return &phy->sband_6g;

	*idx -= phy->sband_6g.sband.n_channels;

/* mt76_get_survey() */
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	...
	mutex_lock(&dev->mutex);

	for (phy_idx = 0; phy_idx < ARRAY_SIZE(dev->phys); phy_idx++) {
	...
		phy = dev->phys[phy_idx];
		if (!phy || phy->hw != hw)
	...
		if (idx == 0 && phy->dev->drv->update_survey)
	...
		if (sband || !hw->wiphy->n_radio)
	...
		ret = -ENOENT;
	...
	chan = &sband->sband.channels[idx];
	...
	survey->channel = chan;
	survey->filled = SURVEY_INFO_TIME | SURVEY_INFO_TIME_BUSY;
	survey->filled |= dev->drv->survey_flags;
	if (state->noise)
		survey->filled |= SURVEY_INFO_NOISE_DBM;

	if (chan == phy->main_chandef.chan) {
		survey->filled |= SURVEY_INFO_IN_USE;

		if (dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME)
			survey->filled |= SURVEY_INFO_TIME_BSS_RX;
	...
	survey->time_busy = div_u64(state->cc_busy, 1000);
	survey->time_rx = div_u64(state->cc_rx, 1000);
	survey->time = div_u64(state->cc_active, 1000);
	survey->noise = state->noise;

	spin_lock_bh(&dev->cc_lock);
	survey->time_bss_rx = div_u64(state->cc_bss_rx, 1000);
	survey->time_tx = div_u64(state->cc_tx, 1000);
	spin_unlock_bh(&dev->cc_lock);
	...
	mutex_unlock(&dev->mutex);

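/*
 * Unit note for the survey path above: the cc_* channel-time counters are
 * accumulated in microseconds (see ktime_to_us() in
 * mt76_update_survey_active_time()), while cfg80211 survey times are
 * reported in milliseconds, hence the div_u64(..., 1000) conversions.
 */
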
/* mt76_wcid_key_setup() */
	wcid->rx_check_pn = false;
	...
	if (key->cipher != WLAN_CIPHER_SUITE_CCMP)
	...
	wcid->rx_check_pn = true;
	...
		memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));
	...
	ieee80211_get_key_rx_seq(key, -1, &seq);
	memcpy(wcid->rx_key_pn[i], seq.ccmp.pn, sizeof(seq.ccmp.pn));

/* mt76_rx_signal() */
	int signal = -128;
	...
		diff = signal - cur;
	...

/* mt76_rx_convert() */
	mstat = *((struct mt76_rx_status *)skb->cb);
	...
	status->flag = mstat.flag;
	status->freq = mstat.freq;
	status->enc_flags = mstat.enc_flags;
	status->encoding = mstat.encoding;
	status->bw = mstat.bw;
	if (status->encoding == RX_ENC_EHT) {
		status->eht.ru = mstat.eht.ru;
		status->eht.gi = mstat.eht.gi;
	...
		status->he_ru = mstat.he_ru;
		status->he_gi = mstat.he_gi;
		status->he_dcm = mstat.he_dcm;
	...
	status->rate_idx = mstat.rate_idx;
	status->nss = mstat.nss;
	status->band = mstat.band;
	status->signal = mstat.signal;
	status->chains = mstat.chains;
	status->ampdu_reference = mstat.ampdu_ref;
	status->device_timestamp = mstat.timestamp;
	status->mactime = mstat.timestamp;
	status->signal = mt76_rx_signal(mstat.chains, mstat.chain_signal);
	if (status->signal <= -128)
		status->flag |= RX_FLAG_NO_SIGNAL_VAL;

	if (ieee80211_is_beacon(hdr->frame_control) ||
	    ieee80211_is_probe_resp(hdr->frame_control))
		status->boottime_ns = ktime_get_boottime_ns();

	BUILD_BUG_ON(sizeof(mstat) > sizeof(skb->cb));
	BUILD_BUG_ON(sizeof(status->chain_signal) !=
	...
	memcpy(status->chain_signal, mstat.chain_signal,
	...
		status->link_valid = mstat.wcid->link_valid;
		status->link_id = mstat.wcid->link_id;

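/*
 * Note on mt76_rx_convert(): the driver keeps its private mt76_rx_status in
 * skb->cb and rewrites that same control-buffer area as mac80211's
 * ieee80211_rx_status just before handing the frame up; the local mstat
 * copy plus the BUILD_BUG_ON() size checks keep that in-place conversion
 * safe.
 */
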
/* mt76_check_ccmp_pn() */
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;
	...
	if (!(status->flag & RX_FLAG_DECRYPTED))
	...
	if (status->flag & RX_FLAG_ONLY_MONITOR)
	...
	if (!wcid || !wcid->rx_check_pn)
	...
	security_idx = status->qos_ctl & IEEE80211_QOS_CTL_TID_MASK;
	if (status->flag & RX_FLAG_8023)
	...
	if (!(status->flag & RX_FLAG_IV_STRIPPED)) {
	...
		    !ieee80211_is_first_frag(hdr->frame_control))
	...
	/* IEEE 802.11-2020, 12.5.3.4.4 "PN and replay detection" c):
	...
	if (ieee80211_is_mgmt(hdr->frame_control) &&
	    !ieee80211_has_tods(hdr->frame_control))
	...
	BUILD_BUG_ON(sizeof(status->iv) != sizeof(wcid->rx_key_pn[0]));
	ret = memcmp(status->iv, wcid->rx_key_pn[security_idx],
		     sizeof(status->iv));
	...
		status->flag |= RX_FLAG_ONLY_MONITOR;
	...
	memcpy(wcid->rx_key_pn[security_idx], status->iv, sizeof(status->iv));

	if (status->flag & RX_FLAG_IV_STRIPPED)
		status->flag |= RX_FLAG_PN_VALIDATED;

/* mt76_airtime_report() */
	struct mt76_wcid *wcid = status->wcid;
	...
		.enc_flags = status->enc_flags,
		.rate_idx = status->rate_idx,
		.encoding = status->encoding,
		.band = status->band,
		.nss = status->nss,
		.bw = status->bw,
	...
	u8 tidno = status->qos_ctl & IEEE80211_QOS_CTL_TID_MASK;

	airtime = ieee80211_calc_rx_airtime(dev->hw, &info, len);
	spin_lock(&dev->cc_lock);
	dev->cur_cc_bss_rx += airtime;
	spin_unlock(&dev->cc_lock);

	if (!wcid || !wcid->sta)

/* mt76_airtime_flush_ampdu() */
	if (!dev->rx_ampdu_len)
	...
	wcid_idx = dev->rx_ampdu_status.wcid_idx;
	if (wcid_idx < ARRAY_SIZE(dev->wcid))
		wcid = rcu_dereference(dev->wcid[wcid_idx]);
	...
	dev->rx_ampdu_status.wcid = wcid;

	mt76_airtime_report(dev, &dev->rx_ampdu_status, dev->rx_ampdu_len);

	dev->rx_ampdu_len = 0;
	dev->rx_ampdu_ref = 0;

/* mt76_airtime_check() */
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	struct mt76_wcid *wcid = status->wcid;

	if (!(dev->drv->drv_flags & MT_DRV_SW_RX_AIRTIME))
	...
	if (!wcid || !wcid->sta) {
	...
		if (status->flag & RX_FLAG_8023)
	...
		if (!ether_addr_equal(hdr->addr1, dev->phy.macaddr))
	...
	if (!(status->flag & RX_FLAG_AMPDU_DETAILS) ||
	    status->ampdu_ref != dev->rx_ampdu_ref)
	...
	if (status->flag & RX_FLAG_AMPDU_DETAILS) {
		if (!dev->rx_ampdu_len ||
		    status->ampdu_ref != dev->rx_ampdu_ref) {
			dev->rx_ampdu_status = *status;
			dev->rx_ampdu_status.wcid_idx = wcid ? wcid->idx : 0xff;
			dev->rx_ampdu_ref = status->ampdu_ref;
	...
		dev->rx_ampdu_len += skb->len;
	...
	mt76_airtime_report(dev, status, skb->len);

/* mt76_check_sta() */
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	...
	struct mt76_wcid *wcid = status->wcid;
	u8 tidno = status->qos_ctl & IEEE80211_QOS_CTL_TID_MASK;
	...
	hw = mt76_phy_hw(dev, status->phy_idx);
	if (ieee80211_is_pspoll(hdr->frame_control) && !wcid &&
	    !(status->flag & RX_FLAG_8023)) {
		sta = ieee80211_find_sta_by_ifaddr(hw, hdr->addr2, NULL);
	...
			wcid = status->wcid = (struct mt76_wcid *)sta->drv_priv;
	...
	if (!wcid || !wcid->sta)
	...
	if (status->signal <= 0)
		ewma_signal_add(&wcid->rssi, -status->signal);

	wcid->inactive_count = 0;

	if (status->flag & RX_FLAG_8023)
	...
	if (!test_bit(MT_WCID_FLAG_CHECK_PS, &wcid->flags))
	...
	if (ieee80211_is_pspoll(hdr->frame_control)) {
	...
	if (ieee80211_has_morefrags(hdr->frame_control) ||
	    !(ieee80211_is_mgmt(hdr->frame_control) ||
	      ieee80211_is_data(hdr->frame_control)))
	...
	ps = ieee80211_has_pm(hdr->frame_control);

	if (ps && (ieee80211_is_data_qos(hdr->frame_control) ||
		   ieee80211_is_qos_nullfunc(hdr->frame_control)))
	...
	if (!!test_bit(MT_WCID_FLAG_PS, &wcid->flags) == ps)
	...
		set_bit(MT_WCID_FLAG_PS, &wcid->flags);

	if (dev->drv->sta_ps)
		dev->drv->sta_ps(dev, sta, ps);
	...
		clear_bit(MT_WCID_FLAG_PS, &wcid->flags);

/* mt76_rx_complete() */
	spin_lock(&dev->rx_lock);
	...
		struct sk_buff *nskb = skb_shinfo(skb)->frag_list;
	...
		skb_shinfo(skb)->frag_list = NULL;
	...
			nskb = nskb->next;
			skb->next = NULL;
	...
	spin_unlock(&dev->rx_lock);

/* mt76_rx_poll_complete() */
	while ((skb = __skb_dequeue(&dev->rx_skb[q])) != NULL) {
	...
		if (mtk_wed_device_active(&dev->mmio.wed))
	...

/* mt76_sta_add() */
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	struct mt76_dev *dev = phy->dev;
	...
	mutex_lock(&dev->mutex);

	ret = dev->drv->sta_add(dev, vif, sta);
	...
	for (i = 0; i < ARRAY_SIZE(sta->txq); i++) {
	...
		if (!sta->txq[i])
	...
		mtxq = (struct mt76_txq *)sta->txq[i]->drv_priv;
		mtxq->wcid = wcid->idx;
	...
	ewma_signal_init(&wcid->rssi);
	rcu_assign_pointer(dev->wcid[wcid->idx], wcid);
	phy->num_sta++;

	mt76_wcid_init(wcid, phy->band_idx);
	...
	mutex_unlock(&dev->mutex);

/* __mt76_sta_remove() */
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;
	int i, idx = wcid->idx;

	for (i = 0; i < ARRAY_SIZE(wcid->aggr); i++)
	...
	if (dev->drv->sta_remove)
		dev->drv->sta_remove(dev, vif, sta);
	...
	mt76_wcid_mask_clear(dev->wcid_mask, idx);
	phy->num_sta--;

/* mt76_sta_remove() */
	struct mt76_dev *dev = phy->dev;

	mutex_lock(&dev->mutex);
	...
	mutex_unlock(&dev->mutex);

/* mt76_sta_state() */
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	...
		return -EINVAL;
	...
	if (!dev->drv->sta_event)
	...
	return dev->drv->sta_event(dev, vif, sta, ev);

/* mt76_sta_pre_rcu_remove() */
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_wcid *wcid = (struct mt76_wcid *)sta->drv_priv;

	mutex_lock(&dev->mutex);
	spin_lock_bh(&dev->status_lock);
	rcu_assign_pointer(dev->wcid[wcid->idx], NULL);
	spin_unlock_bh(&dev->status_lock);
	mutex_unlock(&dev->mutex);

/* mt76_wcid_init() */
	wcid->hw_key_idx = -1;
	wcid->phy_idx = band_idx;

	INIT_LIST_HEAD(&wcid->tx_list);
	skb_queue_head_init(&wcid->tx_pending);
	skb_queue_head_init(&wcid->tx_offchannel);

	INIT_LIST_HEAD(&wcid->list);
	idr_init(&wcid->pktid);

	INIT_LIST_HEAD(&wcid->poll_list);

/* mt76_wcid_cleanup() */
	struct mt76_phy *phy = mt76_dev_phy(dev, wcid->phy_idx);
	...
	mt76_tx_status_skb_get(dev, wcid, -1, &list);
	...
	idr_destroy(&wcid->pktid);

	spin_lock_bh(&phy->tx_lock);

	if (!list_empty(&wcid->tx_list))
		list_del_init(&wcid->tx_list);

	spin_lock(&wcid->tx_pending.lock);
	skb_queue_splice_tail_init(&wcid->tx_pending, &list);
	spin_unlock(&wcid->tx_pending.lock);

	spin_unlock_bh(&phy->tx_lock);

/* mt76_wcid_add_poll() */
	if (test_bit(MT76_MCU_RESET, &dev->phy.state))
	...
	spin_lock_bh(&dev->sta_poll_lock);
	if (list_empty(&wcid->poll_list))
		list_add_tail(&wcid->poll_list, &dev->sta_poll_list);
	spin_unlock_bh(&dev->sta_poll_lock);

/* mt76_get_txpower() */
		return -EINVAL;

	n_chains = hweight16(phy->chainmask);
	...
	/* txpower_cur is kept in 0.5 dB units, hence the divide by two */
	*dbm = DIV_ROUND_UP(phy->txpower_cur + delta, 2);

/* mt76_init_sar_power() */
	struct mt76_phy *phy = hw->priv;
	const struct cfg80211_sar_capa *capa = hw->wiphy->sar_capa;

	if (sar->type != NL80211_SAR_TYPE_POWER || !sar->num_sub_specs)
		return -EINVAL;

	for (i = 0; i < sar->num_sub_specs; i++) {
		u32 index = sar->sub_specs[i].freq_range_index;
	...
		s32 power = sar->sub_specs[i].power >> 1;

		if (power > 127 || power < -127)
	...
		phy->frp[index].range = &capa->freq_ranges[index];
		phy->frp[index].power = power;

/* mt76_get_sar_power() */
	const struct cfg80211_sar_capa *capa = phy->hw->wiphy->sar_capa;
	...
	if (!capa || !phy->frp)
	...
	if (power > 127 || power < -127)
	...
	freq = ieee80211_channel_to_frequency(chan->hw_value, chan->band);
	for (i = 0 ; i < capa->num_freq_ranges; i++) {
		if (phy->frp[i].range &&
		    freq >= phy->frp[i].range->start_freq &&
		    freq < phy->frp[i].range->end_freq) {
			power = min_t(int, phy->frp[i].power, power);

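/*
 * Unit note for the SAR helpers above: nl80211 supplies SAR limits in
 * 0.25 dBm steps, so the ">> 1" in mt76_init_sar_power() converts them to
 * the 0.5 dB units used for tx power internally; mt76_get_sar_power() then
 * clamps the requested power to the stored per-frequency-range limit.
 */
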
/* __mt76_csa_finish() */
	if (vif->bss_conf.csa_active && ieee80211_beacon_cntdwn_is_complete(vif, 0))
	...

/* mt76_csa_finish() */
	if (!dev->csa_complete)
	...
	ieee80211_iterate_active_interfaces_atomic(dev->hw,
	...
	dev->csa_complete = 0;

/* __mt76_csa_check() */
	if (!vif->bss_conf.csa_active)
	...
	dev->csa_complete |= ieee80211_beacon_cntdwn_is_complete(vif, 0);

/* mt76_csa_check() */
	ieee80211_iterate_active_interfaces_atomic(dev->hw,
	...

/* mt76_insert_ccmp_hdr() */
	struct mt76_rx_status *status = (struct mt76_rx_status *)skb->cb;
	...
	u8 *hdr, *pn = status->iv;
	...
	memmove(skb->data, skb->data + 8, hdr_len);
	hdr = skb->data + hdr_len;
	...
	status->flag &= ~RX_FLAG_IV_STRIPPED;

/* mt76_get_rate() */
	bool is_2g = sband->band == NL80211_BAND_2GHZ;
	int i, offset = 0, len = sband->n_bitrates;
	...
		if ((sband->bitrates[i].hw_value & GENMASK(7, 0)) == idx)

/* mt76_sw_scan() */
	struct mt76_phy *phy = hw->priv;

	set_bit(MT76_SCANNING, &phy->state);

/* mt76_sw_scan_complete() */
	struct mt76_phy *phy = hw->priv;

	clear_bit(MT76_SCANNING, &phy->state);

/* mt76_get_antenna() */
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;

	mutex_lock(&dev->mutex);
	...
	for (i = 0; i < ARRAY_SIZE(dev->phys); i++)
		if (dev->phys[i] && dev->phys[i]->hw == hw)
			*tx_ant |= dev->phys[i]->chainmask;
	...
	mutex_unlock(&dev->mutex);

/* mt76_init_queue() */
	hwq = devm_kzalloc(dev->dev, sizeof(*hwq), GFP_KERNEL);
	...
		return ERR_PTR(-ENOMEM);

	hwq->flags = flags;
	hwq->wed = wed;

	err = dev->queue_ops->alloc(dev, hwq, idx, n_desc, 0, ring_base);

/* mt76_ethtool_worker() */
	int i, ei = wi->initial_stat_idx;
	u64 *data = wi->data;

	wi->sta_count++;

	data[ei++] += stats->tx_mode[MT_PHY_TYPE_CCK];
	data[ei++] += stats->tx_mode[MT_PHY_TYPE_OFDM];
	data[ei++] += stats->tx_mode[MT_PHY_TYPE_HT];
	data[ei++] += stats->tx_mode[MT_PHY_TYPE_HT_GF];
	data[ei++] += stats->tx_mode[MT_PHY_TYPE_VHT];
	data[ei++] += stats->tx_mode[MT_PHY_TYPE_HE_SU];
	data[ei++] += stats->tx_mode[MT_PHY_TYPE_HE_EXT_SU];
	data[ei++] += stats->tx_mode[MT_PHY_TYPE_HE_TB];
	data[ei++] += stats->tx_mode[MT_PHY_TYPE_HE_MU];
	...
		data[ei++] += stats->tx_mode[MT_PHY_TYPE_EHT_SU];
		data[ei++] += stats->tx_mode[MT_PHY_TYPE_EHT_TRIG];
		data[ei++] += stats->tx_mode[MT_PHY_TYPE_EHT_MU];
	...
	for (i = 0; i < (ARRAY_SIZE(stats->tx_bw) - !eht); i++)
		data[ei++] += stats->tx_bw[i];
	...
		data[ei++] += stats->tx_mcs[i];
	...
		data[ei++] += stats->tx_nss[i];
	...
	wi->worker_stat_count = ei - wi->initial_stat_idx;

/* mt76_ethtool_page_pool_stats() */
		page_pool_get_stats(dev->q_rx[i].page_pool, &stats);

/* mt76_phy_dfs_state() */
	struct ieee80211_hw *hw = phy->hw;
	struct mt76_dev *dev = phy->dev;

	if (dev->region == NL80211_DFS_UNSET ||
	    test_bit(MT76_SCANNING, &phy->state))
	...
	if (!phy->radar_enabled) {
		if ((hw->conf.flags & IEEE80211_CONF_MONITOR) &&
		    (phy->chandef.chan->flags & IEEE80211_CHAN_RADAR))
	...
	if (!cfg80211_reg_can_beacon(hw->wiphy, &phy->chandef, NL80211_IFTYPE_AP))

/* mt76_vif_cleanup() */
	struct mt76_vif_link *mlink = (struct mt76_vif_link *)vif->drv_priv;
	struct mt76_vif_data *mvif = mlink->mvif;

	rcu_assign_pointer(mvif->link[0], NULL);
	...
	if (mvif->roc_phy)
		mt76_abort_roc(mvif->roc_phy);