Lines matching full:ab (identifier cross-reference in the ath11k rx data path, dp_rx; each entry gives the source line number, the matching line, the enclosing function, and whether the match is a function argument or a local)
25 u8 *ath11k_dp_rx_h_80211_hdr(struct ath11k_base *ab, struct hal_rx_desc *desc) in ath11k_dp_rx_h_80211_hdr() argument
27 return ab->hw_params.hw_ops->rx_desc_get_hdr_status(desc); in ath11k_dp_rx_h_80211_hdr()
31 enum hal_encrypt_type ath11k_dp_rx_h_mpdu_start_enctype(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_enctype() argument
34 if (!ab->hw_params.hw_ops->rx_desc_encrypt_valid(desc)) in ath11k_dp_rx_h_mpdu_start_enctype()
37 return ab->hw_params.hw_ops->rx_desc_get_encrypt_type(desc); in ath11k_dp_rx_h_mpdu_start_enctype()
40 static inline u8 ath11k_dp_rx_h_msdu_start_decap_type(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_decap_type() argument
43 return ab->hw_params.hw_ops->rx_desc_get_decap_type(desc); in ath11k_dp_rx_h_msdu_start_decap_type()
47 bool ath11k_dp_rx_h_msdu_start_ldpc_support(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_ldpc_support() argument
50 return ab->hw_params.hw_ops->rx_desc_get_ldpc_support(desc); in ath11k_dp_rx_h_msdu_start_ldpc_support()
54 u8 ath11k_dp_rx_h_msdu_start_mesh_ctl_present(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_mesh_ctl_present() argument
57 return ab->hw_params.hw_ops->rx_desc_get_mesh_ctl(desc); in ath11k_dp_rx_h_msdu_start_mesh_ctl_present()
61 bool ath11k_dp_rx_h_mpdu_start_seq_ctrl_valid(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_seq_ctrl_valid() argument
64 return ab->hw_params.hw_ops->rx_desc_get_mpdu_seq_ctl_vld(desc); in ath11k_dp_rx_h_mpdu_start_seq_ctrl_valid()
67 static inline bool ath11k_dp_rx_h_mpdu_start_fc_valid(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_fc_valid() argument
70 return ab->hw_params.hw_ops->rx_desc_get_mpdu_fc_valid(desc); in ath11k_dp_rx_h_mpdu_start_fc_valid()
73 static inline bool ath11k_dp_rx_h_mpdu_start_more_frags(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_more_frags() argument
78 hdr = (struct ieee80211_hdr *)(skb->data + ab->hw_params.hal_desc_sz); in ath11k_dp_rx_h_mpdu_start_more_frags()
82 static inline u16 ath11k_dp_rx_h_mpdu_start_frag_no(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_frag_no() argument
87 hdr = (struct ieee80211_hdr *)(skb->data + ab->hw_params.hal_desc_sz); in ath11k_dp_rx_h_mpdu_start_frag_no()
91 static inline u16 ath11k_dp_rx_h_mpdu_start_seq_no(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_seq_no() argument
94 return ab->hw_params.hw_ops->rx_desc_get_mpdu_start_seq_no(desc); in ath11k_dp_rx_h_mpdu_start_seq_no()
97 static inline void *ath11k_dp_rx_get_attention(struct ath11k_base *ab, in ath11k_dp_rx_get_attention() argument
100 return ab->hw_params.hw_ops->rx_desc_get_attention(desc); in ath11k_dp_rx_get_attention()
157 static bool ath11k_dp_rx_h_attn_msdu_len_err(struct ath11k_base *ab, in ath11k_dp_rx_h_attn_msdu_len_err() argument
163 rx_attention = ath11k_dp_rx_get_attention(ab, desc); in ath11k_dp_rx_h_attn_msdu_len_err()
169 static inline u16 ath11k_dp_rx_h_msdu_start_msdu_len(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_msdu_len() argument
172 return ab->hw_params.hw_ops->rx_desc_get_msdu_len(desc); in ath11k_dp_rx_h_msdu_start_msdu_len()
175 static inline u8 ath11k_dp_rx_h_msdu_start_sgi(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_sgi() argument
178 return ab->hw_params.hw_ops->rx_desc_get_msdu_sgi(desc); in ath11k_dp_rx_h_msdu_start_sgi()
181 static inline u8 ath11k_dp_rx_h_msdu_start_rate_mcs(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_rate_mcs() argument
184 return ab->hw_params.hw_ops->rx_desc_get_msdu_rate_mcs(desc); in ath11k_dp_rx_h_msdu_start_rate_mcs()
187 static inline u8 ath11k_dp_rx_h_msdu_start_rx_bw(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_rx_bw() argument
190 return ab->hw_params.hw_ops->rx_desc_get_msdu_rx_bw(desc); in ath11k_dp_rx_h_msdu_start_rx_bw()
193 static inline u32 ath11k_dp_rx_h_msdu_start_freq(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_freq() argument
196 return ab->hw_params.hw_ops->rx_desc_get_msdu_freq(desc); in ath11k_dp_rx_h_msdu_start_freq()
199 static inline u8 ath11k_dp_rx_h_msdu_start_pkt_type(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_pkt_type() argument
202 return ab->hw_params.hw_ops->rx_desc_get_msdu_pkt_type(desc); in ath11k_dp_rx_h_msdu_start_pkt_type()
205 static inline u8 ath11k_dp_rx_h_msdu_start_nss(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_start_nss() argument
208 return hweight8(ab->hw_params.hw_ops->rx_desc_get_msdu_nss(desc)); in ath11k_dp_rx_h_msdu_start_nss()
211 static inline u8 ath11k_dp_rx_h_mpdu_start_tid(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_tid() argument
214 return ab->hw_params.hw_ops->rx_desc_get_mpdu_tid(desc); in ath11k_dp_rx_h_mpdu_start_tid()
217 static inline u16 ath11k_dp_rx_h_mpdu_start_peer_id(struct ath11k_base *ab, in ath11k_dp_rx_h_mpdu_start_peer_id() argument
220 return ab->hw_params.hw_ops->rx_desc_get_mpdu_peer_id(desc); in ath11k_dp_rx_h_mpdu_start_peer_id()
223 static inline u8 ath11k_dp_rx_h_msdu_end_l3pad(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_end_l3pad() argument
226 return ab->hw_params.hw_ops->rx_desc_get_l3_pad_bytes(desc); in ath11k_dp_rx_h_msdu_end_l3pad()
229 static inline bool ath11k_dp_rx_h_msdu_end_first_msdu(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_end_first_msdu() argument
232 return ab->hw_params.hw_ops->rx_desc_get_first_msdu(desc); in ath11k_dp_rx_h_msdu_end_first_msdu()
235 static bool ath11k_dp_rx_h_msdu_end_last_msdu(struct ath11k_base *ab, in ath11k_dp_rx_h_msdu_end_last_msdu() argument
238 return ab->hw_params.hw_ops->rx_desc_get_last_msdu(desc); in ath11k_dp_rx_h_msdu_end_last_msdu()
241 static void ath11k_dp_rx_desc_end_tlv_copy(struct ath11k_base *ab, in ath11k_dp_rx_desc_end_tlv_copy() argument
245 ab->hw_params.hw_ops->rx_desc_copy_attn_end_tlv(fdesc, ldesc); in ath11k_dp_rx_desc_end_tlv_copy()
254 static inline u8 *ath11k_dp_rxdesc_get_80211hdr(struct ath11k_base *ab, in ath11k_dp_rxdesc_get_80211hdr() argument
259 rx_pkt_hdr = ab->hw_params.hw_ops->rx_desc_get_msdu_payload(rx_desc); in ath11k_dp_rxdesc_get_80211hdr()
264 static inline bool ath11k_dp_rxdesc_mpdu_valid(struct ath11k_base *ab, in ath11k_dp_rxdesc_mpdu_valid() argument
269 tlv_tag = ab->hw_params.hw_ops->rx_desc_get_mpdu_start_tag(rx_desc); in ath11k_dp_rxdesc_mpdu_valid()
274 static inline u32 ath11k_dp_rxdesc_get_ppduid(struct ath11k_base *ab, in ath11k_dp_rxdesc_get_ppduid() argument
277 return ab->hw_params.hw_ops->rx_desc_get_mpdu_ppdu_id(rx_desc); in ath11k_dp_rxdesc_get_ppduid()
280 static inline void ath11k_dp_rxdesc_set_msdu_len(struct ath11k_base *ab, in ath11k_dp_rxdesc_set_msdu_len() argument
284 ab->hw_params.hw_ops->rx_desc_set_msdu_len(desc, len); in ath11k_dp_rxdesc_set_msdu_len()
287 static bool ath11k_dp_rx_h_attn_is_mcbc(struct ath11k_base *ab, in ath11k_dp_rx_h_attn_is_mcbc() argument
290 struct rx_attention *attn = ath11k_dp_rx_get_attention(ab, desc); in ath11k_dp_rx_h_attn_is_mcbc()
292 return ath11k_dp_rx_h_msdu_end_first_msdu(ab, desc) && in ath11k_dp_rx_h_attn_is_mcbc()
297 static bool ath11k_dp_rxdesc_mac_addr2_valid(struct ath11k_base *ab, in ath11k_dp_rxdesc_mac_addr2_valid() argument
300 return ab->hw_params.hw_ops->rx_desc_mac_addr2_valid(desc); in ath11k_dp_rxdesc_mac_addr2_valid()
303 static u8 *ath11k_dp_rxdesc_mpdu_start_addr2(struct ath11k_base *ab, in ath11k_dp_rxdesc_mpdu_start_addr2() argument
306 return ab->hw_params.hw_ops->rx_desc_mpdu_start_addr2(desc); in ath11k_dp_rxdesc_mpdu_start_addr2()
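
The accessor lines above (ath11k_dp_rx_h_80211_hdr() through ath11k_dp_rxdesc_mpdu_start_addr2()) all funnel through ab->hw_params.hw_ops->rx_desc_*(), so per-chip descriptor layout differences stay behind a table of function pointers selected from the hardware parameters. Below is a minimal, self-contained C sketch of that indirection pattern; the struct names, fields, and the chip_a_* functions are simplified stand-ins for illustration, not the real ath11k definitions.

```c
/* Minimal model of the hw_ops indirection seen above: each chip family
 * supplies its own descriptor accessors, and thin wrappers dispatch
 * through the table picked at probe time. Names are illustrative only.
 */
#include <stdint.h>
#include <stdio.h>

struct rx_desc { uint16_t msdu_len; uint8_t tid; };   /* stand-in descriptor */

struct hw_ops {                                       /* stand-in ops table */
	uint16_t (*rx_desc_get_msdu_len)(const struct rx_desc *desc);
	uint8_t  (*rx_desc_get_mpdu_tid)(const struct rx_desc *desc);
};

struct base { const struct hw_ops *hw_ops; };         /* stand-in for ath11k_base */

static uint16_t chip_a_msdu_len(const struct rx_desc *d) { return d->msdu_len; }
static uint8_t  chip_a_tid(const struct rx_desc *d)      { return d->tid; }

static const struct hw_ops chip_a_ops = {
	.rx_desc_get_msdu_len = chip_a_msdu_len,
	.rx_desc_get_mpdu_tid = chip_a_tid,
};

/* Wrapper in the style of ath11k_dp_rx_h_msdu_start_msdu_len(): callers never
 * touch the raw descriptor layout, only the ops table. */
static uint16_t rx_h_msdu_len(struct base *ab, const struct rx_desc *desc)
{
	return ab->hw_ops->rx_desc_get_msdu_len(desc);
}

int main(void)
{
	struct base ab = { .hw_ops = &chip_a_ops };
	struct rx_desc desc = { .msdu_len = 1500, .tid = 3 };

	printf("msdu_len=%d tid=%d\n", rx_h_msdu_len(&ab, &desc),
	       ab.hw_ops->rx_desc_get_mpdu_tid(&desc));
	return 0;
}
```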
311 struct ath11k_base *ab = from_timer(ab, t, mon_reap_timer); in ath11k_dp_service_mon_ring() local
314 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) in ath11k_dp_service_mon_ring()
315 ath11k_dp_rx_process_mon_rings(ab, i, NULL, DP_MON_SERVICE_BUDGET); in ath11k_dp_service_mon_ring()
317 mod_timer(&ab->mon_reap_timer, jiffies + in ath11k_dp_service_mon_ring()
321 static int ath11k_dp_purge_mon_ring(struct ath11k_base *ab) in ath11k_dp_purge_mon_ring() argument
327 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) in ath11k_dp_purge_mon_ring()
328 reaped += ath11k_dp_rx_process_mon_rings(ab, i, in ath11k_dp_purge_mon_ring()
338 ath11k_warn(ab, "dp mon ring purge timeout"); in ath11k_dp_purge_mon_ring()
344 int ath11k_dp_rxbufs_replenish(struct ath11k_base *ab, int mac_id, in ath11k_dp_rxbufs_replenish() argument
360 srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id]; in ath11k_dp_rxbufs_replenish()
364 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_rxbufs_replenish()
366 num_free = ath11k_hal_srng_src_num_free(ab, srng, true); in ath11k_dp_rxbufs_replenish()
386 paddr = dma_map_single(ab->dev, skb->data, in ath11k_dp_rxbufs_replenish()
389 if (dma_mapping_error(ab->dev, paddr)) in ath11k_dp_rxbufs_replenish()
399 desc = ath11k_hal_srng_src_get_next_entry(ab, srng); in ath11k_dp_rxbufs_replenish()
413 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rxbufs_replenish()
424 dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb), in ath11k_dp_rxbufs_replenish()
429 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rxbufs_replenish()
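
The ath11k_dp_rxbufs_replenish() lines show a fixed shape: begin access on the refill ring, query how many entries are free, then for each new buffer allocate, DMA-map, and write a source descriptor, and finally end access, undoing the mapping on the failure path. The sketch below models that reserve-fill-commit loop over a plain array ring; the ring type, buffer allocation, and helper names are simplified stand-ins, not the real HAL SRNG API.

```c
/* Simplified model of the rxbufs replenish loop: reserve ring slots, attach a
 * freshly allocated buffer to each, and commit. The real code adds locking,
 * DMA mapping and cookie management; everything here is a stand-in.
 */
#include <stdio.h>
#include <stdlib.h>

#define RING_SIZE 8

struct ring {
	void *slots[RING_SIZE];
	int head;          /* next slot the producer fills */
	int used;          /* occupied slots */
};

static int ring_num_free(const struct ring *r) { return RING_SIZE - r->used; }

static int ring_post(struct ring *r, void *buf)
{
	if (!ring_num_free(r))
		return -1;
	r->slots[r->head] = buf;
	r->head = (r->head + 1) % RING_SIZE;
	r->used++;
	return 0;
}

/* Mirrors the shape of ath11k_dp_rxbufs_replenish(): stop early when either
 * the request or the ring's free space is exhausted. */
static int replenish(struct ring *r, int req_entries)
{
	int num_free = ring_num_free(r);
	int posted = 0;

	if (req_entries > num_free)
		req_entries = num_free;

	while (posted < req_entries) {
		void *buf = malloc(2048);       /* stand-in for skb alloc + dma map */
		if (!buf)
			break;                  /* real code unmaps/frees on error */
		if (ring_post(r, buf)) {
			free(buf);
			break;
		}
		posted++;
	}
	return posted;                          /* "access_end" would go here */
}

int main(void)
{
	struct ring r = { 0 };

	printf("posted %d buffers\n", replenish(&r, 5));
	return 0;
}
```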
448 dma_unmap_single(ar->ab->dev, ATH11K_SKB_RXCB(skb)->paddr, in ath11k_dp_rxdma_buf_ring_free()
462 struct ath11k_base *ab = ar->ab; in ath11k_dp_rxdma_pdev_buf_free() local
471 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) { in ath11k_dp_rxdma_pdev_buf_free()
487 ath11k_hal_srng_get_entrysize(ar->ab, ringtype); in ath11k_dp_rxdma_ring_buf_setup()
490 ath11k_dp_rxbufs_replenish(ar->ab, dp->mac_id, rx_ring, num_entries, in ath11k_dp_rxdma_ring_buf_setup()
491 ar->ab->hw_params.hal_params->rx_buf_rbm); in ath11k_dp_rxdma_ring_buf_setup()
498 struct ath11k_base *ab = ar->ab; in ath11k_dp_rxdma_pdev_buf_setup() local
504 if (ar->ab->hw_params.rxdma1_enable) { in ath11k_dp_rxdma_pdev_buf_setup()
509 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) { in ath11k_dp_rxdma_pdev_buf_setup()
520 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_pdev_srng_free() local
523 ath11k_dp_srng_cleanup(ab, &dp->rx_refill_buf_ring.refill_buf_ring); in ath11k_dp_rx_pdev_srng_free()
525 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) { in ath11k_dp_rx_pdev_srng_free()
526 if (ab->hw_params.rx_mac_buf_ring) in ath11k_dp_rx_pdev_srng_free()
527 ath11k_dp_srng_cleanup(ab, &dp->rx_mac_buf_ring[i]); in ath11k_dp_rx_pdev_srng_free()
529 ath11k_dp_srng_cleanup(ab, &dp->rxdma_err_dst_ring[i]); in ath11k_dp_rx_pdev_srng_free()
530 ath11k_dp_srng_cleanup(ab, in ath11k_dp_rx_pdev_srng_free()
534 ath11k_dp_srng_cleanup(ab, &dp->rxdma_mon_buf_ring.refill_buf_ring); in ath11k_dp_rx_pdev_srng_free()
537 void ath11k_dp_pdev_reo_cleanup(struct ath11k_base *ab) in ath11k_dp_pdev_reo_cleanup() argument
539 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_pdev_reo_cleanup()
543 ath11k_dp_srng_cleanup(ab, &dp->reo_dst_ring[i]); in ath11k_dp_pdev_reo_cleanup()
546 int ath11k_dp_pdev_reo_setup(struct ath11k_base *ab) in ath11k_dp_pdev_reo_setup() argument
548 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_pdev_reo_setup()
553 ret = ath11k_dp_srng_setup(ab, &dp->reo_dst_ring[i], in ath11k_dp_pdev_reo_setup()
557 ath11k_warn(ab, "failed to setup reo_dst_ring\n"); in ath11k_dp_pdev_reo_setup()
565 ath11k_dp_pdev_reo_cleanup(ab); in ath11k_dp_pdev_reo_setup()
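
ath11k_dp_pdev_reo_setup() above sets up its destination rings in a loop and, on the first failure, warns and calls ath11k_dp_pdev_reo_cleanup() to unwind. A small stand-alone sketch of that setup-with-unwind pattern follows; ring_setup()/ring_cleanup() and the simulated failure are hypothetical stand-ins for the srng helpers.

```c
/* Shape of ath11k_dp_pdev_reo_setup(): bring up a set of rings in a loop and,
 * on the first failure, warn and unwind what was already brought up.
 * ring_setup()/ring_cleanup() are stand-ins for the srng helpers.
 */
#include <stdio.h>

#define NUM_REO_DST_RINGS 4

static int ring_setup(int idx)
{
	if (idx == 2)                  /* simulate a failure on the third ring */
		return -1;
	printf("ring %d set up\n", idx);
	return 0;
}

static void ring_cleanup(int idx)
{
	printf("ring %d cleaned up\n", idx);
}

static int reo_setup(void)
{
	int i, ret;

	for (i = 0; i < NUM_REO_DST_RINGS; i++) {
		ret = ring_setup(i);
		if (ret) {
			fprintf(stderr, "failed to setup reo_dst_ring\n");
			goto err_reo_cleanup;
		}
	}
	return 0;

err_reo_cleanup:
	/* mirrors the cleanup call: tear down the rings already created */
	while (i--)
		ring_cleanup(i);
	return ret;
}

int main(void)
{
	return reo_setup() ? 1 : 0;
}
```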
573 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_pdev_srng_alloc() local
578 ret = ath11k_dp_srng_setup(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
583 ath11k_warn(ar->ab, "failed to setup rx_refill_buf_ring\n"); in ath11k_dp_rx_pdev_srng_alloc()
587 if (ar->ab->hw_params.rx_mac_buf_ring) { in ath11k_dp_rx_pdev_srng_alloc()
588 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) { in ath11k_dp_rx_pdev_srng_alloc()
589 ret = ath11k_dp_srng_setup(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
594 ath11k_warn(ar->ab, "failed to setup rx_mac_buf_ring %d\n", in ath11k_dp_rx_pdev_srng_alloc()
601 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) { in ath11k_dp_rx_pdev_srng_alloc()
602 ret = ath11k_dp_srng_setup(ar->ab, &dp->rxdma_err_dst_ring[i], in ath11k_dp_rx_pdev_srng_alloc()
606 ath11k_warn(ar->ab, "failed to setup rxdma_err_dst_ring %d\n", i); in ath11k_dp_rx_pdev_srng_alloc()
611 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) { in ath11k_dp_rx_pdev_srng_alloc()
613 ret = ath11k_dp_srng_setup(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
618 ath11k_warn(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
629 if (!ar->ab->hw_params.rxdma1_enable) { in ath11k_dp_rx_pdev_srng_alloc()
631 timer_setup(&ar->ab->mon_reap_timer, in ath11k_dp_rx_pdev_srng_alloc()
636 ret = ath11k_dp_srng_setup(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
641 ath11k_warn(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
646 ret = ath11k_dp_srng_setup(ar->ab, &dp->rxdma_mon_dst_ring, in ath11k_dp_rx_pdev_srng_alloc()
650 ath11k_warn(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
655 ret = ath11k_dp_srng_setup(ar->ab, &dp->rxdma_mon_desc_ring, in ath11k_dp_rx_pdev_srng_alloc()
659 ath11k_warn(ar->ab, in ath11k_dp_rx_pdev_srng_alloc()
667 void ath11k_dp_reo_cmd_list_cleanup(struct ath11k_base *ab) in ath11k_dp_reo_cmd_list_cleanup() argument
669 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_reo_cmd_list_cleanup()
679 dma_unmap_single(ab->dev, rx_tid->paddr, in ath11k_dp_reo_cmd_list_cleanup()
693 dma_unmap_single(ab->dev, rx_tid->paddr, in ath11k_dp_reo_cmd_list_cleanup()
709 ath11k_warn(dp->ab, "failed to flush rx tid hw desc, tid %d status %d\n", in ath11k_dp_reo_cmd_free()
712 dma_unmap_single(dp->ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_reo_cmd_free()
719 static void ath11k_dp_reo_cache_flush(struct ath11k_base *ab, in ath11k_dp_reo_cache_flush() argument
733 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_reo_cache_flush()
737 ath11k_warn(ab, in ath11k_dp_reo_cache_flush()
746 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_reo_cache_flush()
750 ath11k_err(ab, "failed to send HAL_REO_CMD_FLUSH_CACHE cmd, tid %d (%d)\n", in ath11k_dp_reo_cache_flush()
752 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_reo_cache_flush()
762 struct ath11k_base *ab = dp->ab; in ath11k_dp_rx_tid_del_func() local
770 ath11k_warn(ab, "failed to delete rx tid %d hw descriptor %d\n", in ath11k_dp_rx_tid_del_func()
796 ath11k_dp_reo_cache_flush(ab, &elem->data); in ath11k_dp_rx_tid_del_func()
805 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_rx_tid_del_func()
827 ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid, in ath11k_peer_rx_tid_delete()
832 ath11k_err(ar->ab, "failed to send HAL_REO_CMD_UPDATE_RX_QUEUE cmd, tid %d (%d)\n", in ath11k_peer_rx_tid_delete()
834 dma_unmap_single(ar->ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_peer_rx_tid_delete()
844 static int ath11k_dp_rx_link_desc_return(struct ath11k_base *ab, in ath11k_dp_rx_link_desc_return() argument
848 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_rx_link_desc_return()
853 srng = &ab->hal.srng_list[dp->wbm_desc_rel_ring.ring_id]; in ath11k_dp_rx_link_desc_return()
857 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_rx_link_desc_return()
859 desc = ath11k_hal_srng_src_get_next_entry(ab, srng); in ath11k_dp_rx_link_desc_return()
865 ath11k_hal_rx_msdu_link_desc_set(ab, (void *)desc, (void *)link_desc, in ath11k_dp_rx_link_desc_return()
869 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_link_desc_return()
878 struct ath11k_base *ab = rx_tid->ab; in ath11k_dp_rx_frags_cleanup() local
880 lockdep_assert_held(&ab->base_lock); in ath11k_dp_rx_frags_cleanup()
884 ath11k_dp_rx_link_desc_return(ab, (u32 *)rx_tid->dst_ring_desc, in ath11k_dp_rx_frags_cleanup()
901 lockdep_assert_held(&ar->ab->base_lock); in ath11k_peer_frags_flush()
906 spin_unlock_bh(&ar->ab->base_lock); in ath11k_peer_frags_flush()
908 spin_lock_bh(&ar->ab->base_lock); in ath11k_peer_frags_flush()
919 lockdep_assert_held(&ar->ab->base_lock); in ath11k_peer_rx_tid_cleanup()
927 spin_unlock_bh(&ar->ab->base_lock); in ath11k_peer_rx_tid_cleanup()
929 spin_lock_bh(&ar->ab->base_lock); in ath11k_peer_rx_tid_cleanup()
953 ret = ath11k_dp_tx_send_reo_cmd(ar->ab, rx_tid, in ath11k_peer_rx_tid_reo_update()
957 ath11k_warn(ar->ab, "failed to update rx tid queue, tid %d (%d)\n", in ath11k_peer_rx_tid_reo_update()
967 static void ath11k_dp_rx_tid_mem_free(struct ath11k_base *ab, in ath11k_dp_rx_tid_mem_free() argument
973 spin_lock_bh(&ab->base_lock); in ath11k_dp_rx_tid_mem_free()
975 peer = ath11k_peer_find(ab, vdev_id, peer_mac); in ath11k_dp_rx_tid_mem_free()
977 ath11k_warn(ab, "failed to find the peer to free up rx tid mem\n"); in ath11k_dp_rx_tid_mem_free()
985 dma_unmap_single(ab->dev, rx_tid->paddr, rx_tid->size, in ath11k_dp_rx_tid_mem_free()
993 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_tid_mem_free()
1000 struct ath11k_base *ab = ar->ab; in ath11k_peer_rx_tid_setup() local
1009 spin_lock_bh(&ab->base_lock); in ath11k_peer_rx_tid_setup()
1011 peer = ath11k_peer_find(ab, vdev_id, peer_mac); in ath11k_peer_rx_tid_setup()
1013 ath11k_warn(ab, "failed to find the peer %pM to set up rx tid\n", in ath11k_peer_rx_tid_setup()
1015 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_tid_setup()
1025 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_tid_setup()
1027 ath11k_warn(ab, "failed to update reo for peer %pM rx tid %d\n: %d", in ath11k_peer_rx_tid_setup()
1036 ath11k_warn(ab, "failed to send wmi rx reorder queue for peer %pM tid %d: %d\n", in ath11k_peer_rx_tid_setup()
1055 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_tid_setup()
1064 paddr = dma_map_single(ab->dev, addr_aligned, hw_desc_sz, in ath11k_peer_rx_tid_setup()
1067 ret = dma_mapping_error(ab->dev, paddr); in ath11k_peer_rx_tid_setup()
1069 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_tid_setup()
1070 ath11k_warn(ab, "failed to setup dma map for peer %pM rx tid %d: %d\n", in ath11k_peer_rx_tid_setup()
1080 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_tid_setup()
1085 ath11k_warn(ar->ab, "failed to setup rx reorder queue for peer %pM tid %d: %d\n", in ath11k_peer_rx_tid_setup()
1087 ath11k_dp_rx_tid_mem_free(ab, peer_mac, vdev_id, tid); in ath11k_peer_rx_tid_setup()
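
Most of the TID and peer paths above share one discipline: take ab->base_lock, look up the peer, warn and bail out if it is missing, and drop the lock before any longer-running work such as WMI commands. The user-space sketch below models that lookup-under-lock pattern; the pthread mutex and flat peer table are simplified stand-ins for the driver's spinlock and peer list.

```c
/* Simplified model of the "find peer under base_lock" pattern used by the
 * rx TID setup/teardown paths. A pthread mutex stands in for the driver's
 * spin_lock_bh(&ab->base_lock); the peer table is a flat array.
 */
#include <pthread.h>
#include <stdio.h>
#include <string.h>

struct peer { char mac[18]; int vdev_id; int setup_done; };

struct base {
	pthread_mutex_t base_lock;
	struct peer peers[4];
	int num_peers;
};

static struct peer *peer_find(struct base *ab, int vdev_id, const char *mac)
{
	for (int i = 0; i < ab->num_peers; i++)
		if (ab->peers[i].vdev_id == vdev_id &&
		    !strcmp(ab->peers[i].mac, mac))
			return &ab->peers[i];
	return NULL;
}

static int peer_rx_tid_setup(struct base *ab, int vdev_id, const char *mac)
{
	pthread_mutex_lock(&ab->base_lock);

	struct peer *peer = peer_find(ab, vdev_id, mac);
	if (!peer) {
		/* mirrors the "failed to find the peer ..." warnings above */
		fprintf(stderr, "failed to find peer %s\n", mac);
		pthread_mutex_unlock(&ab->base_lock);
		return -1;
	}

	peer->setup_done = 1;            /* queue/descriptor setup would go here */
	pthread_mutex_unlock(&ab->base_lock);

	/* longer-running work (WMI commands, etc.) only after the lock is dropped */
	return 0;
}

int main(void)
{
	struct base ab = { .num_peers = 1 };

	pthread_mutex_init(&ab.base_lock, NULL);
	strcpy(ab.peers[0].mac, "aa:bb:cc:dd:ee:ff");
	ab.peers[0].vdev_id = 0;

	printf("setup: %d\n", peer_rx_tid_setup(&ab, 0, "aa:bb:cc:dd:ee:ff"));
	pthread_mutex_destroy(&ab.base_lock);
	return 0;
}
```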
1102 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_ampdu_start() local
1111 ath11k_warn(ab, "failed to setup rx tid %d\n", ret); in ath11k_dp_rx_ampdu_start()
1119 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_ampdu_stop() local
1127 spin_lock_bh(&ab->base_lock); in ath11k_dp_rx_ampdu_stop()
1129 peer = ath11k_peer_find(ab, vdev_id, params->sta->addr); in ath11k_dp_rx_ampdu_stop()
1131 ath11k_warn(ab, "failed to find the peer to stop rx aggregation\n"); in ath11k_dp_rx_ampdu_stop()
1132 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_ampdu_stop()
1140 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_ampdu_stop()
1145 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_ampdu_stop()
1147 ath11k_warn(ab, "failed to update reo for rx tid %d: %d\n", in ath11k_dp_rx_ampdu_stop()
1156 ath11k_warn(ab, "failed to send wmi to delete rx tid %d\n", in ath11k_dp_rx_ampdu_stop()
1168 struct ath11k_base *ab = ar->ab; in ath11k_dp_peer_rx_pn_replay_config() local
1204 spin_lock_bh(&ab->base_lock); in ath11k_dp_peer_rx_pn_replay_config()
1206 peer = ath11k_peer_find(ab, arvif->vdev_id, peer_addr); in ath11k_dp_peer_rx_pn_replay_config()
1208 ath11k_warn(ab, "failed to find the peer to configure pn replay detection\n"); in ath11k_dp_peer_rx_pn_replay_config()
1209 spin_unlock_bh(&ab->base_lock); in ath11k_dp_peer_rx_pn_replay_config()
1219 ret = ath11k_dp_tx_send_reo_cmd(ab, rx_tid, in ath11k_dp_peer_rx_pn_replay_config()
1223 ath11k_warn(ab, "failed to configure rx tid %d queue for pn replay detection %d\n", in ath11k_dp_peer_rx_pn_replay_config()
1229 spin_unlock_bh(&ab->base_lock); in ath11k_dp_peer_rx_pn_replay_config()
1251 static int ath11k_htt_tlv_ppdu_stats_parse(struct ath11k_base *ab, in ath11k_htt_tlv_ppdu_stats_parse() argument
1265 ath11k_warn(ab, "Invalid len %d for the tag 0x%x\n", in ath11k_htt_tlv_ppdu_stats_parse()
1274 ath11k_warn(ab, "Invalid len %d for the tag 0x%x\n", in ath11k_htt_tlv_ppdu_stats_parse()
1293 ath11k_warn(ab, "Invalid len %d for the tag 0x%x\n", in ath11k_htt_tlv_ppdu_stats_parse()
1313 ath11k_warn(ab, "Invalid len %d for the tag 0x%x\n", in ath11k_htt_tlv_ppdu_stats_parse()
1335 int ath11k_dp_htt_tlv_iter(struct ath11k_base *ab, const void *ptr, size_t len, in ath11k_dp_htt_tlv_iter() argument
1347 ath11k_err(ab, "htt tlv parse failure at byte %zd (%zu bytes left, %zu expected)\n", in ath11k_dp_htt_tlv_iter()
1358 ath11k_err(ab, "htt tlv parse failure of tag %u at byte %zd (%zu bytes left, %u expected)\n", in ath11k_dp_htt_tlv_iter()
1362 ret = iter(ab, tlv_tag, tlv_len, ptr, data); in ath11k_dp_htt_tlv_iter()
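
The two error messages in ath11k_dp_htt_tlv_iter() above spell out its contract: the remaining buffer must hold the TLV header and then the tag-specific payload, and each well-formed TLV is passed to a caller-supplied callback. The self-contained sketch below walks a buffer the same way with an assumed 2-byte tag plus 2-byte length header; that header layout is an illustrative assumption, not the real HTT TLV format.

```c
/* Generic tag-length-value walk in the spirit of ath11k_dp_htt_tlv_iter():
 * check that the header and then the payload fit in what is left of the
 * buffer, dispatch to a callback, advance. Header layout is illustrative.
 */
#include <stdint.h>
#include <stdio.h>
#include <string.h>

typedef int (*tlv_iter_cb)(uint16_t tag, uint16_t len, const uint8_t *ptr,
			   void *data);

static int tlv_iter(const uint8_t *ptr, size_t len, tlv_iter_cb iter, void *data)
{
	const size_t hdr_len = 4;                 /* 2-byte tag + 2-byte length */

	while (len > 0) {
		if (len < hdr_len) {
			fprintf(stderr, "tlv parse failure: %zu bytes left\n", len);
			return -1;
		}

		uint16_t tag, tlv_len;
		memcpy(&tag, ptr, 2);
		memcpy(&tlv_len, ptr + 2, 2);
		ptr += hdr_len;
		len -= hdr_len;

		if (tlv_len > len) {
			fprintf(stderr, "tlv %d truncated: %d > %zu\n",
				tag, tlv_len, len);
			return -1;
		}

		int ret = iter(tag, tlv_len, ptr, data);
		if (ret)
			return ret;

		ptr += tlv_len;
		len -= tlv_len;
	}
	return 0;
}

static int print_tlv(uint16_t tag, uint16_t len, const uint8_t *ptr, void *data)
{
	(void)ptr; (void)data;
	printf("tag %d, len %d\n", tag, len);
	return 0;
}

int main(void)
{
	/* one TLV: tag=1, len=2, payload {0xaa, 0xbb} (host endianness) */
	uint8_t buf[8] = { 0 };
	uint16_t tag = 1, tlen = 2;

	memcpy(buf, &tag, 2);
	memcpy(buf + 2, &tlen, 2);
	buf[4] = 0xaa; buf[5] = 0xbb;

	return tlv_iter(buf, 6, print_tlv, NULL);
}
```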
1376 struct ath11k_base *ab = ar->ab; in ath11k_update_per_peer_tx_stats() local
1425 ath11k_warn(ab, "Invalid HE mcs %d peer stats", mcs); in ath11k_update_per_peer_tx_stats()
1430 ath11k_warn(ab, "Invalid VHT mcs %d peer stats", mcs); in ath11k_update_per_peer_tx_stats()
1435 ath11k_warn(ab, "Invalid HT mcs %d nss %d peer stats", in ath11k_update_per_peer_tx_stats()
1450 spin_lock_bh(&ab->base_lock); in ath11k_update_per_peer_tx_stats()
1451 peer = ath11k_peer_find_by_id(ab, usr_stats->peer_id); in ath11k_update_per_peer_tx_stats()
1454 spin_unlock_bh(&ab->base_lock); in ath11k_update_per_peer_tx_stats()
1517 spin_unlock_bh(&ab->base_lock); in ath11k_update_per_peer_tx_stats()
1564 static int ath11k_htt_pull_ppdu_stats(struct ath11k_base *ab, in ath11k_htt_pull_ppdu_stats() argument
1580 ar = ath11k_mac_get_ar_by_pdev_id(ab, pdev_id); in ath11k_htt_pull_ppdu_stats()
1597 ret = ath11k_dp_htt_tlv_iter(ab, msg->data, len, in ath11k_htt_pull_ppdu_stats()
1601 ath11k_warn(ab, "Failed to parse tlv %d\n", ret); in ath11k_htt_pull_ppdu_stats()
1614 static void ath11k_htt_pktlog(struct ath11k_base *ab, struct sk_buff *skb) in ath11k_htt_pktlog() argument
1625 ar = ath11k_mac_get_ar_by_pdev_id(ab, pdev_id); in ath11k_htt_pktlog()
1627 ath11k_warn(ab, "invalid pdev id %d on htt pktlog\n", pdev_id); in ath11k_htt_pktlog()
1632 ar->ab->pktlog_defs_checksum); in ath11k_htt_pktlog()
1638 static void ath11k_htt_backpressure_event_handler(struct ath11k_base *ab, in ath11k_htt_backpressure_event_handler() argument
1658 …ath11k_dbg(ab, ATH11K_DBG_DP_HTT, "backpressure event, pdev %d, ring type %d,ring id %d, hp %d tp … in ath11k_htt_backpressure_event_handler()
1665 bp_stats = &ab->soc_stats.bp_stats.umac_ring_bp_stats[ring_id]; in ath11k_htt_backpressure_event_handler()
1672 bp_stats = &ab->soc_stats.bp_stats.lmac_ring_bp_stats[ring_id][pdev_idx]; in ath11k_htt_backpressure_event_handler()
1674 ath11k_warn(ab, "unknown ring type received in htt bp event %d\n", in ath11k_htt_backpressure_event_handler()
1679 spin_lock_bh(&ab->base_lock); in ath11k_htt_backpressure_event_handler()
1684 spin_unlock_bh(&ab->base_lock); in ath11k_htt_backpressure_event_handler()
1687 void ath11k_dp_htt_htc_t2h_msg_handler(struct ath11k_base *ab, in ath11k_dp_htt_htc_t2h_msg_handler() argument
1690 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_htt_htc_t2h_msg_handler()
1700 ath11k_dbg(ab, ATH11K_DBG_DP_HTT, "dp_htt rx msg type :0x%0x\n", type); in ath11k_dp_htt_htc_t2h_msg_handler()
1719 ath11k_peer_map_event(ab, vdev_id, peer_id, mac_addr, 0, 0); in ath11k_dp_htt_htc_t2h_msg_handler()
1734 ath11k_peer_map_event(ab, vdev_id, peer_id, mac_addr, ast_hash, in ath11k_dp_htt_htc_t2h_msg_handler()
1741 ath11k_peer_unmap_event(ab, peer_id); in ath11k_dp_htt_htc_t2h_msg_handler()
1744 ath11k_htt_pull_ppdu_stats(ab, skb); in ath11k_dp_htt_htc_t2h_msg_handler()
1747 ath11k_debugfs_htt_ext_stats_handler(ab, skb); in ath11k_dp_htt_htc_t2h_msg_handler()
1750 ath11k_htt_pktlog(ab, skb); in ath11k_dp_htt_htc_t2h_msg_handler()
1753 ath11k_htt_backpressure_event_handler(ab, skb); in ath11k_dp_htt_htc_t2h_msg_handler()
1756 ath11k_warn(ab, "htt event %d not handled\n", type); in ath11k_dp_htt_htc_t2h_msg_handler()
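
ath11k_dp_htt_htc_t2h_msg_handler() above is a type-tagged dispatcher: read the message type out of the first word, switch on it, and hand the buffer to a specific handler or warn about an unhandled type. The compact sketch below shows that shape; the enum values and handler functions are placeholders, not the real HTT T2H message types.

```c
/* Shape of the HTT T2H handler: pull a type field from the message and
 * dispatch. Types and handlers below are placeholders for illustration.
 */
#include <stdint.h>
#include <stdio.h>

enum t2h_msg_type {                 /* illustrative subset, not the real enum */
	T2H_PEER_MAP = 1,
	T2H_PEER_UNMAP = 2,
	T2H_PPDU_STATS = 3,
};

static void handle_peer_map(const uint8_t *msg)   { (void)msg; puts("peer map"); }
static void handle_peer_unmap(const uint8_t *msg) { (void)msg; puts("peer unmap"); }
static void handle_ppdu_stats(const uint8_t *msg) { (void)msg; puts("ppdu stats"); }

static void t2h_msg_handler(const uint8_t *msg)
{
	uint8_t type = msg[0];          /* real code extracts a field from a 32-bit word */

	switch (type) {
	case T2H_PEER_MAP:
		handle_peer_map(msg);
		break;
	case T2H_PEER_UNMAP:
		handle_peer_unmap(msg);
		break;
	case T2H_PPDU_STATS:
		handle_ppdu_stats(msg);
		break;
	default:
		fprintf(stderr, "htt event %d not handled\n", type);
		break;
	}
}

int main(void)
{
	uint8_t peer_map_msg[4] = { T2H_PEER_MAP };
	uint8_t unknown_msg[4]  = { 0x7f };

	t2h_msg_handler(peer_map_msg);
	t2h_msg_handler(unknown_msg);
	return 0;
}
```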
1768 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_msdu_coalesce() local
1774 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_msdu_coalesce()
1790 rxcb->is_first_msdu = ath11k_dp_rx_h_msdu_end_first_msdu(ab, ldesc); in ath11k_dp_rx_msdu_coalesce()
1791 rxcb->is_last_msdu = ath11k_dp_rx_h_msdu_end_last_msdu(ab, ldesc); in ath11k_dp_rx_msdu_coalesce()
1803 ath11k_dp_rx_desc_end_tlv_copy(ab, rxcb->rx_desc, ldesc); in ath11k_dp_rx_msdu_coalesce()
1872 rx_attention = ath11k_dp_rx_get_attention(ar->ab, rxcb->rx_desc); in ath11k_dp_rx_h_csum_offload()
1902 ath11k_warn(ar->ab, "unsupported encryption type %d for mic len\n", enctype); in ath11k_dp_rx_crypto_mic_len()
1930 ath11k_warn(ar->ab, "unsupported encryption type %d\n", enctype); in ath11k_dp_rx_crypto_param_len()
1955 ath11k_warn(ar->ab, "unsupported encryption type %d\n", enctype); in ath11k_dp_rx_crypto_icv_len()
2004 if (ath11k_dp_rx_h_msdu_start_mesh_ctl_present(ar->ab, rxcb->rx_desc)) in ath11k_dp_rx_h_undecap_nwifi()
2108 hdr = (struct ieee80211_hdr *)ath11k_dp_rx_h_80211_hdr(ar->ab, rxcb->rx_desc); in ath11k_dp_rx_h_find_rfc1042()
2182 first_hdr = ath11k_dp_rx_h_80211_hdr(ar->ab, rx_desc); in ath11k_dp_rx_h_undecap()
2183 decap = ath11k_dp_rx_h_msdu_start_decap_type(ar->ab, rx_desc); in ath11k_dp_rx_h_undecap()
2219 ath11k_dp_rx_h_find_peer(struct ath11k_base *ab, struct sk_buff *msdu) in ath11k_dp_rx_h_find_peer() argument
2225 lockdep_assert_held(&ab->base_lock); in ath11k_dp_rx_h_find_peer()
2228 peer = ath11k_peer_find_by_id(ab, rxcb->peer_id); in ath11k_dp_rx_h_find_peer()
2233 if (!rx_desc || !(ath11k_dp_rxdesc_mac_addr2_valid(ab, rx_desc))) in ath11k_dp_rx_h_find_peer()
2236 peer = ath11k_peer_find_by_addr(ab, in ath11k_dp_rx_h_find_peer()
2237 ath11k_dp_rxdesc_mpdu_start_addr2(ab, rx_desc)); in ath11k_dp_rx_h_find_peer()
2257 fill_crypto_hdr = ath11k_dp_rx_h_attn_is_mcbc(ar->ab, rx_desc); in ath11k_dp_rx_h_mpdu()
2261 rxcb->peer_id = ath11k_dp_rx_h_mpdu_start_peer_id(ar->ab, rx_desc); in ath11k_dp_rx_h_mpdu()
2262 rxcb->seq_no = ath11k_dp_rx_h_mpdu_start_seq_no(ar->ab, rx_desc); in ath11k_dp_rx_h_mpdu()
2265 spin_lock_bh(&ar->ab->base_lock); in ath11k_dp_rx_h_mpdu()
2266 peer = ath11k_dp_rx_h_find_peer(ar->ab, msdu); in ath11k_dp_rx_h_mpdu()
2273 enctype = ath11k_dp_rx_h_mpdu_start_enctype(ar->ab, rx_desc); in ath11k_dp_rx_h_mpdu()
2275 spin_unlock_bh(&ar->ab->base_lock); in ath11k_dp_rx_h_mpdu()
2277 rx_attention = ath11k_dp_rx_get_attention(ar->ab, rx_desc); in ath11k_dp_rx_h_mpdu()
2312 if (ath11k_dp_rx_h_msdu_start_decap_type(ar->ab, rx_desc) != in ath11k_dp_rx_h_mpdu()
2329 pkt_type = ath11k_dp_rx_h_msdu_start_pkt_type(ar->ab, rx_desc); in ath11k_dp_rx_h_rate()
2330 bw = ath11k_dp_rx_h_msdu_start_rx_bw(ar->ab, rx_desc); in ath11k_dp_rx_h_rate()
2331 rate_mcs = ath11k_dp_rx_h_msdu_start_rate_mcs(ar->ab, rx_desc); in ath11k_dp_rx_h_rate()
2332 nss = ath11k_dp_rx_h_msdu_start_nss(ar->ab, rx_desc); in ath11k_dp_rx_h_rate()
2333 sgi = ath11k_dp_rx_h_msdu_start_sgi(ar->ab, rx_desc); in ath11k_dp_rx_h_rate()
2346 ath11k_warn(ar->ab, in ath11k_dp_rx_h_rate()
2360 ath11k_warn(ar->ab, in ath11k_dp_rx_h_rate()
2369 is_ldpc = ath11k_dp_rx_h_msdu_start_ldpc_support(ar->ab, rx_desc); in ath11k_dp_rx_h_rate()
2376 ath11k_warn(ar->ab, in ath11k_dp_rx_h_rate()
2404 meta_data = ath11k_dp_rx_h_msdu_start_freq(ar->ab, rx_desc); in ath11k_dp_rx_h_ppdu()
2425 ath11k_dbg_dump(ar->ab, ATH11K_DBG_DATA, NULL, "rx_desc: ", in ath11k_dp_rx_h_ppdu()
2463 decap = ath11k_dp_rx_h_msdu_start_decap_type(ar->ab, rxcb->rx_desc); in ath11k_dp_rx_deliver_msdu()
2465 spin_lock_bh(&ar->ab->base_lock); in ath11k_dp_rx_deliver_msdu()
2466 peer = ath11k_dp_rx_h_find_peer(ar->ab, msdu); in ath11k_dp_rx_deliver_msdu()
2469 spin_unlock_bh(&ar->ab->base_lock); in ath11k_dp_rx_deliver_msdu()
2471 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_deliver_msdu()
2495 ath11k_dbg_dump(ar->ab, ATH11K_DBG_DP_RX, NULL, "dp rx msdu: ", in ath11k_dp_rx_deliver_msdu()
2520 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_process_msdu() local
2529 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_process_msdu()
2533 ath11k_warn(ab, in ath11k_dp_rx_process_msdu()
2540 if (ath11k_dp_rx_h_attn_msdu_len_err(ab, rx_desc)) { in ath11k_dp_rx_process_msdu()
2541 ath11k_warn(ar->ab, "msdu len not valid\n"); in ath11k_dp_rx_process_msdu()
2547 rx_attention = ath11k_dp_rx_get_attention(ab, lrx_desc); in ath11k_dp_rx_process_msdu()
2549 ath11k_warn(ab, "msdu_done bit in attention is not set\n"); in ath11k_dp_rx_process_msdu()
2556 msdu_len = ath11k_dp_rx_h_msdu_start_msdu_len(ab, rx_desc); in ath11k_dp_rx_process_msdu()
2557 l3_pad_bytes = ath11k_dp_rx_h_msdu_end_l3pad(ab, lrx_desc); in ath11k_dp_rx_process_msdu()
2563 hdr_status = ath11k_dp_rx_h_80211_hdr(ab, rx_desc); in ath11k_dp_rx_process_msdu()
2565 ath11k_warn(ab, "invalid msdu len %u\n", msdu_len); in ath11k_dp_rx_process_msdu()
2566 ath11k_dbg_dump(ab, ATH11K_DBG_DATA, NULL, "", hdr_status, in ath11k_dp_rx_process_msdu()
2568 ath11k_dbg_dump(ab, ATH11K_DBG_DATA, NULL, "", rx_desc, in ath11k_dp_rx_process_msdu()
2579 ath11k_warn(ab, in ath11k_dp_rx_process_msdu()
2596 static void ath11k_dp_rx_process_received_packets(struct ath11k_base *ab, in ath11k_dp_rx_process_received_packets() argument
2609 if (unlikely(!rcu_access_pointer(ab->pdevs_active[mac_id]))) { in ath11k_dp_rx_process_received_packets()
2614 ar = ab->pdevs[mac_id].ar; in ath11k_dp_rx_process_received_packets()
2623 ath11k_dbg(ab, ATH11K_DBG_DATA, in ath11k_dp_rx_process_received_packets()
2633 int ath11k_dp_process_rx(struct ath11k_base *ab, int ring_id, in ath11k_dp_process_rx() argument
2636 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_process_rx()
2655 srng = &ab->hal.srng_list[dp->reo_dst_ring[ring_id].ring_id]; in ath11k_dp_process_rx()
2660 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_process_rx()
2663 (struct hal_reo_dest_ring *)ath11k_hal_srng_dst_get_next_entry(ab, in ath11k_dp_process_rx()
2674 ar = ab->pdevs[mac_id].ar; in ath11k_dp_process_rx()
2679 ath11k_warn(ab, "frame rx with invalid buf_id %d\n", in ath11k_dp_process_rx()
2689 dma_unmap_single(ab->dev, rxcb->paddr, in ath11k_dp_process_rx()
2700 ab->soc_stats.hal_reo_error[ring_id]++; in ath11k_dp_process_rx()
2737 if (unlikely(!done && ath11k_hal_srng_dst_num_free(ab, srng, true))) { in ath11k_dp_process_rx()
2738 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_process_rx()
2742 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_process_rx()
2749 for (i = 0; i < ab->num_radios; i++) { in ath11k_dp_process_rx()
2753 ath11k_dp_rx_process_received_packets(ab, napi, &msdu_list[i], i); in ath11k_dp_process_rx()
2755 ar = ab->pdevs[i].ar; in ath11k_dp_process_rx()
2758 ath11k_dp_rxbufs_replenish(ab, i, rx_ring, num_buffs_reaped[i], in ath11k_dp_process_rx()
2759 ab->hw_params.hal_params->rx_buf_rbm); in ath11k_dp_process_rx()
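
ath11k_dp_process_rx() above reaps descriptors from a REO destination ring under a NAPI budget, buckets the unmapped MSDUs per radio, then delivers each bucket and replenishes the rings by the amount reaped. The sketch below keeps only that reap-then-deliver split; the ring, budget handling, and per-radio counters are simplified stand-ins.

```c
/* Reap-then-deliver shape of ath11k_dp_process_rx(): drain up to "budget"
 * entries from a destination ring, bucket them per radio, then process each
 * bucket and replenish by the amount reaped. All types are stand-ins.
 */
#include <stdio.h>

#define NUM_RADIOS 2
#define RING_DEPTH 16

struct dst_ring {
	int entries[RING_DEPTH];     /* each entry records the owning radio id */
	int count;
};

static int ring_pop(struct dst_ring *r, int *mac_id)
{
	if (!r->count)
		return 0;
	*mac_id = r->entries[--r->count];
	return 1;
}

static int process_rx(struct dst_ring *ring, int budget)
{
	int reaped[NUM_RADIOS] = { 0 };
	int total = 0, mac_id;

	/* reap phase: bounded by both the budget and ring occupancy */
	while (total < budget && ring_pop(ring, &mac_id)) {
		reaped[mac_id]++;
		total++;
	}

	/* delivery + replenish phase, one pass per radio that got frames */
	for (int i = 0; i < NUM_RADIOS; i++) {
		if (!reaped[i])
			continue;
		printf("radio %d: deliver %d msdus, replenish %d buffers\n",
		       i, reaped[i], reaped[i]);
	}
	return total;
}

int main(void)
{
	struct dst_ring ring = { .entries = { 0, 1, 0, 1, 1 }, .count = 5 };

	printf("processed %d\n", process_rx(&ring, 4));
	return 0;
}
```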
2846 static struct sk_buff *ath11k_dp_rx_alloc_mon_status_buf(struct ath11k_base *ab, in ath11k_dp_rx_alloc_mon_status_buf() argument
2865 paddr = dma_map_single(ab->dev, skb->data, in ath11k_dp_rx_alloc_mon_status_buf()
2868 if (unlikely(dma_mapping_error(ab->dev, paddr))) in ath11k_dp_rx_alloc_mon_status_buf()
2882 dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb), in ath11k_dp_rx_alloc_mon_status_buf()
2890 int ath11k_dp_rx_mon_status_bufs_replenish(struct ath11k_base *ab, int mac_id, in ath11k_dp_rx_mon_status_bufs_replenish() argument
2906 srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id]; in ath11k_dp_rx_mon_status_bufs_replenish()
2910 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_rx_mon_status_bufs_replenish()
2912 num_free = ath11k_hal_srng_src_num_free(ab, srng, true); in ath11k_dp_rx_mon_status_bufs_replenish()
2918 skb = ath11k_dp_rx_alloc_mon_status_buf(ab, rx_ring, in ath11k_dp_rx_mon_status_bufs_replenish()
2924 desc = ath11k_hal_srng_src_get_next_entry(ab, srng); in ath11k_dp_rx_mon_status_bufs_replenish()
2936 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_mon_status_bufs_replenish()
2946 dma_unmap_single(ab->dev, paddr, skb->len + skb_tailroom(skb), in ath11k_dp_rx_mon_status_bufs_replenish()
2949 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_mon_status_bufs_replenish()
2993 ath11k_dp_rx_mon_buf_done(struct ath11k_base *ab, struct hal_srng *srng, in ath11k_dp_rx_mon_buf_done() argument
3005 status_desc = ath11k_hal_srng_src_next_peek(ab, srng); in ath11k_dp_rx_mon_buf_done()
3021 dma_sync_single_for_cpu(ab->dev, rxcb->paddr, in ath11k_dp_rx_mon_buf_done()
3032 static int ath11k_dp_rx_reap_mon_status_ring(struct ath11k_base *ab, int mac_id, in ath11k_dp_rx_reap_mon_status_ring() argument
3052 ar = ab->pdevs[ath11k_hw_mac_id_to_pdev_id(&ab->hw_params, mac_id)].ar; in ath11k_dp_rx_reap_mon_status_ring()
3055 srng_id = ath11k_hw_mac_id_to_srng_id(&ab->hw_params, mac_id); in ath11k_dp_rx_reap_mon_status_ring()
3058 srng = &ab->hal.srng_list[rx_ring->refill_buf_ring.ring_id]; in ath11k_dp_rx_reap_mon_status_ring()
3062 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_rx_reap_mon_status_ring()
3066 ath11k_hal_srng_src_peek(ab, srng); in ath11k_dp_rx_reap_mon_status_ring()
3082 ath11k_warn(ab, "rx monitor status with invalid buf_id %d\n", in ath11k_dp_rx_reap_mon_status_ring()
3090 dma_sync_single_for_cpu(ab->dev, rxcb->paddr, in ath11k_dp_rx_reap_mon_status_ring()
3097 ath11k_warn(ab, "mon status DONE not set %lx, buf_id %d\n", in ath11k_dp_rx_reap_mon_status_ring()
3116 reap_status = ath11k_dp_rx_mon_buf_done(ab, srng, in ath11k_dp_rx_reap_mon_status_ring()
3125 dma_unmap_single(ab->dev, rxcb->paddr, in ath11k_dp_rx_reap_mon_status_ring()
3137 if (ab->hw_params.full_monitor_mode) { in ath11k_dp_rx_reap_mon_status_ring()
3143 dma_unmap_single(ab->dev, rxcb->paddr, in ath11k_dp_rx_reap_mon_status_ring()
3152 skb = ath11k_dp_rx_alloc_mon_status_buf(ab, rx_ring, in ath11k_dp_rx_reap_mon_status_ring()
3156 hal_params = ab->hw_params.hal_params; in ath11k_dp_rx_reap_mon_status_ring()
3169 ab->hw_params.hal_params->rx_buf_rbm); in ath11k_dp_rx_reap_mon_status_ring()
3170 ath11k_hal_srng_src_get_next_entry(ab, srng); in ath11k_dp_rx_reap_mon_status_ring()
3173 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_reap_mon_status_ring()
3183 spin_lock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
3186 spin_unlock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
3190 spin_unlock_bh(&rx_tid->ab->base_lock); in ath11k_dp_rx_frag_timer()
3195 struct ath11k_base *ab = ar->ab; in ath11k_peer_rx_frag_setup() local
3203 ath11k_warn(ab, "failed to allocate michael_mic shash: %ld\n", in ath11k_peer_rx_frag_setup()
3208 spin_lock_bh(&ab->base_lock); in ath11k_peer_rx_frag_setup()
3210 peer = ath11k_peer_find(ab, vdev_id, peer_mac); in ath11k_peer_rx_frag_setup()
3212 ath11k_warn(ab, "failed to find the peer to set up fragment info\n"); in ath11k_peer_rx_frag_setup()
3213 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_frag_setup()
3220 rx_tid->ab = ab; in ath11k_peer_rx_frag_setup()
3227 spin_unlock_bh(&ab->base_lock); in ath11k_peer_rx_frag_setup()
3283 u32 hdr_len, hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_verify_tkip_mic()
3287 if (ath11k_dp_rx_h_mpdu_start_enctype(ar->ab, rx_desc) != in ath11k_dp_rx_h_verify_tkip_mic()
3334 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_undecap_frag()
3372 u32 flags, hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_defrag()
3382 enctype = ath11k_dp_rx_h_mpdu_start_enctype(ar->ab, rx_desc); in ath11k_dp_rx_h_defrag()
3384 rx_attention = ath11k_dp_rx_get_attention(ar->ab, rx_desc); in ath11k_dp_rx_h_defrag()
3432 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_h_defrag_reo_reinject() local
3447 hal_rx_desc_sz = ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_defrag_reo_reinject()
3448 link_desc_banks = ab->dp.link_desc_banks; in ath11k_dp_rx_h_defrag_reo_reinject()
3451 ath11k_hal_rx_reo_ent_paddr_get(ab, reo_dest_ring, &paddr, &desc_bank); in ath11k_dp_rx_h_defrag_reo_reinject()
3469 ath11k_dp_rxdesc_set_msdu_len(ab, rx_desc, defrag_skb->len - hal_rx_desc_sz); in ath11k_dp_rx_h_defrag_reo_reinject()
3471 paddr = dma_map_single(ab->dev, defrag_skb->data, in ath11k_dp_rx_h_defrag_reo_reinject()
3474 if (dma_mapping_error(ab->dev, paddr)) in ath11k_dp_rx_h_defrag_reo_reinject()
3491 ab->hw_params.hal_params->rx_buf_rbm); in ath11k_dp_rx_h_defrag_reo_reinject()
3494 srng = &ab->hal.srng_list[ab->dp.reo_reinject_ring.ring_id]; in ath11k_dp_rx_h_defrag_reo_reinject()
3497 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_rx_h_defrag_reo_reinject()
3500 ath11k_hal_srng_src_get_next_entry(ab, srng); in ath11k_dp_rx_h_defrag_reo_reinject()
3502 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_h_defrag_reo_reinject()
3509 ath11k_hal_rx_reo_ent_paddr_get(ab, reo_dest_ring, &paddr, &desc_bank); in ath11k_dp_rx_h_defrag_reo_reinject()
3528 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_h_defrag_reo_reinject()
3538 dma_unmap_single(ab->dev, paddr, defrag_skb->len + skb_tailroom(defrag_skb), in ath11k_dp_rx_h_defrag_reo_reinject()
3548 frag1 = ath11k_dp_rx_h_mpdu_start_frag_no(ar->ab, a); in ath11k_dp_rx_h_cmp_frags()
3549 frag2 = ath11k_dp_rx_h_mpdu_start_frag_no(ar->ab, b); in ath11k_dp_rx_h_cmp_frags()
3576 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_get_pn()
3603 encrypt_type = ath11k_dp_rx_h_mpdu_start_enctype(ar->ab, desc); in ath11k_dp_rx_h_defrag_validate_incr_pn()
3627 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_frag_h_mpdu() local
3640 peer_id = ath11k_dp_rx_h_mpdu_start_peer_id(ar->ab, rx_desc); in ath11k_dp_rx_frag_h_mpdu()
3641 tid = ath11k_dp_rx_h_mpdu_start_tid(ar->ab, rx_desc); in ath11k_dp_rx_frag_h_mpdu()
3642 seqno = ath11k_dp_rx_h_mpdu_start_seq_no(ar->ab, rx_desc); in ath11k_dp_rx_frag_h_mpdu()
3643 frag_no = ath11k_dp_rx_h_mpdu_start_frag_no(ar->ab, msdu); in ath11k_dp_rx_frag_h_mpdu()
3644 more_frags = ath11k_dp_rx_h_mpdu_start_more_frags(ar->ab, msdu); in ath11k_dp_rx_frag_h_mpdu()
3645 is_mcbc = ath11k_dp_rx_h_attn_is_mcbc(ar->ab, rx_desc); in ath11k_dp_rx_frag_h_mpdu()
3651 if (!ath11k_dp_rx_h_mpdu_start_seq_ctrl_valid(ar->ab, rx_desc) || in ath11k_dp_rx_frag_h_mpdu()
3652 !ath11k_dp_rx_h_mpdu_start_fc_valid(ar->ab, rx_desc) || in ath11k_dp_rx_frag_h_mpdu()
3664 spin_lock_bh(&ab->base_lock); in ath11k_dp_rx_frag_h_mpdu()
3665 peer = ath11k_peer_find_by_id(ab, peer_id); in ath11k_dp_rx_frag_h_mpdu()
3667 ath11k_warn(ab, "failed to find the peer to de-fragment received fragment peer_id %d\n", in ath11k_dp_rx_frag_h_mpdu()
3673 ath11k_warn(ab, "The peer %pM [%d] has uninitialized datapath\n", in ath11k_dp_rx_frag_h_mpdu()
3712 ath11k_dp_rx_link_desc_return(ab, ring_desc, in ath11k_dp_rx_frag_h_mpdu()
3723 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_frag_h_mpdu()
3725 spin_lock_bh(&ab->base_lock); in ath11k_dp_rx_frag_h_mpdu()
3727 peer = ath11k_peer_find_by_id(ab, peer_id); in ath11k_dp_rx_frag_h_mpdu()
3750 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_frag_h_mpdu()
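
The defragmentation path above pulls the sequence number, fragment number, and more-fragments flag from each arriving fragment and keeps the per-TID fragment list ordered by fragment number (see ath11k_dp_rx_h_cmp_frags()) before reassembly. The sketch below models that ordered insertion and a completeness check; a singly linked list stands in for the driver's skb fragment queue.

```c
/* Ordered insertion of fragments by fragment number, in the spirit of the
 * cmp_frags/frag_h_mpdu path above. A singly linked list stands in for the
 * skb fragment queue kept per rx TID.
 */
#include <stdio.h>

struct frag {
	int frag_no;              /* fragment number from the MAC header */
	int more_frags;           /* more-fragments flag */
	struct frag *next;
};

/* Insert so the list stays sorted by frag_no (duplicates keep arrival order). */
static void frag_enqueue_sorted(struct frag **head, struct frag *f)
{
	while (*head && (*head)->frag_no <= f->frag_no)
		head = &(*head)->next;
	f->next = *head;
	*head = f;
}

/* Reassembly only makes sense once fragments 0..N are all present and the
 * last one has more_frags == 0; this check mirrors that idea. */
static int frags_complete(const struct frag *head)
{
	int expect = 0;

	for (; head; head = head->next, expect++) {
		if (head->frag_no != expect)
			return 0;
		if (!head->next)
			return !head->more_frags;
	}
	return 0;
}

int main(void)
{
	struct frag f2 = { 2, 0, NULL }, f0 = { 0, 1, NULL }, f1 = { 1, 1, NULL };
	struct frag *list = NULL;

	frag_enqueue_sorted(&list, &f2);
	frag_enqueue_sorted(&list, &f0);
	frag_enqueue_sorted(&list, &f1);

	printf("complete: %d\n", frags_complete(list));   /* prints 1 */
	return 0;
}
```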
3764 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_process_rx_err_buf()
3769 ath11k_warn(ar->ab, "rx err buf with invalid buf_id %d\n", in ath11k_dp_process_rx_err_buf()
3779 dma_unmap_single(ar->ab->dev, rxcb->paddr, in ath11k_dp_process_rx_err_buf()
3789 if (!rcu_dereference(ar->ab->pdevs_active[ar->pdev_idx])) { in ath11k_dp_process_rx_err_buf()
3800 msdu_len = ath11k_dp_rx_h_msdu_start_msdu_len(ar->ab, rx_desc); in ath11k_dp_process_rx_err_buf()
3802 hdr_status = ath11k_dp_rx_h_80211_hdr(ar->ab, rx_desc); in ath11k_dp_process_rx_err_buf()
3803 ath11k_warn(ar->ab, "invalid msdu leng %u", msdu_len); in ath11k_dp_process_rx_err_buf()
3804 ath11k_dbg_dump(ar->ab, ATH11K_DBG_DATA, NULL, "", hdr_status, in ath11k_dp_process_rx_err_buf()
3806 ath11k_dbg_dump(ar->ab, ATH11K_DBG_DATA, NULL, "", rx_desc, in ath11k_dp_process_rx_err_buf()
3816 ath11k_dp_rx_link_desc_return(ar->ab, ring_desc, in ath11k_dp_process_rx_err_buf()
3824 int ath11k_dp_process_rx_err(struct ath11k_base *ab, struct napi_struct *napi, in ath11k_dp_process_rx_err() argument
3848 dp = &ab->dp; in ath11k_dp_process_rx_err()
3852 srng = &ab->hal.srng_list[reo_except->ring_id]; in ath11k_dp_process_rx_err()
3856 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_process_rx_err()
3859 (desc = ath11k_hal_srng_dst_get_next_entry(ab, srng))) { in ath11k_dp_process_rx_err()
3862 ab->soc_stats.err_ring_pkts++; in ath11k_dp_process_rx_err()
3863 ret = ath11k_hal_desc_reo_parse_err(ab, desc, &paddr, in ath11k_dp_process_rx_err()
3866 ath11k_warn(ab, "failed to parse error reo desc %d\n", in ath11k_dp_process_rx_err()
3877 ab->soc_stats.invalid_rbm++; in ath11k_dp_process_rx_err()
3878 ath11k_warn(ab, "invalid return buffer manager %d\n", rbm); in ath11k_dp_process_rx_err()
3879 ath11k_dp_rx_link_desc_return(ab, desc, in ath11k_dp_process_rx_err()
3892 ath11k_dp_rx_link_desc_return(ab, desc, in ath11k_dp_process_rx_err()
3903 ar = ab->pdevs[mac_id].ar; in ath11k_dp_process_rx_err()
3920 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_process_rx_err()
3924 for (i = 0; i < ab->num_radios; i++) { in ath11k_dp_process_rx_err()
3928 ar = ab->pdevs[i].ar; in ath11k_dp_process_rx_err()
3931 ath11k_dp_rxbufs_replenish(ab, i, rx_ring, n_bufs_reaped[i], in ath11k_dp_process_rx_err()
3932 ab->hw_params.hal_params->rx_buf_rbm); in ath11k_dp_process_rx_err()
3947 (DP_RX_BUFFER_SIZE - ar->ab->hw_params.hal_desc_sz)); in ath11k_dp_rx_null_q_desc_sg_drop()
3971 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_null_q_desc()
3973 msdu_len = ath11k_dp_rx_h_msdu_start_msdu_len(ar->ab, desc); in ath11k_dp_rx_h_null_q_desc()
3982 rx_attention = ath11k_dp_rx_get_attention(ar->ab, desc); in ath11k_dp_rx_h_null_q_desc()
3984 ath11k_warn(ar->ab, in ath11k_dp_rx_h_null_q_desc()
3999 rxcb->is_first_msdu = ath11k_dp_rx_h_msdu_end_first_msdu(ar->ab, desc); in ath11k_dp_rx_h_null_q_desc()
4000 rxcb->is_last_msdu = ath11k_dp_rx_h_msdu_end_last_msdu(ar->ab, desc); in ath11k_dp_rx_h_null_q_desc()
4005 l3pad_bytes = ath11k_dp_rx_h_msdu_end_l3pad(ar->ab, desc); in ath11k_dp_rx_h_null_q_desc()
4017 rxcb->tid = ath11k_dp_rx_h_mpdu_start_tid(ar->ab, desc); in ath11k_dp_rx_h_null_q_desc()
4033 ar->ab->soc_stats.reo_error[rxcb->err_code]++; in ath11k_dp_rx_h_reo_err()
4064 u32 hal_rx_desc_sz = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_h_tkip_mic_err()
4066 rxcb->is_first_msdu = ath11k_dp_rx_h_msdu_end_first_msdu(ar->ab, desc); in ath11k_dp_rx_h_tkip_mic_err()
4067 rxcb->is_last_msdu = ath11k_dp_rx_h_msdu_end_last_msdu(ar->ab, desc); in ath11k_dp_rx_h_tkip_mic_err()
4069 l3pad_bytes = ath11k_dp_rx_h_msdu_end_l3pad(ar->ab, desc); in ath11k_dp_rx_h_tkip_mic_err()
4070 msdu_len = ath11k_dp_rx_h_msdu_start_msdu_len(ar->ab, desc); in ath11k_dp_rx_h_tkip_mic_err()
4089 ar->ab->soc_stats.rxdma_error[rxcb->err_code]++; in ath11k_dp_rx_h_rxdma_err()
4135 int ath11k_dp_rx_process_wbm_err(struct ath11k_base *ab, in ath11k_dp_rx_process_wbm_err() argument
4139 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_rx_process_wbm_err()
4152 for (i = 0; i < ab->num_radios; i++) in ath11k_dp_rx_process_wbm_err()
4155 srng = &ab->hal.srng_list[dp->rx_rel_ring.ring_id]; in ath11k_dp_rx_process_wbm_err()
4159 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_rx_process_wbm_err()
4162 rx_desc = ath11k_hal_srng_dst_get_next_entry(ab, srng); in ath11k_dp_rx_process_wbm_err()
4166 ret = ath11k_hal_wbm_desc_parse_err(ab, rx_desc, &err_info); in ath11k_dp_rx_process_wbm_err()
4168 ath11k_warn(ab, in ath11k_dp_rx_process_wbm_err()
4177 ar = ab->pdevs[mac_id].ar; in ath11k_dp_rx_process_wbm_err()
4183 ath11k_warn(ab, "frame rx with invalid buf_id %d pdev %d\n", in ath11k_dp_rx_process_wbm_err()
4193 dma_unmap_single(ab->dev, rxcb->paddr, in ath11k_dp_rx_process_wbm_err()
4213 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_rx_process_wbm_err()
4220 for (i = 0; i < ab->num_radios; i++) { in ath11k_dp_rx_process_wbm_err()
4224 ar = ab->pdevs[i].ar; in ath11k_dp_rx_process_wbm_err()
4227 ath11k_dp_rxbufs_replenish(ab, i, rx_ring, num_buffs_reaped[i], in ath11k_dp_rx_process_wbm_err()
4228 ab->hw_params.hal_params->rx_buf_rbm); in ath11k_dp_rx_process_wbm_err()
4232 for (i = 0; i < ab->num_radios; i++) { in ath11k_dp_rx_process_wbm_err()
4233 if (!rcu_dereference(ab->pdevs_active[i])) { in ath11k_dp_rx_process_wbm_err()
4238 ar = ab->pdevs[i].ar; in ath11k_dp_rx_process_wbm_err()
4253 int ath11k_dp_process_rxdma_err(struct ath11k_base *ab, int mac_id, int budget) in ath11k_dp_process_rxdma_err() argument
4258 struct dp_link_desc_bank *link_desc_banks = ab->dp.link_desc_banks; in ath11k_dp_process_rxdma_err()
4276 ar = ab->pdevs[ath11k_hw_mac_id_to_pdev_id(&ab->hw_params, mac_id)].ar; in ath11k_dp_process_rxdma_err()
4277 err_ring = &ar->dp.rxdma_err_dst_ring[ath11k_hw_mac_id_to_srng_id(&ab->hw_params, in ath11k_dp_process_rxdma_err()
4281 srng = &ab->hal.srng_list[err_ring->ring_id]; in ath11k_dp_process_rxdma_err()
4285 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_process_rxdma_err()
4288 (desc = ath11k_hal_srng_dst_get_next_entry(ab, srng))) { in ath11k_dp_process_rxdma_err()
4289 ath11k_hal_rx_reo_ent_paddr_get(ab, desc, &paddr, &desc_bank); in ath11k_dp_process_rxdma_err()
4295 ab->soc_stats.rxdma_error[rxdma_err_code]++; in ath11k_dp_process_rxdma_err()
4309 ath11k_warn(ab, "rxdma error with invalid buf_id %d\n", in ath11k_dp_process_rxdma_err()
4319 dma_unmap_single(ab->dev, rxcb->paddr, in ath11k_dp_process_rxdma_err()
4327 ath11k_dp_rx_link_desc_return(ab, desc, in ath11k_dp_process_rxdma_err()
4331 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_process_rxdma_err()
4336 ath11k_dp_rxbufs_replenish(ab, mac_id, rx_ring, num_buf_freed, in ath11k_dp_process_rxdma_err()
4337 ab->hw_params.hal_params->rx_buf_rbm); in ath11k_dp_process_rxdma_err()
4342 void ath11k_dp_process_reo_status(struct ath11k_base *ab) in ath11k_dp_process_reo_status() argument
4344 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_process_reo_status()
4352 srng = &ab->hal.srng_list[dp->reo_status_ring.ring_id]; in ath11k_dp_process_reo_status()
4358 ath11k_hal_srng_access_begin(ab, srng); in ath11k_dp_process_reo_status()
4360 while ((reo_desc = ath11k_hal_srng_dst_get_next_entry(ab, srng))) { in ath11k_dp_process_reo_status()
4365 ath11k_hal_reo_status_queue_stats(ab, reo_desc, in ath11k_dp_process_reo_status()
4369 ath11k_hal_reo_flush_queue_status(ab, reo_desc, in ath11k_dp_process_reo_status()
4373 ath11k_hal_reo_flush_cache_status(ab, reo_desc, in ath11k_dp_process_reo_status()
4377 ath11k_hal_reo_unblk_cache_status(ab, reo_desc, in ath11k_dp_process_reo_status()
4381 ath11k_hal_reo_flush_timeout_list_status(ab, reo_desc, in ath11k_dp_process_reo_status()
4385 ath11k_hal_reo_desc_thresh_reached_status(ab, reo_desc, in ath11k_dp_process_reo_status()
4389 ath11k_hal_reo_update_rx_reo_queue_status(ab, reo_desc, in ath11k_dp_process_reo_status()
4393 ath11k_warn(ab, "Unknown reo status type %d\n", tag); in ath11k_dp_process_reo_status()
4416 ath11k_hal_srng_access_end(ab, srng); in ath11k_dp_process_reo_status()
4421 void ath11k_dp_rx_pdev_free(struct ath11k_base *ab, int mac_id) in ath11k_dp_rx_pdev_free() argument
4423 struct ath11k *ar = ab->pdevs[mac_id].ar; in ath11k_dp_rx_pdev_free()
4429 int ath11k_dp_rx_pdev_alloc(struct ath11k_base *ab, int mac_id) in ath11k_dp_rx_pdev_alloc() argument
4431 struct ath11k *ar = ab->pdevs[mac_id].ar; in ath11k_dp_rx_pdev_alloc()
4439 ath11k_warn(ab, "failed to setup rx srngs\n"); in ath11k_dp_rx_pdev_alloc()
4445 ath11k_warn(ab, "failed to setup rxdma ring\n"); in ath11k_dp_rx_pdev_alloc()
4450 ret = ath11k_dp_tx_htt_srng_setup(ab, ring_id, mac_id, HAL_RXDMA_BUF); in ath11k_dp_rx_pdev_alloc()
4452 ath11k_warn(ab, "failed to configure rx_refill_buf_ring %d\n", in ath11k_dp_rx_pdev_alloc()
4457 if (ab->hw_params.rx_mac_buf_ring) { in ath11k_dp_rx_pdev_alloc()
4458 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) { in ath11k_dp_rx_pdev_alloc()
4460 ret = ath11k_dp_tx_htt_srng_setup(ab, ring_id, in ath11k_dp_rx_pdev_alloc()
4463 ath11k_warn(ab, "failed to configure rx_mac_buf_ring%d %d\n", in ath11k_dp_rx_pdev_alloc()
4470 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) { in ath11k_dp_rx_pdev_alloc()
4472 ret = ath11k_dp_tx_htt_srng_setup(ab, ring_id, in ath11k_dp_rx_pdev_alloc()
4475 ath11k_warn(ab, "failed to configure rxdma_err_dest_ring%d %d\n", in ath11k_dp_rx_pdev_alloc()
4481 if (!ab->hw_params.rxdma1_enable) in ath11k_dp_rx_pdev_alloc()
4485 ret = ath11k_dp_tx_htt_srng_setup(ab, ring_id, in ath11k_dp_rx_pdev_alloc()
4488 ath11k_warn(ab, "failed to configure rxdma_mon_buf_ring %d\n", in ath11k_dp_rx_pdev_alloc()
4492 ret = ath11k_dp_tx_htt_srng_setup(ab, in ath11k_dp_rx_pdev_alloc()
4496 ath11k_warn(ab, "failed to configure rxdma_mon_dst_ring %d\n", in ath11k_dp_rx_pdev_alloc()
4500 ret = ath11k_dp_tx_htt_srng_setup(ab, in ath11k_dp_rx_pdev_alloc()
4504 ath11k_warn(ab, "failed to configure rxdma_mon_dst_ring %d\n", in ath11k_dp_rx_pdev_alloc()
4510 for (i = 0; i < ab->hw_params.num_rxdma_per_pdev; i++) { in ath11k_dp_rx_pdev_alloc()
4512 ret = ath11k_dp_tx_htt_srng_setup(ab, ring_id, mac_id + i, in ath11k_dp_rx_pdev_alloc()
4515 ath11k_warn(ab, in ath11k_dp_rx_pdev_alloc()
4547 if (ar->ab->hw_params.rxdma1_enable) { in ath11k_dp_rx_monitor_link_desc_return()
4549 hal_srng = &ar->ab->hal.srng_list[dp_srng->ring_id]; in ath11k_dp_rx_monitor_link_desc_return()
4551 dp_srng = &ar->ab->dp.wbm_desc_rel_ring; in ath11k_dp_rx_monitor_link_desc_return()
4552 hal_srng = &ar->ab->hal.srng_list[dp_srng->ring_id]; in ath11k_dp_rx_monitor_link_desc_return()
4555 ath11k_hal_srng_access_begin(ar->ab, hal_srng); in ath11k_dp_rx_monitor_link_desc_return()
4557 src_srng_desc = ath11k_hal_srng_src_get_next_entry(ar->ab, hal_srng); in ath11k_dp_rx_monitor_link_desc_return()
4564 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_monitor_link_desc_return()
4569 ath11k_hal_srng_access_end(ar->ab, hal_srng); in ath11k_dp_rx_monitor_link_desc_return()
4723 if (!ar->ab->hw_params.rxdma1_enable) in ath11k_dp_rx_mon_mpdu_pop()
4754 if (ar->ab->hw_params.rxdma1_enable) in ath11k_dp_rx_mon_mpdu_pop()
4760 (void *)ar->ab->dp.link_desc_banks[sw_cookie].vaddr + in ath11k_dp_rx_mon_mpdu_pop()
4761 (paddr - ar->ab->dp.link_desc_banks[sw_cookie].paddr); in ath11k_dp_rx_mon_mpdu_pop()
4770 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_mpdu_pop()
4784 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_mpdu_pop()
4790 dma_unmap_single(ar->ab->dev, rxcb->paddr, in ath11k_dp_rx_mon_mpdu_pop()
4797 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_mpdu_pop()
4808 l2_hdr_offset = ath11k_dp_rx_h_msdu_end_l3pad(ar->ab, rx_desc); in ath11k_dp_rx_mon_mpdu_pop()
4811 if (!ath11k_dp_rxdesc_mpdu_valid(ar->ab, rx_desc)) { in ath11k_dp_rx_mon_mpdu_pop()
4820 ath11k_dp_rxdesc_get_ppduid(ar->ab, rx_desc); in ath11k_dp_rx_mon_mpdu_pop()
4863 if (ar->ab->hw_params.rxdma1_enable) { in ath11k_dp_rx_mon_mpdu_pop()
4867 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_mpdu_pop()
4870 ath11k_dp_rx_link_desc_return(ar->ab, rx_link_buf_info, in ath11k_dp_rx_mon_mpdu_pop()
4893 rx_pkt_offset = ar->ab->hw_params.hal_desc_sz; in ath11k_dp_rx_msdus_set_payload()
4894 l2_hdr_offset = ath11k_dp_rx_h_msdu_end_l3pad(ar->ab, in ath11k_dp_rx_msdus_set_payload()
4905 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_mon_merg_msdus() local
4918 rx_attention = ath11k_dp_rx_get_attention(ab, rx_desc); in ath11k_dp_rx_mon_merg_msdus()
4927 decap_format = ath11k_dp_rx_h_msdu_start_decap_type(ab, rx_desc); in ath11k_dp_rx_mon_merg_msdus()
4951 hdr_desc = ath11k_dp_rxdesc_get_80211hdr(ab, rx_desc); in ath11k_dp_rx_mon_merg_msdus()
4976 ath11k_dbg(ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_merg_msdus()
4980 ath11k_dbg(ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_merg_msdus()
5159 if (ar->ab->hw_params.rxdma1_enable) in ath11k_dp_rx_mon_dest_process()
5164 mon_dst_srng = &ar->ab->hal.srng_list[ring_id]; in ath11k_dp_rx_mon_dest_process()
5169 ath11k_hal_srng_access_begin(ar->ab, mon_dst_srng); in ath11k_dp_rx_mon_dest_process()
5175 while ((ring_entry = ath11k_hal_srng_dst_peek(ar->ab, mon_dst_srng))) { in ath11k_dp_rx_mon_dest_process()
5197 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_dest_process()
5209 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_mon_dest_process()
5223 ring_entry = ath11k_hal_srng_dst_get_next_entry(ar->ab, in ath11k_dp_rx_mon_dest_process()
5226 ath11k_hal_srng_access_end(ar->ab, mon_dst_srng); in ath11k_dp_rx_mon_dest_process()
5233 hal_params = ar->ab->hw_params.hal_params; in ath11k_dp_rx_mon_dest_process()
5235 if (ar->ab->hw_params.rxdma1_enable) in ath11k_dp_rx_mon_dest_process()
5236 ath11k_dp_rxbufs_replenish(ar->ab, dp->mac_id, in ath11k_dp_rx_mon_dest_process()
5241 ath11k_dp_rxbufs_replenish(ar->ab, dp->mac_id, in ath11k_dp_rx_mon_dest_process()
5248 int ath11k_dp_rx_process_mon_status(struct ath11k_base *ab, int mac_id, in ath11k_dp_rx_process_mon_status() argument
5251 struct ath11k *ar = ath11k_ab_to_ar(ab, mac_id); in ath11k_dp_rx_process_mon_status()
5266 num_buffs_reaped = ath11k_dp_rx_reap_mon_status_ring(ab, mac_id, &budget, in ath11k_dp_rx_process_mon_status()
5291 hal_status = ath11k_hal_rx_parse_mon_status(ab, ppdu_info, skb); in ath11k_dp_rx_process_mon_status()
5298 if (!ab->hw_params.full_monitor_mode) { in ath11k_dp_rx_process_mon_status()
5312 spin_lock_bh(&ab->base_lock); in ath11k_dp_rx_process_mon_status()
5313 peer = ath11k_peer_find_by_id(ab, ppdu_info->peer_id); in ath11k_dp_rx_process_mon_status()
5316 ath11k_dbg(ab, ATH11K_DBG_DATA, in ath11k_dp_rx_process_mon_status()
5329 spin_unlock_bh(&ab->base_lock); in ath11k_dp_rx_process_mon_status()
5411 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_full_mon_mpdu_pop()
5422 dma_unmap_single(ar->ab->dev, rxcb->paddr, in ath11k_dp_rx_full_mon_mpdu_pop()
5429 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_full_mon_mpdu_pop()
5440 l2_hdr_offset = ath11k_dp_rx_h_msdu_end_l3pad(ar->ab, rx_desc); in ath11k_dp_rx_full_mon_mpdu_pop()
5443 if (!ath11k_dp_rxdesc_mpdu_valid(ar->ab, rx_desc)) { in ath11k_dp_rx_full_mon_mpdu_pop()
5479 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, in ath11k_dp_rx_full_mon_mpdu_pop()
5542 struct ath11k_base *ab = ar->ab; in ath11k_dp_rx_full_mon_deliver_ppdu() local
5543 struct ath11k_dp *dp = &ab->dp; in ath11k_dp_rx_full_mon_deliver_ppdu()
5557 ath11k_dbg(ar->ab, ATH11K_DBG_DATA, "full mon: deliver ppdu\n"); in ath11k_dp_rx_full_mon_deliver_ppdu()
5566 ath11k_dp_rx_process_full_mon_status_ring(struct ath11k_base *ab, int mac_id, in ath11k_dp_rx_process_full_mon_status_ring() argument
5569 struct ath11k *ar = ab->pdevs[mac_id].ar; in ath11k_dp_rx_process_full_mon_status_ring()
5578 quota = ath11k_dp_rx_process_mon_status(ab, mac_id, in ath11k_dp_rx_process_full_mon_status_ring()
5583 quota += ath11k_dp_rx_process_mon_status(ab, mac_id, in ath11k_dp_rx_process_full_mon_status_ring()
5603 ath11k_dp_rx_full_mon_drop_ppdu(&ab->dp, pmon->mon_mpdu); in ath11k_dp_rx_process_full_mon_status_ring()
5608 static int ath11k_dp_full_mon_process_rx(struct ath11k_base *ab, int mac_id, in ath11k_dp_full_mon_process_rx() argument
5611 struct ath11k *ar = ab->pdevs[mac_id].ar; in ath11k_dp_full_mon_process_rx()
5633 mon_dst_srng = &ar->ab->hal.srng_list[dp->rxdma_mon_dst_ring.ring_id]; in ath11k_dp_full_mon_process_rx()
5636 ath11k_hal_srng_access_begin(ar->ab, mon_dst_srng); in ath11k_dp_full_mon_process_rx()
5637 while ((ring_entry = ath11k_hal_srng_dst_peek(ar->ab, mon_dst_srng))) { in ath11k_dp_full_mon_process_rx()
5649 ret = ath11k_dp_rx_full_mon_prepare_mpdu(&ab->dp, in ath11k_dp_full_mon_process_rx()
5672 ring_entry = ath11k_hal_srng_dst_get_next_entry(ar->ab, in ath11k_dp_full_mon_process_rx()
5678 ath11k_hal_srng_access_end(ar->ab, mon_dst_srng); in ath11k_dp_full_mon_process_rx()
5683 ath11k_dp_rxbufs_replenish(ar->ab, dp->mac_id, in ath11k_dp_full_mon_process_rx()
5690 quota = ath11k_dp_rx_process_full_mon_status_ring(ab, mac_id, in ath11k_dp_full_mon_process_rx()
5696 int ath11k_dp_rx_process_mon_rings(struct ath11k_base *ab, int mac_id, in ath11k_dp_rx_process_mon_rings() argument
5699 struct ath11k *ar = ath11k_ab_to_ar(ab, mac_id); in ath11k_dp_rx_process_mon_rings()
5703 ab->hw_params.full_monitor_mode) in ath11k_dp_rx_process_mon_rings()
5704 ret = ath11k_dp_full_mon_process_rx(ab, mac_id, napi, budget); in ath11k_dp_rx_process_mon_rings()
5706 ret = ath11k_dp_rx_process_mon_status(ab, mac_id, napi, budget); in ath11k_dp_rx_process_mon_rings()
5734 ath11k_warn(ar->ab, "pdev_mon_status_attach() failed"); in ath11k_dp_rx_pdev_mon_attach()
5741 if (!ar->ab->hw_params.rxdma1_enable) in ath11k_dp_rx_pdev_mon_attach()
5746 ath11k_hal_srng_get_entrysize(ar->ab, HAL_RXDMA_MONITOR_DESC); in ath11k_dp_rx_pdev_mon_attach()
5748 &ar->ab->hal.srng_list[dp->rxdma_mon_desc_ring.ring_id]; in ath11k_dp_rx_pdev_mon_attach()
5750 ret = ath11k_dp_link_desc_setup(ar->ab, pmon->link_desc_banks, in ath11k_dp_rx_pdev_mon_attach()
5754 ath11k_warn(ar->ab, "mon_link_desc_pool_setup() failed"); in ath11k_dp_rx_pdev_mon_attach()
5769 ath11k_dp_link_desc_cleanup(ar->ab, pmon->link_desc_banks, in ath11k_dp_mon_link_free()
5781 int ath11k_dp_rx_pktlog_start(struct ath11k_base *ab) in ath11k_dp_rx_pktlog_start() argument
5784 mod_timer(&ab->mon_reap_timer, in ath11k_dp_rx_pktlog_start()
5790 int ath11k_dp_rx_pktlog_stop(struct ath11k_base *ab, bool stop_timer) in ath11k_dp_rx_pktlog_stop() argument
5795 del_timer_sync(&ab->mon_reap_timer); in ath11k_dp_rx_pktlog_stop()
5798 ret = ath11k_dp_purge_mon_ring(ab); in ath11k_dp_rx_pktlog_stop()
5800 ath11k_warn(ab, "failed to purge dp mon ring: %d\n", ret); in ath11k_dp_rx_pktlog_stop()